heimdall_tools 1.3.41 → 1.3.46

Sign up to get free protection for your applications and to get access to all the features.
@@ -16,4 +16,6 @@ module HeimdallTools
16
16
  autoload :DBProtectMapper, 'heimdall_tools/dbprotect_mapper'
17
17
  autoload :AwsConfigMapper, 'heimdall_tools/aws_config_mapper'
18
18
  autoload :NetsparkerMapper, 'heimdall_tools/netsparker_mapper'
19
+ autoload :SarifMapper, 'heimdall_tools/sarif_mapper'
20
+ autoload :ScoutSuiteMapper, 'heimdall_tools/scoutsuite_mapper'
19
21
  end
@@ -18,11 +18,14 @@ INSUFFICIENT_DATA_MSG = 'Not enough data has been collectd to determine complian
18
18
  #
19
19
  module HeimdallTools
20
20
  class AwsConfigMapper
21
- def initialize(custom_mapping, verbose = false)
22
- @verbose = verbose
21
+ def initialize(custom_mapping, endpoint = nil)
23
22
  @default_mapping = get_rule_mapping(AWS_CONFIG_MAPPING_FILE)
24
23
  @custom_mapping = custom_mapping.nil? ? {} : get_rule_mapping(custom_mapping)
25
- @client = Aws::ConfigService::Client.new
24
+ if endpoint.nil?
25
+ @client = Aws::ConfigService::Client.new
26
+ else
27
+ @client = Aws::ConfigService::Client.new(endpoint: endpoint)
28
+ end
26
29
  @issues = get_all_config_rules
27
30
  end
28
31
 
@@ -34,8 +37,8 @@ module HeimdallTools
34
37
  def to_hdf
35
38
  controls = @issues.map do |issue|
36
39
  @item = {}
37
- @item['id'] = issue[:config_rule_name]
38
- @item['title'] = issue[:config_rule_name]
40
+ @item['id'] = issue[:config_rule_id]
41
+ @item['title'] = "#{get_account_id(issue[:config_rule_arn])} - #{issue[:config_rule_name]}"
39
42
  @item['desc'] = issue[:description]
40
43
  @item['impact'] = 0.5
41
44
  @item['tags'] = hdf_tags(issue)
@@ -51,18 +54,33 @@ module HeimdallTools
51
54
  @item
52
55
  end
53
56
  end
57
+
54
58
  results = HeimdallDataFormat.new(
55
59
  profile_name: 'AWS Config',
56
- title: 'AWS Config',
57
- summary: 'AWS Config',
58
- controls: controls,
59
- statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION },
60
+ title: 'AWS Config',
61
+ summary: 'AWS Config',
62
+ controls: controls,
63
+ statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION },
60
64
  )
61
65
  results.to_hdf
62
66
  end
63
67
 
64
68
  private
65
69
 
70
+ ##
71
+ # Gets the account ID from a config rule ARN
72
+ #
73
+ # https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
74
+ # https://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html
75
+ #
76
+ # Params:
77
+ # - arn: The ARN of the config rule
78
+ #
79
+ # Returns: The account ID portion of the ARN
80
+ def get_account_id(arn)
81
+ /:(\d{12}):config-rule/.match(arn)&.captures&.first || 'no-account-id'
82
+ end
83
+
66
84
  ##
67
85
  # Read in a config rule -> 800-53 control mapping CSV.
68
86
  #
@@ -71,7 +89,7 @@ module HeimdallTools
71
89
  #
72
90
  # Returns: A mapped version of the csv in the format { rule_name: row, ... }
73
91
  def get_rule_mapping(path)
74
- CSV.read(path, headers: true).map { |row| [row[0], row] }.to_h
92
+ CSV.read(path, headers: true).map { |row| [row['AwsConfigRuleSourceIdentifier'], row] }.to_h
75
93
  end
76
94
 
77
95
  ##
@@ -238,18 +256,17 @@ module HeimdallTools
238
256
  def hdf_tags(config_rule)
239
257
  result = {}
240
258
 
241
- @default_mapping
242
- @custom_mapping
259
+ source_identifier = config_rule.dig(:source, :source_identifier)
243
260
 
244
261
  # NIST tag
245
262
  result['nist'] = []
246
- default_mapping_match = @default_mapping[config_rule[:config_rule_name]]
263
+ default_mapping_match = @default_mapping[source_identifier]
247
264
 
248
- result['nist'] += default_mapping_match[1].split('|') unless default_mapping_match.nil?
265
+ result['nist'] += default_mapping_match['NIST-ID'].split('|') unless default_mapping_match.nil?
249
266
 
250
- custom_mapping_match = @custom_mapping[config_rule[:config_rule_name]]
267
+ custom_mapping_match = @custom_mapping[source_identifier]
251
268
 
252
- result['nist'] += custom_mapping_match[1].split('|').map { |name| "#{name} (user provided)" } unless custom_mapping_match.nil?
269
+ result['nist'] += custom_mapping_match['NIST-ID'].split('|').map { |name| "#{name} (user provided)" } unless custom_mapping_match.nil?
253
270
 
254
271
  result['nist'] = ['unmapped'] if result['nist'].empty?
255
272
 
@@ -260,7 +277,8 @@ module HeimdallTools
260
277
  # If no input parameters, then provide an empty JSON array to the JSON
261
278
  # parser because passing nil to JSON.parse throws an exception.
262
279
  params = (JSON.parse(config_rule[:input_parameters] || '[]').map { |key, value| "#{key}: #{value}" }).join('<br/>')
263
- check_text = config_rule[:config_rule_arn] || ''
280
+ check_text = "ARN: #{config_rule[:config_rule_arn] || 'N/A'}"
281
+ check_text += "<br/>Source Identifier: #{config_rule.dig(:source, :source_identifier) || 'N/A'}"
264
282
  check_text += "<br/>#{params}" unless params.empty?
265
283
  check_text
266
284
  end
@@ -20,9 +20,8 @@ DEFAULT_NIST_TAG = %w{SA-11 RA-5 Rev_4}.freeze
20
20
 
21
21
  module HeimdallTools
22
22
  class BurpSuiteMapper
23
- def initialize(burps_xml, _name = nil, verbose = false)
23
+ def initialize(burps_xml, _name = nil)
24
24
  @burps_xml = burps_xml
25
- @verbose = verbose
26
25
 
27
26
  begin
28
27
  @cwe_nist_mapping = parse_mapper
@@ -6,7 +6,6 @@ module HeimdallTools
6
6
  long_desc Help.text(:fortify_mapper)
7
7
  option :fvdl, required: true, aliases: '-f'
8
8
  option :output, required: true, aliases: '-o'
9
- option :verbose, type: :boolean, aliases: '-V'
10
9
  def fortify_mapper
11
10
  hdf = HeimdallTools::FortifyMapper.new(File.read(options[:fvdl])).to_hdf
12
11
  File.write(options[:output], hdf)
@@ -17,7 +16,6 @@ module HeimdallTools
17
16
  option :json, required: true, aliases: '-j'
18
17
  option :name, required: true, aliases: '-n'
19
18
  option :output, required: true, aliases: '-o'
20
- option :verbose, type: :boolean, aliases: '-V'
21
19
  def zap_mapper
22
20
  hdf = HeimdallTools::ZapMapper.new(File.read(options[:json]), options[:name]).to_hdf
23
21
  File.write(options[:output], hdf)
@@ -29,7 +27,6 @@ module HeimdallTools
29
27
  option :api_url, required: true, aliases: '-u'
30
28
  option :auth, type: :string, required: false
31
29
  option :output, required: true, aliases: '-o'
32
- option :verbose, type: :boolean, aliases: '-V'
33
30
  def sonarqube_mapper
34
31
  hdf = HeimdallTools::SonarQubeMapper.new(options[:name], options[:api_url], options[:auth]).to_hdf
35
32
  File.write(options[:output], hdf)
@@ -39,7 +36,6 @@ module HeimdallTools
39
36
  long_desc Help.text(:burpsuite_mapper)
40
37
  option :xml, required: true, aliases: '-x'
41
38
  option :output, required: true, aliases: '-o'
42
- option :verbose, type: :boolean, aliases: '-V'
43
39
  def burpsuite_mapper
44
40
  hdf = HeimdallTools::BurpSuiteMapper.new(File.read(options[:xml])).to_hdf
45
41
  File.write(options[:output], hdf)
@@ -49,7 +45,6 @@ module HeimdallTools
49
45
  long_desc Help.text(:nessus_mapper)
50
46
  option :xml, required: true, aliases: '-x'
51
47
  option :output_prefix, required: true, aliases: '-o'
52
- option :verbose, type: :boolean, aliases: '-V'
53
48
  def nessus_mapper
54
49
  hdfs = HeimdallTools::NessusMapper.new(File.read(options[:xml])).to_hdf
55
50
 
@@ -64,7 +59,6 @@ module HeimdallTools
64
59
  long_desc Help.text(:snyk_mapper)
65
60
  option :json, required: true, aliases: '-j'
66
61
  option :output_prefix, required: true, aliases: '-o'
67
- option :verbose, type: :boolean, aliases: '-V'
68
62
  def snyk_mapper
69
63
  hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
70
64
  puts "\r\HDF Generated:\n"
@@ -78,7 +72,6 @@ module HeimdallTools
78
72
  long_desc Help.text(:nikto_mapper)
79
73
  option :json, required: true, aliases: '-j'
80
74
  option :output, required: true, aliases: '-o'
81
- option :verbose, type: :boolean, aliases: '-V'
82
75
  def nikto_mapper
83
76
  hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
84
77
  File.write(options[:output], hdf)
@@ -90,7 +83,6 @@ module HeimdallTools
90
83
  long_desc Help.text(:jfrog_xray_mapper)
91
84
  option :json, required: true, aliases: '-j'
92
85
  option :output, required: true, aliases: '-o'
93
- option :verbose, type: :boolean, aliases: '-V'
94
86
  def jfrog_xray_mapper
95
87
  hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
96
88
  File.write(options[:output], hdf)
@@ -102,7 +94,6 @@ module HeimdallTools
102
94
  long_desc Help.text(:dbprotect_mapper)
103
95
  option :xml, required: true, aliases: '-x'
104
96
  option :output, required: true, aliases: '-o'
105
- option :verbose, type: :boolean, aliases: '-V'
106
97
  def dbprotect_mapper
107
98
  hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
108
99
  File.write(options[:output], hdf)
@@ -114,7 +105,6 @@ module HeimdallTools
114
105
  long_desc Help.text(:aws_config_mapper)
115
106
  # option :custom_mapping, required: false, aliases: '-m'
116
107
  option :output, required: true, aliases: '-o'
117
- option :verbose, type: :boolean, aliases: '-V'
118
108
  def aws_config_mapper
119
109
  hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
120
110
  File.write(options[:output], hdf)
@@ -126,7 +116,6 @@ module HeimdallTools
126
116
  long_desc Help.text(:netsparker_mapper)
127
117
  option :xml, required: true, aliases: '-x'
128
118
  option :output, required: true, aliases: '-o'
129
- option :verbose, type: :boolean, aliases: '-V'
130
119
  def netsparker_mapper
131
120
  hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
132
121
  File.write(options[:output], hdf)
@@ -134,6 +123,29 @@ module HeimdallTools
134
123
  puts options[:output].to_s
135
124
  end
136
125
 
126
+ desc 'sarif_mapper', 'sarif_mapper translates a SARIF JSON file into HDF format JSON to be viewable in Heimdall'
127
+ long_desc Help.text(:sarif_mapper)
128
+ option :json, required: true, aliases: '-j'
129
+ option :output, required: true, aliases: '-o'
130
+ option :verbose, type: :boolean, aliases: '-V'
131
+ def sarif_mapper
132
+ hdf = HeimdallTools::SarifMapper.new(File.read(options[:json])).to_hdf
133
+ File.write(options[:output], hdf)
134
+ puts "\rHDF Generated:\n"
135
+ puts options[:output].to_s
136
+ end
137
+
138
+ desc 'scoutsuite_mapper', 'scoutsuite_mapper translates Scout Suite results from Javascript to HDF-formatted JSON so as to be viewable on Heimdall'
139
+ long_desc Help.text(:scoutsuite_mapper)
140
+ option :javascript, required: true, banner: 'SCOUTSUITE-RESULTS-JS', aliases: ['-i', '--input', '-j']
141
+ option :output, required: true, banner: 'HDF-SCAN-RESULTS-JSON', aliases: '-o'
142
+ def scoutsuite_mapper
143
+ hdf = HeimdallTools::ScoutSuiteMapper.new(File.read(options[:javascript])).to_hdf
144
+ File.write(options[:output], hdf)
145
+ puts "\rHDF Generated:\n"
146
+ puts options[:output].to_s
147
+ end
148
+
137
149
  desc 'version', 'prints version'
138
150
  def version
139
151
  puts VERSION
@@ -12,15 +12,11 @@ IMPACT_MAPPING = {
12
12
 
13
13
  module HeimdallTools
14
14
  class DBProtectMapper
15
- def initialize(xml, _name = nil, verbose = false)
16
- @verbose = verbose
17
-
18
- begin
19
- dataset = xml_to_hash(xml)
20
- @entries = compile_findings(dataset['dataset'])
21
- rescue StandardError => e
22
- raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
23
- end
15
+ def initialize(xml, _name = nil)
16
+ dataset = xml_to_hash(xml)
17
+ @entries = compile_findings(dataset['dataset'])
18
+ rescue StandardError => e
19
+ raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
24
20
  end
25
21
 
26
22
  def to_hdf
@@ -7,9 +7,8 @@ DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
7
7
 
8
8
  module HeimdallTools
9
9
  class FortifyMapper
10
- def initialize(fvdl, verbose = false)
10
+ def initialize(fvdl)
11
11
  @fvdl = fvdl
12
- @verbose = verbose
13
12
 
14
13
  begin
15
14
  data = xml_to_hash(fvdl)
@@ -0,0 +1,12 @@
1
+ sarif_mapper translates a SARIF JSON file into HDF format JSON to be viewable in Heimdall
2
+
3
+ SARIF level to HDF impact Mapping:
4
+ SARIF level error -> HDF impact 0.7
5
+ SARIF level warning -> HDF impact 0.5
6
+ SARIF level note -> HDF impact 0.3
7
+ SARIF level none -> HDF impact 0.0
8
+ SARIF level not provided -> HDF impact 0.1 as default
9
+
10
+ Examples:
11
+
12
+ heimdall_tools sarif_mapper [OPTIONS] -j <sarif-results-json> -o <hdf-scan-results.json>
@@ -0,0 +1,7 @@
1
+ scoutsuite_mapper translates Scout Suite results from Javascript to HDF-formatted JSON so as to be viewable on Heimdall
2
+
3
+ Note: Currently this mapper only supports AWS.
4
+
5
+ Examples:
6
+
7
+ heimdall_tools scoutsuite_mapper -i <scoutsuite-results-js> -o <hdf-scan-results-json>
@@ -27,9 +27,8 @@ end
27
27
 
28
28
  module HeimdallTools
29
29
  class JfrogXrayMapper
30
- def initialize(xray_json, _name = nil, verbose = false)
30
+ def initialize(xray_json, _name = nil)
31
31
  @xray_json = xray_json
32
- @verbose = verbose
33
32
 
34
33
  begin
35
34
  @cwe_nist_mapping = parse_mapper
@@ -39,9 +39,8 @@ end
39
39
 
40
40
  module HeimdallTools
41
41
  class NessusMapper
42
- def initialize(nessus_xml, verbose = false)
42
+ def initialize(nessus_xml)
43
43
  @nessus_xml = nessus_xml
44
- @verbose = verbose
45
44
  read_cci_xml
46
45
  begin
47
46
  @cwe_nist_mapping = parse_mapper
@@ -72,7 +71,8 @@ module HeimdallTools
72
71
  info = {}
73
72
 
74
73
  info['policyName'] = policy['policyName']
75
- info['version'] = policy['Preferences']['ServerPreferences']['preference'].select { |x| x['name'].eql? 'sc_version' }.first['value']
74
+ scanner_version = policy['Preferences']['ServerPreferences']['preference'].select { |x| x['name'].eql? 'sc_version' }
75
+ info['version'] = scanner_version.empty? ? NA_STRING : scanner_version.first['value']
76
76
  info
77
77
  rescue StandardError => e
78
78
  raise "Invalid Nessus XML file provided Exception: #{e}"
@@ -21,19 +21,15 @@ DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
21
21
 
22
22
  module HeimdallTools
23
23
  class NetsparkerMapper
24
- def initialize(xml, _name = nil, verbose = false)
25
- @verbose = verbose
26
-
27
- begin
28
- @cwe_nist_mapping = parse_mapper(CWE_NIST_MAPPING_FILE)
29
- @owasp_nist_mapping = parse_mapper(OWASP_NIST_MAPPING_FILE)
30
- data = xml_to_hash(xml)
31
-
32
- @vulnerabilities = data['netsparker-enterprise']['vulnerabilities']['vulnerability']
33
- @scan_info = data['netsparker-enterprise']['target']
34
- rescue StandardError => e
35
- raise "Invalid Netsparker XML file provided Exception: #{e}"
36
- end
24
+ def initialize(xml, _name = nil)
25
+ @cwe_nist_mapping = parse_mapper(CWE_NIST_MAPPING_FILE)
26
+ @owasp_nist_mapping = parse_mapper(OWASP_NIST_MAPPING_FILE)
27
+ data = xml_to_hash(xml)
28
+
29
+ @vulnerabilities = data['netsparker-enterprise']['vulnerabilities']['vulnerability']
30
+ @scan_info = data['netsparker-enterprise']['target']
31
+ rescue StandardError => e
32
+ raise "Invalid Netsparker XML file provided Exception: #{e}"
37
33
  end
38
34
 
39
35
  def to_hdf
@@ -26,9 +26,8 @@ end
26
26
 
27
27
  module HeimdallTools
28
28
  class NiktoMapper
29
- def initialize(nikto_json, _name = nil, verbose = false)
29
+ def initialize(nikto_json, _name = nil)
30
30
  @nikto_json = nikto_json
31
- @verbose = verbose
32
31
 
33
32
  begin
34
33
  @nikto_nist_mapping = parse_mapper
@@ -0,0 +1,198 @@
1
+ require 'json'
2
+ require 'csv'
3
+ require 'heimdall_tools/hdf'
4
+
5
+ RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
6
+
7
+ CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
8
+
9
+ IMPACT_MAPPING = {
10
+ error: 0.7,
11
+ warning: 0.5,
12
+ note: 0.3,
13
+ none: 0.0
14
+ }.freeze
15
+
16
+ DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
17
+
18
+ # Loading spinner sign
19
+ $spinner = Enumerator.new do |e|
20
+ loop do
21
+ e.yield '|'
22
+ e.yield '/'
23
+ e.yield '-'
24
+ e.yield '\\'
25
+ end
26
+ end
27
+
28
+ module HeimdallTools
29
+ class SarifMapper
30
+ def initialize(sarif_json, _name = nil, verbose = false)
31
+ @sarif_json = sarif_json
32
+ @verbose = verbose
33
+ begin
34
+ @cwe_nist_mapping = parse_mapper
35
+ @sarif_log = JSON.parse(@sarif_json)
36
+ rescue StandardError => e
37
+ raise "Invalid SARIF JSON file provided\n\nException: #{e}"
38
+ end
39
+ end
40
+
41
+ def extract_scaninfo(sarif_log)
42
+ info = {}
43
+ begin
44
+ info['policy'] = 'SARIF'
45
+ info['version'] = sarif_log['version']
46
+ info['projectName'] = 'Static Analysis Results Interchange Format'
47
+ info['summary'] = NA_STRING
48
+ info
49
+ rescue StandardError => e
50
+ raise "Error extracting project info from SARIF JSON file provided Exception: #{e}"
51
+ end
52
+ end
53
+
54
+ def finding(result)
55
+ finding = {}
56
+ finding['status'] = 'failed'
57
+ finding['code_desc'] = ''
58
+ if get_location(result)['uri']
59
+ finding['code_desc'] += " URL : #{get_location(result)['uri']}"
60
+ end
61
+ if get_location(result)['start_line']
62
+ finding['code_desc'] += " LINE : #{get_location(result)['start_line']}"
63
+ end
64
+ if get_location(result)['start_column']
65
+ finding['code_desc'] += " COLUMN : #{get_location(result)['start_column']}"
66
+ end
67
+ finding['code_desc'].strip!
68
+ finding['run_time'] = NA_FLOAT
69
+ finding['start_time'] = NA_STRING
70
+ finding
71
+ end
72
+
73
+ def add_nist_tag_from_cwe(cweid, taxonomy_name, tags_node)
74
+ entries = @cwe_nist_mapping.select { |x| cweid.include?(x[:cweid].to_s) && !x[:nistid].nil? }
75
+ tags = entries.map { |x| x[:nistid] }
76
+ result_tags = tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
77
+ if result_tags.count.positive?
78
+ if !tags_node
79
+ tags_node = {}
80
+ end
81
+ if !tags_node.key?(taxonomy_name)
82
+ tags_node[taxonomy_name] = []
83
+ end
84
+ result_tags.each do |t|
85
+ tags_node[taxonomy_name] |= [t]
86
+ end
87
+ end
88
+ tags_node
89
+ end
90
+
91
+ def get_location(result)
92
+ location_info = {}
93
+ location_info['uri'] = result.dig('locations', 0, 'physicalLocation', 'artifactLocation', 'uri')
94
+ location_info['start_line'] = result.dig('locations', 0, 'physicalLocation', 'region', 'startLine')
95
+ location_info['start_column'] = result.dig('locations', 0, 'physicalLocation', 'region', 'startColumn')
96
+ location_info
97
+ end
98
+
99
+ def get_rule_info(run, result, rule_id)
100
+ finding = {}
101
+ driver = run.dig('tool', 'driver')
102
+ finding['driver_name'] = driver['name']
103
+ finding['driver_version'] = driver['version']
104
+ rules = driver['rules']
105
+ if rules
106
+ rule = rules.find { |x| x['id'].eql?(rule_id) }
107
+ if rule
108
+ finding['rule_name'] = rule&.[]('name')
109
+ finding['rule_short_description'] = rule&.[]('shortDescription')&.[]('text')
110
+ finding['rule_tags'] = get_tags(rule)
111
+ finding['rule_name'] = rule&.[]('messageStrings')&.[]('default')&.[]('text') unless finding['rule_name']
112
+ end
113
+ end
114
+ finding['rule_name'] = result&.[]('message')&.[]('text') unless finding['rule_name']
115
+ finding
116
+ end
117
+
118
+ def get_tags(rule)
119
+ result = {}
120
+ Array(rule&.[]('relationships')).each do |relationship|
121
+ taxonomy_name = relationship['target']['toolComponent']['name'].downcase
122
+ taxonomy_id = relationship['target']['id']
123
+ if !result.key?(taxonomy_name)
124
+ result[taxonomy_name] = []
125
+ end
126
+ result[taxonomy_name] |= [taxonomy_id]
127
+ end
128
+ result
129
+ end
130
+
131
+ def parse_identifiers(rule_tags, ref)
132
+ # Extracting id number from reference style CWE-297
133
+ rule_tags[ref.downcase].map { |e| e.downcase.split("#{ref.downcase}-")[1] }
134
+ rescue StandardError
135
+ []
136
+ end
137
+
138
+ def impact(severity)
139
+ severity_mapping = IMPACT_MAPPING[severity.to_sym]
140
+ severity_mapping.nil? ? 0.1 : severity_mapping
141
+ end
142
+
143
+ def parse_mapper
144
+ csv_data = CSV.read(CWE_NIST_MAPPING_FILE, **{ encoding: 'UTF-8',
145
+ headers: true,
146
+ header_converters: :symbol,
147
+ converters: :all })
148
+ csv_data.map(&:to_hash)
149
+ end
150
+
151
+ def desc_tags(data, label)
152
+ { data: data || NA_STRING, label: label || NA_STRING }
153
+ end
154
+
155
+ def process_item(run, result, controls)
156
+ printf("\rProcessing: %s", $spinner.next)
157
+ control = controls.find { |x| x['id'].eql?(result['ruleId']) }
158
+
159
+ if control
160
+ control['results'] << finding(result)
161
+ else
162
+ rule_info = get_rule_info(run, result, result['ruleId'])
163
+ item = {}
164
+ item['tags'] = rule_info['rule_tags']
165
+ item['descriptions'] = []
166
+ item['refs'] = NA_ARRAY
167
+ item['source_location'] = { ref: get_location(result)['uri'], line: get_location(result)['start_line'] }
168
+ item['descriptions'] = NA_ARRAY
169
+ item['title'] = rule_info['rule_name'].to_s
170
+ item['id'] = result['ruleId'].to_s
171
+ item['desc'] = rule_info['rule_short_description'].to_s
172
+ item['impact'] = impact(result['level'].to_s)
173
+ item['code'] = NA_STRING
174
+ item['results'] = [finding(result)]
175
+ item['tags'] = add_nist_tag_from_cwe(parse_identifiers(rule_info['rule_tags'], 'CWE'), 'nist', item['tags'])
176
+ controls << item
177
+ end
178
+ end
179
+
180
+ def to_hdf
181
+ controls = []
182
+ @sarif_log['runs'].each do |run|
183
+ run['results'].each do |result|
184
+ process_item(run, result, controls)
185
+ end
186
+ end
187
+
188
+ scaninfo = extract_scaninfo(@sarif_log)
189
+ results = HeimdallDataFormat.new(profile_name: scaninfo['policy'],
190
+ version: scaninfo['version'],
191
+ title: scaninfo['projectName'],
192
+ summary: scaninfo['summary'],
193
+ controls: controls,
194
+ target_id: scaninfo['projectName'])
195
+ results.to_hdf
196
+ end
197
+ end
198
+ end