heimdall_tools 1.3.28 → 1.3.33
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +36 -0
- data/lib/data/U_CCI_List.xml +38403 -0
- data/lib/data/cwe-nist-mapping.csv +8 -4
- data/lib/data/nikto-nist-mapping.csv +8942 -0
- data/lib/heimdall_tools.rb +2 -0
- data/lib/heimdall_tools/cli.rb +28 -1
- data/lib/heimdall_tools/help/nikto_mapper.md +7 -0
- data/lib/heimdall_tools/help/snyk_mapper.md +7 -0
- data/lib/heimdall_tools/nessus_mapper.rb +37 -14
- data/lib/heimdall_tools/nikto_mapper.rb +152 -0
- data/lib/heimdall_tools/snyk_mapper.rb +161 -0
- metadata +8 -4
- data/CHANGELOG.md +0 -261
- data/lib/data/gitkeep +0 -0
data/lib/heimdall_tools.rb
CHANGED
@@ -10,4 +10,6 @@ module HeimdallTools
|
|
10
10
|
autoload :SonarQubeMapper, 'heimdall_tools/sonarqube_mapper'
|
11
11
|
autoload :BurpSuiteMapper, 'heimdall_tools/burpsuite_mapper'
|
12
12
|
autoload :NessusMapper, 'heimdall_tools/nessus_mapper'
|
13
|
+
autoload :SnykMapper, 'heimdall_tools/snyk_mapper'
|
14
|
+
autoload :NiktoMapper, 'heimdall_tools/nikto_mapper'
|
13
15
|
end
|
data/lib/heimdall_tools/cli.rb
CHANGED
@@ -53,13 +53,40 @@ module HeimdallTools
|
|
53
53
|
def nessus_mapper
|
54
54
|
hdfs = HeimdallTools::NessusMapper.new(File.read(options[:xml])).to_hdf
|
55
55
|
|
56
|
+
puts "\nHDF Generated:"
|
56
57
|
hdfs.keys.each do | host |
|
57
58
|
File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
|
58
|
-
puts "
|
59
|
+
puts "#{options[:output_prefix]}-#{host}.json"
|
59
60
|
end
|
60
61
|
|
61
62
|
end
|
62
63
|
|
64
|
+
desc 'snyk_mapper', 'snyk_mapper translates Snyk results Json to HDF format Json be viewed on Heimdall'
|
65
|
+
long_desc Help.text(:snyk_mapper)
|
66
|
+
option :json, required: true, aliases: '-j'
|
67
|
+
option :output_prefix, required: true, aliases: '-o'
|
68
|
+
option :verbose, type: :boolean, aliases: '-V'
|
69
|
+
def snyk_mapper
|
70
|
+
hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
|
71
|
+
puts "\r\HDF Generated:\n"
|
72
|
+
hdfs.keys.each do | host |
|
73
|
+
File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
|
74
|
+
puts "#{options[:output_prefix]}-#{host}.json"
|
75
|
+
end
|
76
|
+
end
|
77
|
+
|
78
|
+
desc 'nikto_mapper', 'nikto_mapper translates Nikto results Json to HDF format Json be viewed on Heimdall'
|
79
|
+
long_desc Help.text(:nikto_mapper)
|
80
|
+
option :json, required: true, aliases: '-j'
|
81
|
+
option :output, required: true, aliases: '-o'
|
82
|
+
option :verbose, type: :boolean, aliases: '-V'
|
83
|
+
def nikto_mapper
|
84
|
+
hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
|
85
|
+
File.write(options[:output], hdf)
|
86
|
+
puts "\r\HDF Generated:\n"
|
87
|
+
puts "#{options[:output]}"
|
88
|
+
end
|
89
|
+
|
63
90
|
desc 'version', 'prints version'
|
64
91
|
def version
|
65
92
|
puts VERSION
|
@@ -0,0 +1,7 @@
|
|
1
|
+
nikto_mapper translates a Nikto results JSON file into HDF format JSON to be viewable in Heimdall
|
2
|
+
|
3
|
+
Note: Currently this mapper only supports single-target Nikto scans.
|
4
|
+
|
5
|
+
Examples:
|
6
|
+
|
7
|
+
heimdall_tools nikto_mapper [OPTIONS] -j <nikto-results-json> -o <hdf-scan-results.json>
|
@@ -2,10 +2,12 @@ require 'json'
|
|
2
2
|
require 'csv'
|
3
3
|
require 'heimdall_tools/hdf'
|
4
4
|
require 'utilities/xml_to_hash'
|
5
|
+
require 'nokogiri'
|
5
6
|
|
6
7
|
RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
|
7
8
|
|
8
9
|
NESSUS_PLUGINS_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nessus-plugins-nist-mapping.csv')
|
10
|
+
U_CCI_LIST = File.join(RESOURCE_DIR, 'U_CCI_List.xml')
|
9
11
|
|
10
12
|
IMPACT_MAPPING = {
|
11
13
|
Info: 0.0,
|
@@ -25,18 +27,25 @@ NA_PLUGIN_OUTPUT = "This Nessus Plugin does not provide output message.".freeze
|
|
25
27
|
|
26
28
|
# rubocop:disable Metrics/AbcSize
|
27
29
|
|
30
|
+
# Loading spinner sign
|
31
|
+
$spinner = Enumerator.new do |e|
|
32
|
+
loop do
|
33
|
+
e.yield '|'
|
34
|
+
e.yield '/'
|
35
|
+
e.yield '-'
|
36
|
+
e.yield '\\'
|
37
|
+
end
|
38
|
+
end
|
39
|
+
|
28
40
|
module HeimdallTools
|
29
41
|
class NessusMapper
|
30
42
|
def initialize(nessus_xml, verbose = false)
|
31
43
|
@nessus_xml = nessus_xml
|
32
44
|
@verbose = verbose
|
33
|
-
|
45
|
+
read_cci_xml
|
34
46
|
begin
|
35
47
|
@cwe_nist_mapping = parse_mapper
|
36
48
|
@data = xml_to_hash(nessus_xml)
|
37
|
-
|
38
|
-
File.write("273970.json", @data.to_json)
|
39
|
-
|
40
49
|
@reports = extract_report
|
41
50
|
@scaninfo = extract_scaninfo
|
42
51
|
rescue StandardError => e
|
@@ -56,6 +65,7 @@ module HeimdallTools
|
|
56
65
|
raise "Invalid Nessus XML file provided Exception: #{e}"
|
57
66
|
end
|
58
67
|
end
|
68
|
+
|
59
69
|
def parse_refs(refs, key)
|
60
70
|
refs.split(',').map { |x| x.split('|')[1] if x.include?(key) }.compact
|
61
71
|
end
|
@@ -110,7 +120,26 @@ module HeimdallTools
|
|
110
120
|
[finding]
|
111
121
|
end
|
112
122
|
|
113
|
-
def
|
123
|
+
def read_cci_xml
|
124
|
+
@cci_xml = Nokogiri::XML(File.open(U_CCI_LIST))
|
125
|
+
@cci_xml.remove_namespaces!
|
126
|
+
rescue StandardError => e
|
127
|
+
puts "Exception: #{e.message}"
|
128
|
+
end
|
129
|
+
|
130
|
+
def cci_nist_tag(cci_refs)
|
131
|
+
nist_tags = []
|
132
|
+
cci_refs.each do | cci_ref |
|
133
|
+
item_node = @cci_xml.xpath("//cci_list/cci_items/cci_item[@id='#{cci_ref}']")[0] unless @cci_xml.nil?
|
134
|
+
unless item_node.nil?
|
135
|
+
nist_ref = item_node.xpath('./references/reference[not(@version <= preceding-sibling::reference/@version) and not(@version <=following-sibling::reference/@version)]/@index').text
|
136
|
+
end
|
137
|
+
nist_tags << nist_ref
|
138
|
+
end
|
139
|
+
nist_tags
|
140
|
+
end
|
141
|
+
|
142
|
+
def plugin_nist_tag(pluginfamily, pluginid)
|
114
143
|
entries = @cwe_nist_mapping.select { |x| (x[:pluginfamily].eql?(pluginfamily) && (x[:pluginid].eql?('*') || x[:pluginid].eql?(pluginid.to_i)) ) }
|
115
144
|
tags = entries.map { |x| [x[:nistid].split('|'), "Rev_#{x[:rev]}"] }
|
116
145
|
tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
|
@@ -166,6 +195,7 @@ module HeimdallTools
|
|
166
195
|
@reports.each do | report|
|
167
196
|
controls = []
|
168
197
|
report['ReportItem'].each do | item |
|
198
|
+
printf("\rProcessing: %s", $spinner.next)
|
169
199
|
@item = {}
|
170
200
|
@item['tags'] = {}
|
171
201
|
@item['descriptions'] = []
|
@@ -197,16 +227,9 @@ module HeimdallTools
|
|
197
227
|
@item['impact'] = impact(item['severity'])
|
198
228
|
end
|
199
229
|
if item['compliance-reference']
|
200
|
-
|
201
|
-
@item['tags']['nist'] = parse_refs(item['compliance-reference'],'800-53') << DEFAULT_NIST_REV
|
202
|
-
else
|
203
|
-
@item['tags']['nist'] = nist_tag(item['pluginFamily'],item['pluginID'])
|
204
|
-
end
|
205
|
-
if item['compliance-solution']
|
206
|
-
# TODO: Cover cases where 800-53 refs are not provided in nessus `compliance-reference` field
|
207
|
-
@item['tags']['nist'] = parse_refs(item['compliance-reference'],'800-53') << DEFAULT_NIST_REV
|
230
|
+
@item['tags']['nist'] = cci_nist_tag(parse_refs(item['compliance-reference'],'CCI'))
|
208
231
|
else
|
209
|
-
@item['tags']['nist'] =
|
232
|
+
@item['tags']['nist'] = plugin_nist_tag(item['pluginFamily'],item['pluginID'])
|
210
233
|
end
|
211
234
|
if item['compliance-solution']
|
212
235
|
@item['descriptions'] << desc_tags(item['compliance-solution'], 'check')
|
@@ -0,0 +1,152 @@
|
|
1
|
+
require 'json'
|
2
|
+
require 'csv'
|
3
|
+
require 'heimdall_tools/hdf'
|
4
|
+
|
5
|
+
RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
|
6
|
+
|
7
|
+
NIKTO_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nikto-nist-mapping.csv')
|
8
|
+
|
9
|
+
IMPACT_MAPPING = {
|
10
|
+
high: 0.7,
|
11
|
+
medium: 0.5,
|
12
|
+
low: 0.3,
|
13
|
+
}.freeze
|
14
|
+
|
15
|
+
DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
|
16
|
+
|
17
|
+
# Loading spinner sign
|
18
|
+
$spinner = Enumerator.new do |e|
|
19
|
+
loop do
|
20
|
+
e.yield '|'
|
21
|
+
e.yield '/'
|
22
|
+
e.yield '-'
|
23
|
+
e.yield '\\'
|
24
|
+
end
|
25
|
+
end
|
26
|
+
|
27
|
+
module HeimdallTools
|
28
|
+
class NiktoMapper
|
29
|
+
def initialize(nikto_json, name=nil, verbose = false)
|
30
|
+
@nikto_json = nikto_json
|
31
|
+
@verbose = verbose
|
32
|
+
|
33
|
+
begin
|
34
|
+
@nikto_nist_mapping = parse_mapper
|
35
|
+
rescue StandardError => e
|
36
|
+
raise "Invalid Nikto to NIST mapping file: Exception: #{e}"
|
37
|
+
end
|
38
|
+
|
39
|
+
# TODO: Support Multi-target scan results
|
40
|
+
# Nikto multi-target scans generate invalid format JSONs
|
41
|
+
# Possible workaround to use https://stackoverflow.com/a/58209963/1670307
|
42
|
+
|
43
|
+
begin
|
44
|
+
@project = JSON.parse(nikto_json)
|
45
|
+
rescue StandardError => e
|
46
|
+
raise "Invalid Nikto JSON file provided Exception: #{e}"
|
47
|
+
end
|
48
|
+
end
|
49
|
+
|
50
|
+
def extract_scaninfo(project)
|
51
|
+
info = {}
|
52
|
+
begin
|
53
|
+
info['policy'] = 'Nikto Website Scanner'
|
54
|
+
info['version'] = NA_STRING
|
55
|
+
info['projectName'] = "Host: #{project['host']} Port: #{project['port']}"
|
56
|
+
info['summary'] = "Banner: #{project['banner']}"
|
57
|
+
|
58
|
+
info
|
59
|
+
rescue StandardError => e
|
60
|
+
raise "Error extracting project info from nikto JSON file provided Exception: #{e}"
|
61
|
+
end
|
62
|
+
end
|
63
|
+
|
64
|
+
def finding(vulnerability)
|
65
|
+
finding = {}
|
66
|
+
finding['status'] = 'failed'
|
67
|
+
finding['code_desc'] = "URL : #{vulnerability['url'].to_s } Method: #{vulnerability['method'].to_s}"
|
68
|
+
finding['run_time'] = NA_FLOAT
|
69
|
+
|
70
|
+
# Nikto results JSON does not profile scan timestamp; using current time to satisfy HDF format
|
71
|
+
finding['start_time'] = NA_STRING
|
72
|
+
[finding]
|
73
|
+
end
|
74
|
+
|
75
|
+
def nist_tag(niktoid)
|
76
|
+
entries = @nikto_nist_mapping.select { |x| niktoid.eql?(x[:niktoid].to_s) }
|
77
|
+
tags = entries.map { |x| x[:nistid] }
|
78
|
+
tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
|
79
|
+
end
|
80
|
+
|
81
|
+
def impact(severity)
|
82
|
+
IMPACT_MAPPING[severity.to_sym]
|
83
|
+
end
|
84
|
+
|
85
|
+
def parse_mapper
|
86
|
+
csv_data = CSV.read(NIKTO_NIST_MAPPING_FILE, **{ encoding: 'UTF-8',
|
87
|
+
headers: true,
|
88
|
+
header_converters: :symbol})
|
89
|
+
csv_data.map(&:to_hash)
|
90
|
+
end
|
91
|
+
|
92
|
+
def desc_tags(data, label)
|
93
|
+
{ "data": data || NA_STRING, "label": label || NA_STRING }
|
94
|
+
end
|
95
|
+
|
96
|
+
# Nikto report could have multiple vulnerability entries for multiple findings of same issue type.
|
97
|
+
# The meta data is identical across entries
|
98
|
+
# method collapse_duplicates return unique controls with applicable findings collapsed into it.
|
99
|
+
def collapse_duplicates(controls)
|
100
|
+
unique_controls = []
|
101
|
+
|
102
|
+
controls.map { |x| x['id'] }.uniq.each do |id|
|
103
|
+
collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
|
104
|
+
unique_control = controls.find { |x| x['id'].eql?(id) }
|
105
|
+
unique_control['results'] = collapsed_results.flatten
|
106
|
+
unique_controls << unique_control
|
107
|
+
end
|
108
|
+
unique_controls
|
109
|
+
end
|
110
|
+
|
111
|
+
|
112
|
+
def to_hdf
|
113
|
+
controls = []
|
114
|
+
@project['vulnerabilities'].each do | vulnerability |
|
115
|
+
printf("\rProcessing: %s", $spinner.next)
|
116
|
+
|
117
|
+
item = {}
|
118
|
+
item['tags'] = {}
|
119
|
+
item['descriptions'] = []
|
120
|
+
item['refs'] = NA_ARRAY
|
121
|
+
item['source_location'] = NA_HASH
|
122
|
+
item['descriptions'] = NA_ARRAY
|
123
|
+
|
124
|
+
item['title'] = vulnerability['msg'].to_s
|
125
|
+
item['id'] = vulnerability['id'].to_s
|
126
|
+
|
127
|
+
# Nikto results JSON does not description fields
|
128
|
+
# Duplicating vulnerability msg field
|
129
|
+
item['desc'] = vulnerability['msg'].to_s
|
130
|
+
|
131
|
+
# Nitko does not provide finding severity; hard-coding severity to medium
|
132
|
+
item['impact'] = impact('medium')
|
133
|
+
item['code'] = NA_STRING
|
134
|
+
item['results'] = finding(vulnerability)
|
135
|
+
item['tags']['nist'] = nist_tag( vulnerability['id'].to_s )
|
136
|
+
item['tags']['ösvdb'] = vulnerability['OSVDB']
|
137
|
+
|
138
|
+
controls << item
|
139
|
+
end
|
140
|
+
|
141
|
+
controls = collapse_duplicates(controls)
|
142
|
+
scaninfo = extract_scaninfo(@project)
|
143
|
+
results = HeimdallDataFormat.new(profile_name: scaninfo['policy'],
|
144
|
+
version: scaninfo['version'],
|
145
|
+
title: "Nikto Target: #{scaninfo['projectName']}",
|
146
|
+
summary: "Banner: #{scaninfo['summary']}",
|
147
|
+
controls: controls,
|
148
|
+
target_id: scaninfo['projectName'])
|
149
|
+
results.to_hdf
|
150
|
+
end
|
151
|
+
end
|
152
|
+
end
|
@@ -0,0 +1,161 @@
|
|
1
|
+
require 'json'
|
2
|
+
require 'csv'
|
3
|
+
require 'heimdall_tools/hdf'
|
4
|
+
require 'utilities/xml_to_hash'
|
5
|
+
|
6
|
+
RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
|
7
|
+
|
8
|
+
CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
|
9
|
+
|
10
|
+
IMPACT_MAPPING = {
|
11
|
+
high: 0.7,
|
12
|
+
medium: 0.5,
|
13
|
+
low: 0.3,
|
14
|
+
}.freeze
|
15
|
+
|
16
|
+
SNYK_VERSION_REGEX = 'v(\d+.)(\d+.)(\d+)'.freeze
|
17
|
+
|
18
|
+
DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
|
19
|
+
|
20
|
+
# Loading spinner sign
|
21
|
+
$spinner = Enumerator.new do |e|
|
22
|
+
loop do
|
23
|
+
e.yield '|'
|
24
|
+
e.yield '/'
|
25
|
+
e.yield '-'
|
26
|
+
e.yield '\\'
|
27
|
+
end
|
28
|
+
end
|
29
|
+
|
30
|
+
module HeimdallTools
|
31
|
+
class SnykMapper
|
32
|
+
def initialize(synk_json, name=nil, verbose = false)
|
33
|
+
@synk_json = synk_json
|
34
|
+
@verbose = verbose
|
35
|
+
|
36
|
+
begin
|
37
|
+
@cwe_nist_mapping = parse_mapper
|
38
|
+
@projects = JSON.parse(synk_json)
|
39
|
+
|
40
|
+
# Cover single and multi-project scan use cases.
|
41
|
+
unless @projects.kind_of?(Array)
|
42
|
+
@projects = [ @projects ]
|
43
|
+
end
|
44
|
+
|
45
|
+
rescue StandardError => e
|
46
|
+
raise "Invalid Snyk JSON file provided Exception: #{e}"
|
47
|
+
end
|
48
|
+
end
|
49
|
+
|
50
|
+
def extract_scaninfo(project)
|
51
|
+
info = {}
|
52
|
+
begin
|
53
|
+
info['policy'] = project['policy']
|
54
|
+
reg = Regexp.new(SNYK_VERSION_REGEX, Regexp::IGNORECASE)
|
55
|
+
info['version'] = info['policy'].scan(reg).join
|
56
|
+
info['projectName'] = project['projectName']
|
57
|
+
info['summary'] = project['summary']
|
58
|
+
|
59
|
+
info
|
60
|
+
rescue StandardError => e
|
61
|
+
raise "Error extracting project info from Synk JSON file provided Exception: #{e}"
|
62
|
+
end
|
63
|
+
end
|
64
|
+
|
65
|
+
def finding(vulnerability)
|
66
|
+
finding = {}
|
67
|
+
finding['status'] = 'failed'
|
68
|
+
finding['code_desc'] = "From : [ #{vulnerability['from'].join(" , ").to_s } ]"
|
69
|
+
finding['run_time'] = NA_FLOAT
|
70
|
+
|
71
|
+
# Snyk results does not profile scan timestamp; using current time to satisfy HDF format
|
72
|
+
finding['start_time'] = NA_STRING
|
73
|
+
[finding]
|
74
|
+
end
|
75
|
+
|
76
|
+
def nist_tag(cweid)
|
77
|
+
entries = @cwe_nist_mapping.select { |x| cweid.include? x[:cweid].to_s }
|
78
|
+
tags = entries.map { |x| x[:nistid] }
|
79
|
+
tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
|
80
|
+
end
|
81
|
+
|
82
|
+
def parse_identifiers(vulnerability, ref)
|
83
|
+
# Extracting id number from reference style CWE-297
|
84
|
+
vulnerability['identifiers'][ref].map { |e| e.split("#{ref}-")[1] }
|
85
|
+
rescue
|
86
|
+
return []
|
87
|
+
end
|
88
|
+
|
89
|
+
def impact(severity)
|
90
|
+
IMPACT_MAPPING[severity.to_sym]
|
91
|
+
end
|
92
|
+
|
93
|
+
def parse_mapper
|
94
|
+
csv_data = CSV.read(CWE_NIST_MAPPING_FILE, **{ encoding: 'UTF-8',
|
95
|
+
headers: true,
|
96
|
+
header_converters: :symbol,
|
97
|
+
converters: :all })
|
98
|
+
csv_data.map(&:to_hash)
|
99
|
+
end
|
100
|
+
|
101
|
+
def desc_tags(data, label)
|
102
|
+
{ "data": data || NA_STRING, "label": label || NA_STRING }
|
103
|
+
end
|
104
|
+
|
105
|
+
# Snyk report could have multiple vulnerability entries for multiple findings of same issue type.
|
106
|
+
# The meta data is identical across entries
|
107
|
+
# method collapse_duplicates return unique controls with applicable findings collapsed into it.
|
108
|
+
def collapse_duplicates(controls)
|
109
|
+
unique_controls = []
|
110
|
+
|
111
|
+
controls.map { |x| x['id'] }.uniq.each do |id|
|
112
|
+
collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
|
113
|
+
unique_control = controls.find { |x| x['id'].eql?(id) }
|
114
|
+
unique_control['results'] = collapsed_results.flatten
|
115
|
+
unique_controls << unique_control
|
116
|
+
end
|
117
|
+
unique_controls
|
118
|
+
end
|
119
|
+
|
120
|
+
|
121
|
+
def to_hdf
|
122
|
+
project_results = {}
|
123
|
+
@projects.each do | project |
|
124
|
+
controls = []
|
125
|
+
project['vulnerabilities'].each do | vulnerability |
|
126
|
+
printf("\rProcessing: %s", $spinner.next)
|
127
|
+
|
128
|
+
item = {}
|
129
|
+
item['tags'] = {}
|
130
|
+
item['descriptions'] = []
|
131
|
+
item['refs'] = NA_ARRAY
|
132
|
+
item['source_location'] = NA_HASH
|
133
|
+
item['descriptions'] = NA_ARRAY
|
134
|
+
|
135
|
+
item['title'] = vulnerability['title'].to_s
|
136
|
+
item['id'] = vulnerability['id'].to_s
|
137
|
+
item['desc'] = vulnerability['description'].to_s
|
138
|
+
item['impact'] = impact(vulnerability['severity'])
|
139
|
+
item['code'] = ''
|
140
|
+
item['results'] = finding(vulnerability)
|
141
|
+
item['tags']['nist'] = nist_tag( parse_identifiers( vulnerability, 'CWE') )
|
142
|
+
item['tags']['cweid'] = parse_identifiers( vulnerability, 'CWE')
|
143
|
+
item['tags']['cveid'] = parse_identifiers( vulnerability, 'CVE')
|
144
|
+
item['tags']['ghsaid'] = parse_identifiers( vulnerability, 'GHSA')
|
145
|
+
|
146
|
+
controls << item
|
147
|
+
end
|
148
|
+
controls = collapse_duplicates(controls)
|
149
|
+
scaninfo = extract_scaninfo(project)
|
150
|
+
results = HeimdallDataFormat.new(profile_name: scaninfo['policy'],
|
151
|
+
version: scaninfo['version'],
|
152
|
+
title: "Snyk Project: #{scaninfo['projectName']}",
|
153
|
+
summary: "Snyk Summary: #{scaninfo['summary']}",
|
154
|
+
controls: controls,
|
155
|
+
target_id: scaninfo['projectName'])
|
156
|
+
project_results[scaninfo['projectName']] = results.to_hdf
|
157
|
+
end
|
158
|
+
project_results
|
159
|
+
end
|
160
|
+
end
|
161
|
+
end
|