heimdall_tools 1.3.39.pre1 → 1.3.43
- checksums.yaml +4 -4
- data/README.md +18 -0
- data/lib/data/aws-config-mapping.csv +107 -107
- data/lib/heimdall_tools.rb +1 -0
- data/lib/heimdall_tools/aws_config_mapper.rb +55 -36
- data/lib/heimdall_tools/burpsuite_mapper.rb +7 -11
- data/lib/heimdall_tools/cli.rb +19 -8
- data/lib/heimdall_tools/command.rb +0 -2
- data/lib/heimdall_tools/dbprotect_mapper.rb +9 -18
- data/lib/heimdall_tools/fortify_mapper.rb +1 -2
- data/lib/heimdall_tools/hdf.rb +4 -5
- data/lib/heimdall_tools/help/netsparker_mapper.md +7 -0
- data/lib/heimdall_tools/jfrog_xray_mapper.rb +33 -26
- data/lib/heimdall_tools/nessus_mapper.rb +39 -46
- data/lib/heimdall_tools/netsparker_mapper.rb +164 -0
- data/lib/heimdall_tools/nikto_mapper.rb +27 -27
- data/lib/heimdall_tools/snyk_mapper.rb +20 -22
- data/lib/heimdall_tools/sonarqube_mapper.rb +23 -21
- data/lib/heimdall_tools/zap_mapper.rb +3 -4
- data/lib/utilities/xml_to_hash.rb +6 -6
- metadata +43 -27
data/lib/heimdall_tools/netsparker_mapper.rb (new file):

@@ -0,0 +1,164 @@
+require 'json'
+require 'csv'
+require 'heimdall_tools/hdf'
+require 'utilities/xml_to_hash'
+
+RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
+
+CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
+OWASP_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'owasp-nist-mapping.csv')
+
+IMPACT_MAPPING = {
+  Critical: 1.0,
+  High: 0.7,
+  Medium: 0.5,
+  Low: 0.3,
+  Best_Practice: 0.0,
+  Information: 0.0
+}.freeze
+
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
+
+module HeimdallTools
+  class NetsparkerMapper
+    def initialize(xml, _name = nil, verbose = false)
+      @verbose = verbose
+
+      begin
+        @cwe_nist_mapping = parse_mapper(CWE_NIST_MAPPING_FILE)
+        @owasp_nist_mapping = parse_mapper(OWASP_NIST_MAPPING_FILE)
+        data = xml_to_hash(xml)
+
+        @vulnerabilities = data['netsparker-enterprise']['vulnerabilities']['vulnerability']
+        @scan_info = data['netsparker-enterprise']['target']
+      rescue StandardError => e
+        raise "Invalid Netsparker XML file provided Exception: #{e}"
+      end
+    end
+
+    def to_hdf
+      controls = []
+      @vulnerabilities.each do |vulnerability|
+        @item = {}
+        @item['id'] = vulnerability['LookupId'].to_s
+        @item['title'] = vulnerability['name'].to_s
+        @item['desc'] = format_control_desc(vulnerability)
+        @item['impact'] = impact(vulnerability['severity'])
+        @item['tags'] = {}
+        @item['descriptions'] = []
+
+        @item['descriptions'] << desc_tags(format_check_text(vulnerability), 'check')
+        @item['descriptions'] << desc_tags(format_fix_text(vulnerability), 'fix')
+        @item['refs'] = NA_ARRAY
+        @item['source_location'] = NA_HASH
+        @item['tags']['nist'] = nist_tag(vulnerability['classification'])
+        @item['code'] = ''
+        @item['results'] = finding(vulnerability)
+
+        controls << @item
+      end
+      controls = collapse_duplicates(controls)
+      results = HeimdallDataFormat.new(profile_name: 'Netsparker Enterprise Scan',
+                                       title: "Netsparker Enterprise Scan ID: #{@scan_info['scan-id']} URL: #{@scan_info['url']}",
+                                       summary: 'Netsparker Enterprise Scan',
+                                       target_id: @scan_info['url'],
+                                       controls: controls)
+      results.to_hdf
+    end
+
+    private
+
+    def parse_html(block)
+      block['#cdata-section'].to_s.strip unless block.nil?
+    end
+
+    def finding(vulnerability)
+      finding = {}
+      finding['status'] = 'failed'
+      finding['code_desc'] = []
+      finding['code_desc'] << "http-request : #{parse_html(vulnerability['http-request']['content'])}"
+      finding['code_desc'] << "method : #{vulnerability['http-request']['method']}"
+      finding['code_desc'] = finding['code_desc'].join("\n")
+
+      finding['message'] = []
+      finding['message'] << "http-response : #{parse_html(vulnerability['http-response']['content'])}"
+      finding['message'] << "duration : #{vulnerability['http-response']['duration']}"
+      finding['message'] << "status-code : #{vulnerability['http-response']['status-code']}"
+      finding['message'] = finding['message'].join("\n")
+      finding['run_time'] = NA_FLOAT
+
+      finding['start_time'] = @scan_info['initiated']
+      [finding]
+    end
+
+    def format_control_desc(vulnerability)
+      text = []
+      text << parse_html(vulnerability['description']).to_s unless vulnerability['description'].nil?
+      text << "Exploitation-skills: #{parse_html(vulnerability['exploitation-skills'])}" unless vulnerability['exploitation-skills'].nil?
+      text << "Extra-information: #{vulnerability['extra-information']}" unless vulnerability['extra-information'].nil?
+      text << "Classification: #{vulnerability['classification']}" unless vulnerability['classification'].nil?
+      text << "Impact: #{parse_html(vulnerability['impact'])}" unless vulnerability['impact'].nil?
+      text << "FirstSeenDate: #{vulnerability['FirstSeenDate']}" unless vulnerability['FirstSeenDate'].nil?
+      text << "LastSeenDate: #{vulnerability['LastSeenDate']}" unless vulnerability['LastSeenDate'].nil?
+      text << "Certainty: #{vulnerability['certainty']}" unless vulnerability['certainty'].nil?
+      text << "Type: #{vulnerability['type']}" unless vulnerability['type'].nil?
+      text << "Confirmed: #{vulnerability['confirmed']}" unless vulnerability['confirmed'].nil?
+      text.join('<br>')
+    end
+
+    def format_check_text(vulnerability)
+      text = []
+      text << "Exploitation-skills: #{parse_html(vulnerability['exploitation-skills'])}" unless vulnerability['exploitation-skills'].nil?
+      text << "Proof-of-concept: #{parse_html(vulnerability['proof-of-concept'])}" unless vulnerability['proof-of-concept'].nil?
+      text.join('<br>')
+    end
+
+    def format_fix_text(vulnerability)
+      text = []
+      text << "Remedial-actions: #{parse_html(vulnerability['remedial-actions'])}" unless vulnerability['remedial-actions'].nil?
+      text << "Remedial-procedure: #{parse_html(vulnerability['remedial-procedure'])}" unless vulnerability['remedial-procedure'].nil?
+      text << "Remedy-references: #{parse_html(vulnerability['remedy-references'])}" unless vulnerability['remedy-references'].nil?
+      text.join('<br>')
+    end
+
+    def nist_tag(classification)
+      tags = []
+      entries = @cwe_nist_mapping.select { |x| classification['cwe'].include?(x[:cweid].to_s) && !x[:nistid].nil? }
+      tags << entries.map { |x| x[:nistid] }
+      entries = @owasp_nist_mapping.select { |x| classification['owasp'].include?(x[:owaspid].to_s) && !x[:nistid].nil? }
+      tags << entries.map { |x| x[:nistid] }
+      tags.flatten.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
+    end
+
+    def impact(severity)
+      IMPACT_MAPPING[severity.to_sym]
+    end
+
+    def parse_mapper(mapping_file)
+      csv_data = CSV.read(mapping_file, { encoding: 'UTF-8',
+                                          headers: true,
+                                          header_converters: :symbol,
+                                          converters: :all })
+      csv_data.map(&:to_hash)
+    end
+
+    def desc_tags(data, label)
+      { data: data || NA_STRING, label: label || NA_STRING }
+    end
+
+    # Netsparker report could have multiple issue entries for multiple findings of same issue type.
+    # The meta data is identical across entries
+    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
+    def collapse_duplicates(controls)
+      unique_controls = []
+
+      controls.map { |x| x['id'] }.uniq.each do |id|
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
+        unique_control = controls.find { |x| x['id'].eql?(id) }
+        unique_control['results'] = collapsed_results.flatten
+        unique_controls << unique_control
+      end
+      unique_controls
+    end
+  end
+end
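For orientation, the new mapper follows the same calling convention as the other converters in this diff: construct it with the raw report, then call `to_hdf`. A minimal usage sketch, assuming the gem is installed, that `require 'heimdall_tools'` exposes `HeimdallTools::NetsparkerMapper` (the one-line change to `data/lib/heimdall_tools.rb` suggests it is registered there), and that `to_hdf` returns the HDF JSON string as it does for the other mappers; the file names are placeholders:

```ruby
require 'heimdall_tools'

# Hypothetical input/output paths; convert a Netsparker Enterprise XML export to HDF.
xml = File.read('netsparker_scan.xml')
hdf_json = HeimdallTools::NetsparkerMapper.new(xml).to_hdf

# Write the HDF JSON so it can be loaded into Heimdall for review.
File.write('netsparker_scan_hdf.json', hdf_json)
```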
data/lib/heimdall_tools/nikto_mapper.rb:

@@ -9,10 +9,10 @@ NIKTO_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nikto-nist-mapping.csv')
 IMPACT_MAPPING = {
   high: 0.7,
   medium: 0.5,
-  low: 0.3
+  low: 0.3
 }.freeze

-DEFAULT_NIST_TAG =
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

 # Loading spinner sign
 $spinner = Enumerator.new do |e|
@@ -26,7 +26,7 @@ end

 module HeimdallTools
   class NiktoMapper
-    def initialize(nikto_json,
+    def initialize(nikto_json, _name = nil, verbose = false)
       @nikto_json = nikto_json
       @verbose = verbose

@@ -36,9 +36,9 @@ module HeimdallTools
        raise "Invalid Nikto to NIST mapping file: Exception: #{e}"
      end

-
-
-
+      # TODO: Support Multi-target scan results
+      # Nikto multi-target scans generate invalid format JSONs
+      # Possible workaround to use https://stackoverflow.com/a/58209963/1670307

      begin
        @project = JSON.parse(nikto_json)
@@ -64,7 +64,7 @@ module HeimdallTools
    def finding(vulnerability)
      finding = {}
      finding['status'] = 'failed'
-      finding['code_desc'] = "URL : #{vulnerability['url']
+      finding['code_desc'] = "URL : #{vulnerability['url']} Method: #{vulnerability['method']}"
      finding['run_time'] = NA_FLOAT
      finding['start_time'] = NA_STRING
      [finding]
@@ -83,32 +83,32 @@ module HeimdallTools
    def parse_mapper
      csv_data = CSV.read(NIKTO_NIST_MAPPING_FILE, **{ encoding: 'UTF-8',
                                                       headers: true,
-                                                       header_converters: :symbol})
+                                                       header_converters: :symbol })
      csv_data.map(&:to_hash)
    end

    def desc_tags(data, label)
-      {
+      { data: data || NA_STRING, label: label || NA_STRING }
    end

-    # Nikto report could have multiple vulnerability entries for multiple findings of same issue type.
-    # The meta data is identical across entries
-    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
-    def collapse_duplicates(controls)
-      unique_controls = []
-
-      controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
-        unique_control = controls.find { |x| x['id'].eql?(id) }
-        unique_control['results'] = collapsed_results.flatten
-        unique_controls << unique_control
-      end
-      unique_controls
-    end
+    # Nikto report could have multiple vulnerability entries for multiple findings of same issue type.
+    # The meta data is identical across entries
+    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
+    def collapse_duplicates(controls)
+      unique_controls = []
+
+      controls.map { |x| x['id'] }.uniq.each do |id|
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
+        unique_control = controls.find { |x| x['id'].eql?(id) }
+        unique_control['results'] = collapsed_results.flatten
+        unique_controls << unique_control
+      end
+      unique_controls
+    end

    def to_hdf
      controls = []
-      @project['vulnerabilities'].each do |
+      @project['vulnerabilities'].each do |vulnerability|
        printf("\rProcessing: %s", $spinner.next)

        item = {}
@@ -125,11 +125,11 @@ module HeimdallTools
        # Duplicating vulnerability msg field
        item['desc'] = vulnerability['msg'].to_s

-        # Nitko does not provide finding severity; hard-coding severity to medium
-        item['impact'] = impact('medium')
+        # Nitko does not provide finding severity; hard-coding severity to medium
+        item['impact'] = impact('medium')
        item['code'] = NA_STRING
        item['results'] = finding(vulnerability)
-        item['tags']['nist'] = nist_tag(
+        item['tags']['nist'] = nist_tag(vulnerability['id'].to_s)
        item['tags']['ösvdb'] = vulnerability['OSVDB']

        controls << item
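The `collapse_duplicates` helper shown above (and also present in the new Netsparker mapper and the Snyk mapper below) merges the `results` arrays of controls that share an `id`, so repeated findings of the same issue type collapse into a single control. A self-contained sketch of that behavior; the method body is copied from the diff, while the sample control hashes and ids are illustrative only:

```ruby
def collapse_duplicates(controls)
  unique_controls = []

  controls.map { |x| x['id'] }.uniq.each do |id|
    collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
    unique_control = controls.find { |x| x['id'].eql?(id) }
    unique_control['results'] = collapsed_results.flatten
    unique_controls << unique_control
  end
  unique_controls
end

# Two findings for control '999957' and one for '999970' (fabricated ids).
controls = [
  { 'id' => '999957', 'results' => [{ 'code_desc' => 'URL : /a Method: GET' }] },
  { 'id' => '999957', 'results' => [{ 'code_desc' => 'URL : /b Method: GET' }] },
  { 'id' => '999970', 'results' => [{ 'code_desc' => 'URL : /c Method: GET' }] }
]

collapsed = collapse_duplicates(controls)
collapsed.length                  # => 2
collapsed.first['results'].length # => 2 (both '999957' findings merged into one control)
```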
data/lib/heimdall_tools/snyk_mapper.rb:

@@ -10,12 +10,12 @@ CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')
 IMPACT_MAPPING = {
   high: 0.7,
   medium: 0.5,
-  low: 0.3
+  low: 0.3
 }.freeze

 SNYK_VERSION_REGEX = 'v(\d+.)(\d+.)(\d+)'.freeze

-DEFAULT_NIST_TAG =
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

 # Loading spinner sign
 $spinner = Enumerator.new do |e|
@@ -29,7 +29,7 @@ end

 module HeimdallTools
   class SnykMapper
-    def initialize(synk_json,
+    def initialize(synk_json, _name = nil, verbose = false)
       @synk_json = synk_json
       @verbose = verbose

@@ -38,10 +38,9 @@ module HeimdallTools
      @projects = JSON.parse(synk_json)

      # Cover single and multi-project scan use cases.
-      unless @projects.
-        @projects = [
+      unless @projects.is_a?(Array)
+        @projects = [@projects]
      end
-
    rescue StandardError => e
      raise "Invalid Snyk JSON file provided Exception: #{e}"
    end
@@ -52,7 +51,7 @@ module HeimdallTools
      begin
        info['policy'] = project['policy']
        reg = Regexp.new(SNYK_VERSION_REGEX, Regexp::IGNORECASE)
-        info['version'] = info['policy'].scan(reg).join
+        info['version'] = info['policy'].scan(reg).join
        info['projectName'] = project['projectName']
        info['summary'] = project['summary']

@@ -65,7 +64,7 @@ module HeimdallTools
    def finding(vulnerability)
      finding = {}
      finding['status'] = 'failed'
-      finding['code_desc'] = "From : [ #{vulnerability['from'].join(
+      finding['code_desc'] = "From : [ #{vulnerability['from'].join(' , ')} ]"
      finding['run_time'] = NA_FLOAT

      # Snyk results does not profile scan timestamp; using current time to satisfy HDF format
@@ -81,9 +80,9 @@ module HeimdallTools

    def parse_identifiers(vulnerability, ref)
      # Extracting id number from reference style CWE-297
-      vulnerability['identifiers'][ref].map { |e| e.split("#{ref}-")[1]
-
-
+      vulnerability['identifiers'][ref].map { |e| e.split("#{ref}-")[1] }
+    rescue StandardError
+      []
    end

    def impact(severity)
@@ -99,17 +98,17 @@ module HeimdallTools
    end

    def desc_tags(data, label)
-      {
+      { data: data || NA_STRING, label: label || NA_STRING }
    end

    # Snyk report could have multiple vulnerability entries for multiple findings of same issue type.
-    # The meta data is identical across entries
+    # The meta data is identical across entries
    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
    def collapse_duplicates(controls)
      unique_controls = []

      controls.map { |x| x['id'] }.uniq.each do |id|
-        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map {|x| x['results']}
+        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
        unique_control = controls.find { |x| x['id'].eql?(id) }
        unique_control['results'] = collapsed_results.flatten
        unique_controls << unique_control
@@ -117,12 +116,11 @@ module HeimdallTools
      unique_controls
    end

-
    def to_hdf
      project_results = {}
-      @projects.each do |
+      @projects.each do |project|
        controls = []
-        project['vulnerabilities'].each do |
+        project['vulnerabilities'].each do |vulnerability|
          printf("\rProcessing: %s", $spinner.next)

          item = {}
@@ -135,13 +133,13 @@ module HeimdallTools
          item['title'] = vulnerability['title'].to_s
          item['id'] = vulnerability['id'].to_s
          item['desc'] = vulnerability['description'].to_s
-          item['impact'] = impact(vulnerability['severity'])
+          item['impact'] = impact(vulnerability['severity'])
          item['code'] = ''
          item['results'] = finding(vulnerability)
-          item['tags']['nist'] = nist_tag(
-          item['tags']['cweid'] = parse_identifiers(
-          item['tags']['cveid'] = parse_identifiers(
-          item['tags']['ghsaid'] = parse_identifiers(
+          item['tags']['nist'] = nist_tag(parse_identifiers(vulnerability, 'CWE'))
+          item['tags']['cweid'] = parse_identifiers(vulnerability, 'CWE')
+          item['tags']['cveid'] = parse_identifiers(vulnerability, 'CVE')
+          item['tags']['ghsaid'] = parse_identifiers(vulnerability, 'GHSA')

          controls << item
        end
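The rewritten `parse_identifiers` strips the reference prefix from Snyk identifier lists (e.g. `CWE-79` becomes `79`) and now rescues any lookup failure by returning an empty array, which the new `cweid`/`cveid`/`ghsaid` tag assignments rely on. A small sketch with an illustrative identifiers hash; the method body matches the diff, the data is made up:

```ruby
def parse_identifiers(vulnerability, ref)
  # Extracting id number from reference style CWE-297
  vulnerability['identifiers'][ref].map { |e| e.split("#{ref}-")[1] }
rescue StandardError
  []
end

vulnerability = {
  'identifiers' => {
    'CWE' => ['CWE-79'],
    'CVE' => ['CVE-2020-7598']
  }
}

parse_identifiers(vulnerability, 'CWE')  # => ["79"]
parse_identifiers(vulnerability, 'GHSA') # => [] (missing key raises, which is rescued)
```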
data/lib/heimdall_tools/sonarqube_mapper.rb:

@@ -5,7 +5,7 @@ require 'heimdall_tools/hdf'

 RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')

-DEFAULT_NIST_TAG =
+DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze

 MAPPING_FILES = {
   cwe: File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv'),
@@ -33,16 +33,18 @@ class SonarQubeApi

   PAGE_SIZE = 100

-  def initialize(api_url, auth=nil)
+  def initialize(api_url, auth = nil)
     @api_url = api_url
     @auth = auth
   end

-  def query_api(endpoint, params={})
-
-
-
-
+  def query_api(endpoint, params = {})
+    unless @auth.nil?
+      creds = {
+        username: @auth.split(':')[0],
+        password: @auth.split(':')[1]
+      }
+    end

    response = HTTParty.get(@api_url + endpoint, { query: params, basic_auth: creds })
    check_response response
@@ -109,9 +111,9 @@ end
 module HeimdallTools
   class SonarQubeMapper
     # Fetches the necessary data from the API and builds report
-    def initialize(project_name, sonarqube_url, auth=nil)
+    def initialize(project_name, sonarqube_url, auth = nil)
       @project_name = project_name
-      @api = SonarQubeApi.new(sonarqube_url,auth)
+      @api = SonarQubeApi.new(sonarqube_url, auth)

      @mappings = load_nist_mappings
      @findings = @api.query_issues(@project_name).map { |x| Finding.new(x, @api) }
@@ -132,16 +134,16 @@ module HeimdallTools
                                             headers: true,
                                             header_converters: :symbol,
                                             converters: :all })
-        mappings[mapping_type] =
-          [row[
-        }
+        mappings[mapping_type] = csv_data.reject { |row| row[:nistid].nil? }.map { |row|
+          [row["#{mapping_type.to_s.downcase}id".to_sym].to_s, [row[:nistid], "Rev_#{row[:rev]}"]]
+        }.to_h
      end
      mappings
    end

    # Returns a report in HDF format
    def to_hdf
-      results = HeimdallDataFormat.new(profile_name:
+      results = HeimdallDataFormat.new(profile_name: 'SonarQube Scan',
                                       version: @api.query_version,
                                       title: "SonarQube Scan of Project: #{@project_name}",
                                       summary: "SonarQube Scan of Project: #{@project_name}",
@@ -156,7 +158,7 @@ class Control
   # OWASP is stated specifically, ex owasp-a1
   #
   # SonarQube is inconsistent with tags (ex some cwe rules don't have cwe number in desc,) as noted below
-  TAG_DATA = {} # NOTE: We count on Ruby to preserve order for TAG_DATA
+  TAG_DATA = {}.freeze # NOTE: We count on Ruby to preserve order for TAG_DATA
   TAG_DATA[:cwe] = {
     # Some rules with cwe tag don't have cwe number in description!
     # Currently only squid:S2658, but it has OWASP tag so we can use that.
@@ -206,8 +208,8 @@ class Control
      reg = Regexp.new(tag_data[:regex], Regexp::IGNORECASE)
      parsed_tags += @data['htmlDesc'].scan(reg).map(&:first)

-      if parsed_tags.empty? and not KNOWN_BAD_RULES.include? @key
-        puts "Error: Rule #{@key}: No regex matches for #{tag_type} tag."
+      if parsed_tags.empty? and not KNOWN_BAD_RULES.include? @key && parsed_tags.empty?
+        puts "Error: Rule #{@key}: No regex matches for #{tag_type} tag."
      end
    else
      # If the tag type doesn't have a regex, it is specific enough to be mapped directly
@@ -239,11 +241,11 @@ class Control
      return [@mappings[tag_type][parsed_tag]].flatten.uniq
    end

-    DEFAULT_NIST_TAG # Entries with unmapped NIST tags
+    DEFAULT_NIST_TAG # Entries with unmapped NIST tags fall back to defaults
  end

  def hdf
-    #
+    # NOTE: Structure is based on fortify -> HDF converter output
    {
      title: @data['name'],
      desc: @data['htmlDesc'],
@@ -256,7 +258,7 @@ class Control
      id: @key,
      descriptions: NA_ARRAY,
      refs: NA_ARRAY,
-      source_location: NA_HASH
+      source_location: NA_HASH
    }
  end
 end
@@ -284,10 +286,10 @@ class Finding

    snip_html = "StartLine: #{snip_start}, EndLine: #{snip_end}<br>Code:<pre>#{snip}</pre>"
    {
-
+      status: 'failed',
      code_desc: "Path:#{component}:#{vuln_start}:#{vuln_end} #{snip_html}",
      run_time: NA_FLOAT,
-      start_time: Time.now.strftime(
+      start_time: Time.now.strftime('%a,%d %b %Y %X')
    }
  end
 end
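The `query_api` change only builds the HTTParty `basic_auth` hash when credentials are supplied, splitting a single `user:password` (or `token:`) style string on `':'`. A hedged sketch of how the mapper might be driven with these arguments; the server URL, project key, and token below are placeholders rather than values from the gem's documentation, and constructing the mapper queries the SonarQube server immediately:

```ruby
require 'heimdall_tools'

# Placeholder values; auth is a single 'username:password' string (or
# 'my_token:' when using a SonarQube token) and may be omitted entirely (nil).
mapper = HeimdallTools::SonarQubeMapper.new(
  'my_project_key',             # project to query
  'http://localhost:9000/api',  # api_url handed to SonarQubeApi
  'my_sonarqube_token:'         # optional credentials split inside query_api
)

File.write('sonarqube_hdf.json', mapper.to_hdf)
```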