heimdall_tools 1.3.31 → 1.3.36

Sign up to get free protection for your applications and to get access to all the features.
@@ -10,4 +10,8 @@ module HeimdallTools
10
10
  autoload :SonarQubeMapper, 'heimdall_tools/sonarqube_mapper'
11
11
  autoload :BurpSuiteMapper, 'heimdall_tools/burpsuite_mapper'
12
12
  autoload :NessusMapper, 'heimdall_tools/nessus_mapper'
13
+ autoload :SnykMapper, 'heimdall_tools/snyk_mapper'
14
+ autoload :NiktoMapper, 'heimdall_tools/nikto_mapper'
15
+ autoload :JfrogXrayMapper, 'heimdall_tools/jfrog_xray_mapper'
16
+ autoload :DBProtectMapper, 'heimdall_tools/dbprotect_mapper'
13
17
  end
@@ -61,6 +61,56 @@ module HeimdallTools
61
61
 
62
62
  end
63
63
 
64
+ desc 'snyk_mapper', 'snyk_mapper translates Snyk results Json to HDF format Json be viewed on Heimdall'
65
+ long_desc Help.text(:snyk_mapper)
66
+ option :json, required: true, aliases: '-j'
67
+ option :output_prefix, required: true, aliases: '-o'
68
+ option :verbose, type: :boolean, aliases: '-V'
69
+ def snyk_mapper
70
+ hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
71
+ puts "\r\HDF Generated:\n"
72
+ hdfs.keys.each do | host |
73
+ File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
74
+ puts "#{options[:output_prefix]}-#{host}.json"
75
+ end
76
+ end
77
+
78
+ desc 'nikto_mapper', 'nikto_mapper translates Nikto results Json to HDF format Json be viewed on Heimdall'
79
+ long_desc Help.text(:nikto_mapper)
80
+ option :json, required: true, aliases: '-j'
81
+ option :output, required: true, aliases: '-o'
82
+ option :verbose, type: :boolean, aliases: '-V'
83
+ def nikto_mapper
84
+ hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
85
+ File.write(options[:output], hdf)
86
+ puts "\r\HDF Generated:\n"
87
+ puts "#{options[:output]}"
88
+ end
89
+
90
+ desc 'jfrog_xray_mapper', 'jfrog_xray_mapper translates Jfrog Xray results Json to HDF format Json be viewed on Heimdall'
91
+ long_desc Help.text(:jfrog_xray_mapper)
92
+ option :json, required: true, aliases: '-j'
93
+ option :output, required: true, aliases: '-o'
94
+ option :verbose, type: :boolean, aliases: '-V'
95
+ def jfrog_xray_mapper
96
+ hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
97
+ File.write(options[:output], hdf)
98
+ puts "\r\HDF Generated:\n"
99
+ puts "#{options[:output]}"
100
+ end
101
+
102
+ desc 'dbprotect_mapper', 'dbprotect_mapper translates dbprotect results xml to HDF format Json be viewed on Heimdall'
103
+ long_desc Help.text(:dbprotect_mapper)
104
+ option :xml, required: true, aliases: '-x'
105
+ option :output, required: true, aliases: '-o'
106
+ option :verbose, type: :boolean, aliases: '-V'
107
+ def dbprotect_mapper
108
+ hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
109
+ File.write(options[:output], hdf)
110
+ puts "\r\HDF Generated:\n"
111
+ puts "#{options[:output]}"
112
+ end
113
+
64
114
  desc 'version', 'prints version'
65
115
  def version
66
116
  puts VERSION
@@ -0,0 +1,127 @@
1
+ require 'json'
2
+ require 'csv'
3
+ require 'heimdall_tools/hdf'
4
+ require 'utilities/xml_to_hash'
5
+
6
# Translates DBProtect `Risk DV` severity labels into HDF impact scores.
IMPACT_MAPPING = {
  High: 0.7,
  Medium: 0.5,
  Low: 0.3,
  Informational: 0.0
}.freeze

# rubocop:disable Metrics/AbcSize
14
+
15
module HeimdallTools
  # Maps a DBProtect "Check Results Details" XML report into the Heimdall Data
  # Format (HDF). One HDF control is emitted per unique Check ID; repeated
  # findings for the same check are collapsed into that control's results.
  class DBProtectMapper
    # xml::     raw XML text of a `Check Results Details` DBProtect report
    # name::    unused; kept for signature parity with the other mappers
    # verbose:: stored but not otherwise read in this class
    def initialize(xml, name = nil, verbose = false)
      @verbose = verbose

      begin
        dataset = xml_to_hash(xml)
        @entries = compile_findings(dataset['dataset'])
      rescue StandardError => e
        raise "Invalid DBProtect XML file provided Exception: #{e};\nNote that XML must be of kind `Check Results Details`."
      end
    end

    # Builds and returns the HDF representation of the parsed report.
    def to_hdf
      controls = []
      @entries.each do |entry|
        item = {}
        item['id'] = entry['Check ID']
        item['title'] = entry['Check']
        item['desc'] = format_desc(entry)
        item['impact'] = impact(entry['Risk DV'])
        item['tags'] = {}
        item['descriptions'] = []
        item['refs'] = NA_ARRAY
        item['source_location'] = NA_HASH
        item['code'] = ''
        item['results'] = finding(entry)

        controls << item
      end
      controls = collapse_duplicates(controls)
      results = HeimdallDataFormat.new(profile_name: @entries.first['Policy'],
                                       version: "",
                                       title: @entries.first['Job Name'],
                                       summary: format_summary(@entries.first),
                                       controls: controls)
      results.to_hdf
    end

    private

    # Zips the report's metadata column names with each data row, producing one
    # Hash per finding keyed by column name.
    def compile_findings(dataset)
      keys = dataset['metadata']['item'].map { |e| e['name'] }
      dataset['data']['row'].map { |e| Hash[keys.zip(e['value'])] }
    end

    # One-line control description built from the Task and Check Category columns.
    def format_desc(entry)
      text = []
      text << "Task : #{entry['Task']}"
      text << "Check Category : #{entry['Check Category']}"
      text.join("; ")
    end

    # Multi-line report summary built from the first entry's metadata.
    # (Fixed: previously emitted a duplicate "IP Address, Port, Instance" line
    # that interpolated the wrong key, `Asset Type`.)
    def format_summary(entry)
      text = []
      text << "Organization : #{entry['Organization']}"
      text << "Asset : #{entry['Check Asset']}"
      text << "Asset Type : #{entry['Asset Type']}"
      text << "IP Address, Port, Instance : #{entry['IP Address, Port, Instance']}"
      text.join("\n")
    end

    # Translates one report row into a single-element HDF results array.
    def finding(entry)
      finding = {}

      finding['code_desc'] = entry['Details']
      finding['run_time'] = 0.0
      finding['start_time'] = entry['Date']

      case entry['Result Status']
      when 'Failed'
        finding['status'] = 'failed'
        finding['backtrace'] = ["DB Protect Failed Check"]
      when 'Finding'
        finding['status'] = 'failed'
      when 'Not A Finding'
        finding['status'] = 'passed'
      else
        # 'Fact', 'Skipped' and any unrecognized status all map to skipped,
        # exactly as the original per-branch case did.
        finding['status'] = 'skipped'
      end
      [finding]
    end

    # Looks up the HDF impact score for a `Risk DV` label.
    # NOTE(review): raises NoMethodError if severity is nil — assumes the
    # report always supplies a severity; confirm against real exports.
    def impact(severity)
      IMPACT_MAPPING[severity.to_sym]
    end

    # DBProtect report could have multiple issue entries for multiple findings of same issue type.
    # The meta data is identical across entries
    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
    def collapse_duplicates(controls)
      unique_controls = []

      controls.map { |x| x['id'] }.uniq.each do |id|
        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
        unique_control = controls.find { |x| x['id'].eql?(id) }
        unique_control['results'] = collapsed_results.flatten
        unique_controls << unique_control
      end
      unique_controls
    end
  end
end
@@ -3,6 +3,7 @@ require 'heimdall_tools/hdf'
3
3
  require 'utilities/xml_to_hash'
4
4
 
5
5
  NIST_REFERENCE_NAME = 'Standards Mapping - NIST Special Publication 800-53 Revision 4'.freeze
6
+ DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze
6
7
 
7
8
  module HeimdallTools
8
9
  class FortifyMapper
@@ -68,7 +69,7 @@ module HeimdallTools
68
69
  references = rule['References']['Reference']
69
70
  references = [references] unless references.is_a?(Array)
70
71
  tag = references.detect { |x| x['Author'].eql?(NIST_REFERENCE_NAME) }
71
- tag.nil? ? 'unmapped' : tag['Title'].match(/[a-zA-Z][a-zA-Z]-\d{1,2}/)
72
+ tag.nil? ? DEFAULT_NIST_TAG : tag['Title'].match(/[a-zA-Z][a-zA-Z]-\d{1,2}/)
72
73
  end
73
74
 
74
75
  def impact(classid)
@@ -0,0 +1,5 @@
1
+ dbprotect_mapper translates a DBProtect report in `Check Results Details` format XML into HDF format JSON to be viewed on Heimdall.
2
+
3
+ Examples:
4
+
5
+ heimdall_tools dbprotect_mapper -x check_results_details_report.xml -o db_protect_hdf.json
@@ -0,0 +1,5 @@
1
+ jfrog_xray_mapper translates a JFrog Xray results JSON file into HDF format JSON to be viewable in Heimdall
2
+
3
+ Examples:
4
+
5
+ heimdall_tools jfrog_xray_mapper -j xray_results.json -o output-file-name.json
@@ -0,0 +1,7 @@
1
+ nikto_mapper translates a Nikto results JSON file into HDF format JSON to be viewable in Heimdall
2
+
3
+ Note: Currently this mapper only supports single-target Nikto scans.
4
+
5
+ Examples:
6
+
7
+ heimdall_tools nikto_mapper [OPTIONS] -j <nikto-results-json> -o <hdf-scan-results.json>
@@ -0,0 +1,7 @@
1
+ snyk_mapper translates a Snyk results JSON file into HDF format JSON to be viewable in Heimdall
2
+
3
+ A separate HDF JSON is generated for each project reported in the Snyk Report.
4
+
5
+ Examples:
6
+
7
+ heimdall_tools snyk_mapper -j snyk_results.json -o output-file-prefix
@@ -0,0 +1,142 @@
1
+ require 'json'
2
+ require 'csv'
3
+ require 'heimdall_tools/hdf'
4
+ require 'utilities/xml_to_hash'
5
+
6
# Directory holding the bundled CWE-to-NIST mapping data.
RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')

CWE_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'cwe-nist-mapping.csv')

# Translates Xray severity strings (lower-cased) into HDF impact scores.
IMPACT_MAPPING = {
  high: 0.7,
  medium: 0.5,
  low: 0.3,
}.freeze

# NIST tags applied when a CWE has no entry in the mapping CSV.
DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze

# Loading spinner sign
$spinner = Enumerator.new do |e|
  loop do
    e.yield '|'
    e.yield '/'
    e.yield '-'
    e.yield '\\'
  end
end
27
+
28
module HeimdallTools
  # Maps a JFrog Xray scan JSON report into the Heimdall Data Format (HDF).
  # One HDF control is emitted per unique vulnerability id; repeated findings
  # for the same id are collapsed into that control's results.
  class JfrogXrayMapper
    # xray_json:: raw JSON text of an Xray scan report
    # name::      unused; kept for signature parity with the other mappers
    # verbose::   stored but not otherwise read in this class
    def initialize(xray_json, name = nil, verbose = false)
      @xray_json = xray_json
      @verbose = verbose

      begin
        @cwe_nist_mapping = parse_mapper
        @project = JSON.parse(xray_json)
      rescue StandardError => e
        raise "Invalid JFrog Xray JSON file provided Exception: #{e}"
      end
    end

    # Builds the single (always 'failed') HDF result for one Xray vulnerability.
    def finding(vulnerability)
      finding = {}
      finding['status'] = 'failed'
      finding['code_desc'] = []
      finding['code_desc'] << "source_comp_id : #{vulnerability['source_comp_id']}"
      finding['code_desc'] << "vulnerable_versions : #{vulnerability['component_versions']['vulnerable_versions']}"
      finding['code_desc'] << "fixed_versions : #{vulnerability['component_versions']['fixed_versions']}"
      finding['code_desc'] << "issue_type : #{vulnerability['issue_type']}"
      finding['code_desc'] << "provider : #{vulnerability['provider']}"
      finding['code_desc'] = finding['code_desc'].join("\n")
      finding['run_time'] = NA_FLOAT

      # Xray results do not provide a scan timestamp; NA placeholder satisfies HDF.
      finding['start_time'] = NA_STRING
      [finding]
    end

    # Maps the given CWE ids to NIST tags via the bundled CSV; falls back to
    # DEFAULT_NIST_TAG when no CWE matches.
    def nist_tag(cweid)
      entries = @cwe_nist_mapping.select { |x| cweid.include? x[:cweid].to_s }
      tags = entries.map { |x| x[:nistid] }
      tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
    end

    # Extracts bare id numbers from reference-style identifiers (e.g. CWE-297 -> "297")
    # found in the vulnerability's first CVE entry; empty array when absent.
    def parse_identifiers(vulnerability, ref)
      vulnerability['component_versions']['more_details']['cves'][0][ref.downcase].map { |e| e.split("#{ref}-")[1] }
    rescue
      return []
    end

    # Looks up the HDF impact score for an Xray severity string (case-insensitive).
    def impact(severity)
      IMPACT_MAPPING[severity.downcase.to_sym]
    end

    # Loads the CWE-to-NIST mapping CSV into an array of symbol-keyed hashes.
    def parse_mapper
      csv_data = CSV.read(CWE_NIST_MAPPING_FILE,
                          encoding: 'UTF-8',
                          headers: true,
                          header_converters: :symbol,
                          converters: :all)
      csv_data.map(&:to_hash)
    end

    # Builds an HDF description entry, substituting NA placeholders for nils.
    def desc_tags(data, label)
      { "data": data || NA_STRING, "label": label || NA_STRING }
    end

    # Xray report could have multiple vulnerability entries for multiple findings of same issue type.
    # The meta data is identical across entries
    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
    def collapse_duplicates(controls)
      unique_controls = []

      controls.map { |x| x['id'] }.uniq.each do |id|
        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
        unique_control = controls.find { |x| x['id'].eql?(id) }
        unique_control['results'] = collapsed_results.flatten
        unique_controls << unique_control
      end
      unique_controls
    end

    # Builds and returns the HDF representation of the parsed Xray report.
    def to_hdf
      controls = []
      @project['data'].uniq.each do |vulnerability|
        printf("\rProcessing: %s", $spinner.next)

        item = {}
        item['tags'] = {}
        item['refs'] = NA_ARRAY
        item['source_location'] = NA_HASH
        # (Fixed: 'descriptions' was previously assigned twice; the final
        # NA_ARRAY value is the one that took effect.)
        item['descriptions'] = NA_ARRAY

        # Xray JSONs might not have `id` fields populated.
        # If that's the case, an MD5 of the summary is used so findings of the
        # same type still collapse together.
        item['id'] = vulnerability['id'].empty? ? OpenSSL::Digest::MD5.digest(vulnerability['summary'].to_s).unpack("H*")[0].to_s : vulnerability['id']
        item['title'] = vulnerability['summary'].to_s
        item['desc'] = vulnerability['component_versions']['more_details']['description'].to_s
        item['impact'] = impact(vulnerability['severity'].to_s)
        item['code'] = NA_STRING
        item['results'] = finding(vulnerability)

        item['tags']['nist'] = nist_tag(parse_identifiers(vulnerability, 'CWE'))
        item['tags']['cweid'] = parse_identifiers(vulnerability, 'CWE')

        controls << item
      end

      controls = collapse_duplicates(controls)
      results = HeimdallDataFormat.new(profile_name: "JFrog Xray Scan",
                                       version: NA_STRING,
                                       title: "JFrog Xray Scan",
                                       summary: "Continuous Security and Universal Artifact Analysis",
                                       controls: controls)
      results.to_hdf
    end
  end
end
@@ -0,0 +1,149 @@
1
+ require 'json'
2
+ require 'csv'
3
+ require 'heimdall_tools/hdf'
4
+
5
# Directory holding the bundled Nikto-to-NIST mapping data.
RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')

NIKTO_NIST_MAPPING_FILE = File.join(RESOURCE_DIR, 'nikto-nist-mapping.csv')

# Translates severity labels into HDF impact scores.
IMPACT_MAPPING = {
  high: 0.7,
  medium: 0.5,
  low: 0.3,
}.freeze

# NIST tags applied when a Nikto id has no entry in the mapping CSV.
DEFAULT_NIST_TAG = ["SA-11", "RA-5"].freeze

# Loading spinner sign
$spinner = Enumerator.new do |e|
  loop do
    e.yield '|'
    e.yield '/'
    e.yield '-'
    e.yield '\\'
  end
end
26
+
27
module HeimdallTools
  # Maps a single-target Nikto web-scanner JSON report into the Heimdall Data
  # Format (HDF). One HDF control is emitted per unique Nikto finding id;
  # repeated findings of the same id are collapsed into one control.
  class NiktoMapper
    # nikto_json:: raw JSON text of a single-target Nikto scan
    # name::       unused; kept for signature parity with the other mappers
    # verbose::    stored but not otherwise read in this class
    def initialize(nikto_json, name = nil, verbose = false)
      @nikto_json = nikto_json
      @verbose = verbose

      begin
        @nikto_nist_mapping = parse_mapper
      rescue StandardError => e
        raise "Invalid Nikto to NIST mapping file: Exception: #{e}"
      end

      # TODO: Support multi-target scan results
      # Nikto multi-target scans generate invalid format JSONs
      # Possible workaround to use https://stackoverflow.com/a/58209963/1670307
      begin
        @project = JSON.parse(nikto_json)
      rescue StandardError => e
        raise "Invalid Nikto JSON file provided\nNote: nikto_mapper does not support multi-target scan results\n\nException: #{e}"
      end
    end

    # Pulls scan-level metadata (policy, target host/port, banner) from the
    # parsed report.
    def extract_scaninfo(project)
      info = {}
      begin
        info['policy'] = 'Nikto Website Scanner'
        info['version'] = NA_STRING
        info['projectName'] = "Host: #{project['host']} Port: #{project['port']}"
        info['summary'] = "Banner: #{project['banner']}"

        info
      rescue StandardError => e
        raise "Error extracting project info from nikto JSON file provided Exception: #{e}"
      end
    end

    # Builds the single (always 'failed') HDF result for one Nikto finding.
    def finding(vulnerability)
      finding = {}
      finding['status'] = 'failed'
      finding['code_desc'] = "URL : #{vulnerability['url']} Method: #{vulnerability['method']}"
      finding['run_time'] = NA_FLOAT
      finding['start_time'] = NA_STRING
      [finding]
    end

    # Maps a Nikto finding id to NIST tags via the bundled CSV; falls back to
    # DEFAULT_NIST_TAG when the id is unmapped.
    def nist_tag(niktoid)
      entries = @nikto_nist_mapping.select { |x| niktoid.eql?(x[:niktoid].to_s) }
      tags = entries.map { |x| x[:nistid] }
      tags.empty? ? DEFAULT_NIST_TAG : tags.flatten.uniq
    end

    # Looks up the HDF impact score for a severity label.
    def impact(severity)
      IMPACT_MAPPING[severity.to_sym]
    end

    # Loads the Nikto-to-NIST mapping CSV into an array of symbol-keyed hashes.
    def parse_mapper
      csv_data = CSV.read(NIKTO_NIST_MAPPING_FILE,
                          encoding: 'UTF-8',
                          headers: true,
                          header_converters: :symbol)
      csv_data.map(&:to_hash)
    end

    # Builds an HDF description entry, substituting NA placeholders for nils.
    def desc_tags(data, label)
      { "data": data || NA_STRING, "label": label || NA_STRING }
    end

    # Nikto report could have multiple vulnerability entries for multiple findings of same issue type.
    # The meta data is identical across entries
    # method collapse_duplicates return unique controls with applicable findings collapsed into it.
    def collapse_duplicates(controls)
      unique_controls = []

      controls.map { |x| x['id'] }.uniq.each do |id|
        collapsed_results = controls.select { |x| x['id'].eql?(id) }.map { |x| x['results'] }
        unique_control = controls.find { |x| x['id'].eql?(id) }
        unique_control['results'] = collapsed_results.flatten
        unique_controls << unique_control
      end
      unique_controls
    end

    # Builds and returns the HDF representation of the parsed Nikto report.
    def to_hdf
      controls = []
      @project['vulnerabilities'].each do |vulnerability|
        printf("\rProcessing: %s", $spinner.next)

        item = {}
        item['tags'] = {}
        item['refs'] = NA_ARRAY
        item['source_location'] = NA_HASH
        # (Fixed: 'descriptions' was previously assigned twice; the final
        # NA_ARRAY value is the one that took effect.)
        item['descriptions'] = NA_ARRAY

        item['title'] = vulnerability['msg'].to_s
        item['id'] = vulnerability['id'].to_s

        # Nikto results JSON does not provide a description field;
        # duplicating the vulnerability msg field instead.
        item['desc'] = vulnerability['msg'].to_s

        # Nikto does not provide finding severity; hard-coding severity to medium
        item['impact'] = impact('medium')
        item['code'] = NA_STRING
        item['results'] = finding(vulnerability)
        item['tags']['nist'] = nist_tag(vulnerability['id'].to_s)
        # (Fixed: tag key was previously mis-encoded as 'ösvdb'.)
        item['tags']['osvdb'] = vulnerability['OSVDB']

        controls << item
      end

      controls = collapse_duplicates(controls)
      scaninfo = extract_scaninfo(@project)
      results = HeimdallDataFormat.new(profile_name: scaninfo['policy'],
                                       version: scaninfo['version'],
                                       title: "Nikto Target: #{scaninfo['projectName']}",
                                       summary: "Banner: #{scaninfo['summary']}",
                                       controls: controls,
                                       target_id: scaninfo['projectName'])
      results.to_hdf
    end
  end
end