heimdall_tools 1.3.45 → 1.3.49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,140 @@
+ rule,nistid
+ acm-certificate-with-close-expiration-date,SC-12
+ acm-certificate-with-transparency-logging-disabled,SC-12
+ cloudformation-stack-with-role,AC-6
+ cloudtrail-duplicated-global-services-logging,AU-6
+ cloudtrail-no-cloudwatch-integration,AU-12|SI-4(2)
+ cloudtrail-no-data-logging,AU-12
+ cloudtrail-no-encryption-with-kms,AU-6
+ cloudtrail-no-global-services-logging,AU-12
+ cloudtrail-no-log-file-validation,AU-6
+ cloudtrail-no-logging,AU-12
+ cloudtrail-not-configured,AU-12
+ cloudwatch-alarm-without-actions,AU-12
+ config-recorder-not-configured,CM-8|CM-8(2)|CM-8(6)
+ ec2-ami-public,AC-3
+ ec2-default-security-group-in-use,AC-3(3)
+ ec2-default-security-group-with-rules,AC-3(3)
+ ec2-ebs-snapshot-not-encrypted,SC-28
+ ec2-ebs-snapshot-public,AC-3
+ ec2-ebs-volume-not-encrypted,SC-28
+ ec2-instance-in-security-group,CM-7(1)
+ ec2-instance-type,CM-2
+ ec2-instance-types,CM-2
+ ec2-instance-with-public-ip,AC-3
+ ec2-instance-with-user-data-secrets,AC-3
+ ec2-security-group-opens-all-ports,CM-7(1)
+ ec2-security-group-opens-all-ports-to-all,CM-7(1)
+ ec2-security-group-opens-all-ports-to-self,CM-7(1)
+ ec2-security-group-opens-icmp-to-all,CM-7(1)
+ ec2-security-group-opens-known-port-to-all,CM-7(1)
+ ec2-security-group-opens-plaintext-port,CM-7(1)
+ ec2-security-group-opens-port-range,CM-7(1)
+ ec2-security-group-opens-port-to-all,CM-7(1)
+ ec2-security-group-whitelists-aws,CM-7(1)
+ ec2-security-group-whitelists-aws-ip-from-banned-region,CM-7(1)
+ ec2-security-group-whitelists-non-elastic-ips,CM-7(1)
+ ec2-security-group-whitelists-unknown-aws,CM-7(1)
+ ec2-security-group-whitelists-unknown-cidrs,CM-7(1)
+ ec2-unused-security-group,CM-7(1)
+ elb-listener-allowing-cleartext,SC-8
+ elb-no-access-logs,AU-12
+ elb-older-ssl-policy,SC-8
+ elbv2-http-request-smuggling,SC-8
+ elbv2-listener-allowing-cleartext,SC-8
+ elbv2-no-access-logs,AU-12
+ elbv2-no-deletion-protection,SI-7
+ elbv2-older-ssl-policy,SC-8
+ iam-assume-role-lacks-external-id-and-mfa,AC-17
+ iam-assume-role-no-mfa,AC-6
+ iam-assume-role-policy-allows-all,AC-6
+ iam-ec2-role-without-instances,AC-6
+ iam-group-with-inline-policies,AC-6
+ iam-group-with-no-users,AC-6
+ iam-human-user-with-policies,AC-6
+ iam-inline-policy-allows-non-sts-action,AC-6
+ iam-inline-policy-allows-NotActions,AC-6
+ iam-inline-policy-for-role,AC-6
+ iam-managed-policy-allows-full-privileges,AC-6
+ iam-managed-policy-allows-non-sts-action,AC-6
+ iam-managed-policy-allows-NotActions,AC-6
+ iam-managed-policy-for-role,AC-6
+ iam-managed-policy-no-attachments,AC-6
+ iam-no-support-role,IR-7
+ iam-password-policy-expiration-threshold,AC-2
+ iam-password-policy-minimum-length,AC-2
+ iam-password-policy-no-expiration,AC-2
+ iam-password-policy-no-lowercase-required,AC-2
+ iam-password-policy-no-number-required,AC-2
+ iam-password-policy-no-symbol-required,AC-2
+ iam-password-policy-no-uppercase-required,AC-2
+ iam-password-policy-reuse-enabled,IA-5(1)
+ iam-role-with-inline-policies,AC-6
+ iam-root-account-no-hardware-mfa,IA-2(1)
+ iam-root-account-no-mfa,IA-2(1)
+ iam-root-account-used-recently,AC-6(9)
+ iam-root-account-with-active-certs,AC-6(9)
+ iam-root-account-with-active-keys,AC-6(9)
+ iam-service-user-with-password,AC-2
+ iam-unused-credentials-not-disabled,AC-2
+ iam-user-no-key-rotation,AC-2
+ iam-user-not-in-category-group,AC-2
+ iam-user-not-in-common-group,AC-2
+ iam-user-unused-access-key-initial-setup,AC-2
+ iam-user-with-multiple-access-keys,IA-2
+ iam-user-without-mfa,IA-2(1)
+ iam-user-with-password-and-key,IA-2
+ iam-user-with-policies,AC-2
+ kms-cmk-rotation-disabled,SC-12
+ logs-no-alarm-aws-configuration-changes,CM-8|CM-8(2)|CM-8(6)
+ logs-no-alarm-cloudtrail-configuration-changes,AU-6
+ logs-no-alarm-cmk-deletion,AC-2
+ logs-no-alarm-console-authentication-failures,AC-2
+ logs-no-alarm-iam-policy-changes,AC-2
+ logs-no-alarm-nacl-changes,CM-6(2)
+ logs-no-alarm-network-gateways-changes,AU-12|CM-6(2)
+ logs-no-alarm-root-usage,AU-2
+ logs-no-alarm-route-table-changes,AU-12|CM-6(2)
+ logs-no-alarm-s3-policy-changes,AC-6|AU-12
+ logs-no-alarm-security-group-changes,AC-2(4)
+ logs-no-alarm-signin-without-mfa,AC-2
+ logs-no-alarm-unauthorized-api-calls,AU-6|SI-4(2)
+ logs-no-alarm-vpc-changes,CM-6(1)
+ rds-instance-backup-disabled,CP-9
+ rds-instance-ca-certificate-deprecated,SC-12
+ rds-instance-no-minor-upgrade,SI-2
+ rds-instance-short-backup-retention-period,CP-9
+ rds-instance-single-az,CP-7
+ rds-instance-storage-not-encrypted,SC-28
+ rds-postgres-instance-with-invalid-certificate,SC-12
+ rds-security-group-allows-all,CM-7(1)
+ rds-snapshot-public,SC-28
+ redshift-cluster-database-not-encrypted,SC-28
+ redshift-cluster-no-version-upgrade,SI-2
+ redshift-cluster-publicly-accessible,AC-3
+ redshift-parameter-group-logging-disabled,AU-12
+ redshift-parameter-group-ssl-not-required,SC-8
+ redshift-security-group-whitelists-all,CM-7(1)
+ route53-domain-no-autorenew,SC-2
+ route53-domain-no-transferlock,SC-2
+ route53-domain-transferlock-not-authorized,SC-2
+ s3-bucket-allowing-cleartext,SC-28
+ s3-bucket-no-default-encryption,SC-28
+ s3-bucket-no-logging,AU-2|AU-12
+ s3-bucket-no-mfa-delete,SI-7
+ s3-bucket-no-versioning,SI-7
+ s3-bucket-world-acl,AC-3(3)
+ s3-bucket-world-policy-arg,AC-3(3)
+ s3-bucket-world-policy-star,AC-3(3)
+ ses-identity-dkim-not-enabled,SC-23
+ ses-identity-dkim-not-verified,SC-23
+ ses-identity-world-policy,AC-6
+ sns-topic-world-policy,AC-6
+ sqs-queue-world-policy,AC-6
+ vpc-custom-network-acls-allow-all,SC-7
+ vpc-default-network-acls-allow-all,SC-7
+ vpc-network-acl-not-used,SC-7
+ vpc-routing-tables-with-peering,AC-3(3)
+ vpc-subnet-with-bad-acls,SC-7
+ vpc-subnet-with-default-acls,SC-7
+ vpc-subnet-without-flow-log,AU-12
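
The mapping above ties each rule name to one or more NIST 800-53 control IDs, with multiple IDs for a single rule joined by '|'. A minimal standalone sketch (not part of the gem) of how one row splits under that convention:

    require 'csv'

    # One row from aws-config-mapping.csv; '|' joins multiple NIST IDs.
    row = CSV.parse_line('config-recorder-not-configured,CM-8|CM-8(2)|CM-8(6)',
                         headers: %w{rule nistid})
    row['rule']              # => "config-recorder-not-configured"
    row['nistid'].split('|') # => ["CM-8", "CM-8(2)", "CM-8(6)"]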
@@ -0,0 +1,11 @@
+ module HeimdallTools
+   class FirewallManager
+     def self.finding_id(finding, *, encode:, **)
+       encode.call(finding['Title'])
+     end
+
+     def self.product_name(findings, *, encode:, **)
+       encode.call("#{findings[0]['ProductFields']['aws/securityhub/CompanyName']} #{findings[0]['ProductFields']['aws/securityhub/ProductName']}")
+     end
+   end
+ end
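
These product-specific helpers share one calling convention: a parsed finding (or array of findings) plus an encode: callback supplied by the mapper. A usage sketch with a hypothetical finding, mirroring the HTMLEntities encoder that ASFFMapper defines later in this diff:

    require 'htmlentities'
    require 'heimdall_tools/asff_compatible_products/firewall_manager'

    coder = HTMLEntities.new
    encode = ->(s) { coder.encode(s, :basic, :named, :decimal) }

    finding = { 'Title' => 'Security group allows <all> traffic' } # hypothetical
    HeimdallTools::FirewallManager.finding_id(finding, encode: encode)
    # => "Security group allows &lt;all&gt; traffic"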
@@ -0,0 +1,19 @@
+ module HeimdallTools
+   class Prowler
+     def self.subfindings_code_desc(finding, *, encode:, **)
+       encode.call(finding['Description'])
+     end
+
+     def self.finding_id(finding, *, encode:, **)
+       encode.call(finding['GeneratorId'].partition('-')[-1])
+     end
+
+     def self.product_name(findings, *, encode:, **)
+       encode.call(findings[0]['ProductFields']['ProviderName'])
+     end
+
+     def self.desc(*, **)
+       ' '
+     end
+   end
+ end
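
Prowler's finding_id keeps everything after the first hyphen of GeneratorId via partition('-')[-1]. A quick sketch with an identity encoder and a hypothetical check ID:

    require 'heimdall_tools/asff_compatible_products/prowler'

    finding = { 'GeneratorId' => 'prowler-extra713' } # hypothetical check ID
    HeimdallTools::Prowler.finding_id(finding, encode: ->(s) { s })
    # => "extra713"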
@@ -0,0 +1,89 @@
+ require 'csv'
+ require 'json'
+
+ module HeimdallTools
+   class SecurityHub
+     private_class_method def self.corresponding_control(controls, finding)
+       controls.find { |c| c['StandardsControlArn'] == finding['ProductFields']['StandardsControlArn'] }
+     end
+
+     def self.supporting_docs(standards:)
+       begin
+         controls = standards.nil? ? nil : standards.map { |s| JSON.parse(s)['Controls'] }.flatten
+       rescue StandardError => e
+         raise "Invalid supporting docs for Security Hub:\nException: #{e}"
+       end
+
+       begin
+         resource_dir = Pathname.new(__FILE__).join('../../../data')
+         aws_config_mapping_file = File.join(resource_dir, 'aws-config-mapping.csv')
+         aws_config_mapping = CSV.read(aws_config_mapping_file, { encoding: 'UTF-8', headers: true, header_converters: :symbol }).map(&:to_hash)
+       rescue StandardError => e
+         raise "Invalid AWS Config mapping file:\nException: #{e}"
+       end
+
+       { controls: controls, aws_config_mapping: aws_config_mapping }
+     end
+
+     def self.finding_id(finding, *, encode:, controls: nil, **)
+       ret = if !controls.nil? && !(control = corresponding_control(controls, finding)).nil?
+               control['ControlId']
+             elsif finding['ProductFields'].member?('ControlId') # check if aws
+               finding['ProductFields']['ControlId']
+             elsif finding['ProductFields'].member?('RuleId') # check if cis
+               finding['ProductFields']['RuleId']
+             else
+               finding['GeneratorId'].split('/')[-1]
+             end
+       encode.call(ret)
+     end
+
+     def self.finding_impact(finding, *, controls: nil, **)
+       if !controls.nil? && !(control = corresponding_control(controls, finding)).nil?
+         imp = control['SeverityRating'].to_sym
+       else
+         # Severity is required, but internally can be either 'Label' or 'Normalized', with 'Label' preferred; other values, such as the original severity rating, can also be present.
+         imp = finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0
+         # Security Hub ASFF files can under-rate findings by marking things that shouldn't be informational as informational, so when additional context (i.e. standards) is not provided, raise informational to medium.
+         imp = :MEDIUM if imp.is_a?(Symbol) && imp == :INFORMATIONAL
+       end
+       imp
+     end
+
+     def self.finding_nist_tag(finding, *, aws_config_mapping:, **)
+       return {} unless finding['ProductFields']['RelatedAWSResources:0/type'] == 'AWS::Config::ConfigRule'
+
+       entries = aws_config_mapping.select { |rule| finding['ProductFields']['RelatedAWSResources:0/name'].include? rule[:awsconfigrulename] }
+       entries.map do |rule|
+         tags_joined = rule[:nistid].split('|') # multiple NIST tags are joined with '|' in the csv file
+         tags_joined.map do |tag|
+           if (i = tag.index('(')).nil?
+             tag
+           else
+             tag[i..-1].scan(/\(.+?\)/).map { |subheading| "#{tag[0..i-1]}#{subheading}" }
+           end
+         end
+       end.flatten.uniq
+     end
+
+     def self.finding_title(finding, *, encode:, controls: nil, **)
+       ret = if !controls.nil? && !(control = corresponding_control(controls, finding)).nil?
+               control['Title']
+             else
+               finding['Title']
+             end
+       encode.call(ret)
+     end
+
+     def self.product_name(findings, *, encode:, **)
+       # "#{findings[0]['ProductFields']['aws/securityhub/CompanyName']} #{findings[0]['ProductFields']['aws/securityhub/ProductName']}"
+       # the above is not used because the standard's name is reported instead
+       if findings[0]['Types'][0].split('/')[-1].gsub(/-/, ' ').downcase == findings[0]['ProductFields']['StandardsControlArn'].split('/')[-4].gsub(/-/, ' ').downcase
+         standardname = findings[0]['Types'][0].split('/')[-1].gsub(/-/, ' ')
+       else
+         standardname = findings[0]['ProductFields']['StandardsControlArn'].split('/')[-4].gsub(/-/, ' ').split.map(&:capitalize).join(' ')
+       end
+       encode.call("#{standardname} v#{findings[0]['ProductFields']['StandardsControlArn'].split('/')[-2]}")
+     end
+   end
+ end
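
finding_nist_tag expands the '|'-joined nistid column from aws-config-mapping.csv, and a tag carrying several parenthesized subheadings becomes one tag per subheading. A standalone replica of that expansion step (the 'AC-6(1)(2)' input is hypothetical):

    # Mirrors the subheading expansion inside finding_nist_tag.
    def expand(tag)
      i = tag.index('(')
      return tag if i.nil?

      tag[i..-1].scan(/\(.+?\)/).map { |subheading| "#{tag[0..i - 1]}#{subheading}" }
    end

    expand('AU-12')      # => "AU-12"
    expand('SI-4(2)')    # => ["SI-4(2)"]
    expand('AC-6(1)(2)') # => ["AC-6(1)", "AC-6(2)"]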
@@ -0,0 +1,232 @@
+ require 'json'
+ require 'set'
+
+ require 'htmlentities'
+
+ require 'heimdall_tools/hdf'
+ require 'heimdall_tools/asff_compatible_products/firewall_manager'
+ require 'heimdall_tools/asff_compatible_products/prowler'
+ require 'heimdall_tools/asff_compatible_products/securityhub'
+
+ module HeimdallTools
+   DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
+
+   INSPEC_INPUTS_MAPPING = {
+     string: 'String',
+     numeric: 'Numeric',
+     regexp: 'Regexp',
+     array: 'Array',
+     hash: 'Hash',
+     boolean: 'Boolean',
+     any: 'Any'
+   }.freeze
+
+   # Loading spinner sign
+   $spinner = Enumerator.new do |e|
+     loop do
+       e.yield '|'
+       e.yield '/'
+       e.yield '-'
+       e.yield '\\'
+     end
+   end
+
+   # TODO: use hash.dig and safe navigation operator throughout
+   class ASFFMapper
+     IMPACT_MAPPING = {
+       CRITICAL: 0.9,
+       HIGH: 0.7,
+       MEDIUM: 0.5,
+       LOW: 0.3,
+       INFORMATIONAL: 0.0
+     }.freeze
+
+     PRODUCT_ARN_MAPPING = {
+       %r{arn:.+:securityhub:.+:.*:product/aws/firewall-manager} => FirewallManager,
+       %r{arn:.+:securityhub:.+:.*:product/aws/securityhub} => SecurityHub,
+       %r{arn:.+:securityhub:.+:.*:product/prowler/prowler} => Prowler
+     }.freeze
+
+     def initialize(asff_json, securityhub_standards_json_array: nil, meta: nil)
+       @meta = meta
+
+       @supporting_docs = {}
+       @supporting_docs[SecurityHub] = SecurityHub.supporting_docs({ standards: securityhub_standards_json_array })
+
+       begin
+         asff_required_keys = %w{AwsAccountId CreatedAt Description GeneratorId Id ProductArn Resources SchemaVersion Severity Title Types UpdatedAt}
+         @report = JSON.parse(asff_json)
+         if @report.length == 1 && @report.member?('Findings') && @report['Findings'].each { |finding| asff_required_keys.to_set.difference(finding.keys.to_set).none? }.all?
+           # ideal case that is spec compliant
+           # might need to ensure that the file is utf-8 encoded and remove a BOM if one exists
+         elsif asff_required_keys.to_set.difference(@report.keys.to_set).none?
+           # individual finding, so add the wrapping array
+           @report = { 'Findings' => [@report] }
+         else
+           raise 'Not a findings file nor an individual finding'
+         end
+       rescue StandardError => e
+         raise "Invalid ASFF file provided:\nException: #{e}"
+       end
+
+       @coder = HTMLEntities.new
+     end
+
+     def encode(string)
+       @coder.encode(string, :basic, :named, :decimal)
+     end
+
+     def external_product_handler(product, data, func, default)
+       if (product.is_a?(Regexp) || (arn = PRODUCT_ARN_MAPPING.keys.find { |a| product.match(a) })) && PRODUCT_ARN_MAPPING.key?(arn || product) && PRODUCT_ARN_MAPPING[arn || product].respond_to?(func)
+         keywords = { encode: method(:encode) }
+         keywords = keywords.merge(@supporting_docs[PRODUCT_ARN_MAPPING[arn || product]]) if @supporting_docs.member?(PRODUCT_ARN_MAPPING[arn || product])
+         PRODUCT_ARN_MAPPING[arn || product].send(func, data, **keywords)
+       elsif default.is_a? Proc
+         default.call
+       else
+         default
+       end
+     end
+
+     def nist_tag(finding)
+       tags = external_product_handler(finding['ProductArn'], finding, :finding_nist_tag, {})
+       tags.empty? ? DEFAULT_NIST_TAG : tags
+     end
+
+     def impact(finding)
+       # there can be findings that are intentionally ignored because the underlying control is superseded by a control from a different standard
+       if finding.member?('Workflow') && finding['Workflow'].member?('Status') && finding['Workflow']['Status'] == 'SUPPRESSED'
+         imp = :INFORMATIONAL
+       else
+         # Severity is required, but internally can be either 'Label' or 'Normalized', with 'Label' preferred; other values, such as the original severity rating, can also be present.
+         default = proc { finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0 }
+         imp = external_product_handler(finding['ProductArn'], finding, :finding_impact, default)
+       end
+       imp.is_a?(Symbol) ? IMPACT_MAPPING[imp] : imp
+     end
+
+     def desc_tags(data, label)
+       { data: data || NA_STRING, label: label || NA_STRING }
+     end
+
+     def subfindings(finding)
+       subfinding = {}
+
+       statusreason = finding['Compliance']['StatusReasons'].map { |reason| reason.flatten.map { |string| encode(string) } }.flatten.join("\n") if finding.key?('Compliance') && finding['Compliance'].key?('StatusReasons')
+       if finding.key?('Compliance') && finding['Compliance'].key?('Status')
+         case finding['Compliance']['Status']
+         when 'PASSED'
+           subfinding['status'] = 'passed'
+           subfinding['message'] = statusreason if statusreason
+         when 'WARNING'
+           subfinding['status'] = 'skipped'
+           subfinding['skip_message'] = statusreason if statusreason
+         when 'FAILED'
+           subfinding['status'] = 'failed'
+           subfinding['message'] = statusreason if statusreason
+         when 'NOT_AVAILABLE'
+           # primarily means the check could not be performed due to a service outage or API error, but it is also overloaded to mean NOT_APPLICABLE, so 'skipped' or 'error' could technically apply; AWS appears to treat it as skipped
+           subfinding['status'] = 'skipped'
+           subfinding['skip_message'] = statusreason if statusreason
+         else
+           subfinding['status'] = 'error' # not a valid value for the status enum
+           subfinding['message'] = statusreason if statusreason
+         end
+       else
+         subfinding['status'] = 'skipped' # no compliance status provided, which is unusual but possible, so skip
+         subfinding['skip_message'] = statusreason if statusreason
+       end
+
+       subfinding['code_desc'] = external_product_handler(finding['ProductArn'], finding, :subfindings_code_desc, '')
+       subfinding['code_desc'] += '; ' unless subfinding['code_desc'].empty?
+       subfinding['code_desc'] += "Resources: [#{finding['Resources'].map { |r| "Type: #{encode(r['Type'])}, Id: #{encode(r['Id'])}#{", Partition: #{encode(r['Partition'])}" if r.key?('Partition')}#{", Region: #{encode(r['Region'])}" if r.key?('Region')}" }.join(', ')}]"
+
+       subfinding['start_time'] = finding.key?('LastObservedAt') ? finding['LastObservedAt'] : finding['UpdatedAt']
+
+       [subfinding]
+     end
+
+     def to_hdf
+       product_groups = {}
+       @report['Findings'].each do |finding|
+         printf("\rProcessing: %s", $spinner.next)
+
+         external = method(:external_product_handler).curry(4)[finding['ProductArn']][finding]
+
+         # group subfindings by ASFF ProductArn and then HDF id
+         item = {}
+         item['id'] = external[:finding_id][encode(finding['GeneratorId'])]
+
+         item['title'] = external[:finding_title][encode(finding['Title'])]
+
+         item['tags'] = { nist: nist_tag(finding) }
+
+         item['impact'] = impact(finding)
+
+         item['desc'] = encode(finding['Description'])
+
+         item['descriptions'] = []
+         item['descriptions'] << desc_tags(finding['Remediation']['Recommendation'].map { |_k, v| encode(v) }.join("\n"), 'fix') if finding.key?('Remediation') && finding['Remediation'].key?('Recommendation')
+
+         item['refs'] = []
+         item['refs'] << { url: finding['SourceUrl'] } if finding.key?('SourceUrl')
+
+         item['source_location'] = NA_HASH
+
+         item['results'] = subfindings(finding)
+
+         arn = PRODUCT_ARN_MAPPING.keys.find { |a| finding['ProductArn'].match(a) }
+         if arn.nil?
+           product_info = finding['ProductArn'].split(':')[-1]
+           arn = Regexp.new "arn:.+:securityhub:.+:.*:product/#{product_info.split('/')[1]}/#{product_info.split('/')[2]}"
+         end
+         product_groups[arn] = {} if product_groups[arn].nil?
+         product_groups[arn][item['id']] = [] if product_groups[arn][item['id']].nil?
+         product_groups[arn][item['id']] << [item, finding]
+       end
+
+       controls = []
+       product_groups.each do |product, id_groups|
+         id_groups.each do |id, data|
+           printf("\rProcessing: %s", $spinner.next)
+
+           external = method(:external_product_handler).curry(4)[product]
+
+           group = data.map { |d| d[0] }
+           findings = data.map { |d| d[1] }
+
+           product_info = findings[0]['ProductArn'].split(':')[-1].split('/')
+           product_name = external[findings][:product_name][encode("#{product_info[1]}/#{product_info[2]}")]
+
+           item = {}
+           # add product name to id if any ids are the same across products
+           item['id'] = product_groups.reject { |pg| pg == product }.values.any? { |ig| ig.keys.include?(id) } ? "[#{product_name}] #{id}" : id
+
+           item['title'] = "#{product_name}: #{group.map { |d| d['title'] }.uniq.join(';')}"
+
+           item['tags'] = { nist: group.map { |d| d['tags'][:nist] }.flatten.uniq }
+
+           item['impact'] = group.map { |d| d['impact'] }.max
+
+           item['desc'] = external[group][:desc][group.map { |d| d['desc'] }.uniq.join("\n")]
+
+           item['descriptions'] = group.map { |d| d['descriptions'] }.flatten.compact.reject(&:empty?).uniq
+
+           item['refs'] = group.map { |d| d['refs'] }.flatten.compact.reject(&:empty?).uniq
+
+           item['source_location'] = NA_HASH
+           item['code'] = JSON.pretty_generate({ Findings: findings })
+
+           item['results'] = group.map { |d| d['results'] }.flatten.uniq
+
+           controls << item
+         end
+       end
+
+       results = HeimdallDataFormat.new(profile_name: @meta&.key?('name') ? @meta['name'] : 'AWS Security Finding Format',
+                                        title: @meta&.key?('title') ? @meta['title'] : 'ASFF findings',
+                                        controls: controls)
+       results.to_hdf
+     end
+   end
+ end
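
End to end, ASFFMapper accepts either a findings file or a single finding. A minimal, self-contained sketch (all field values hypothetical; assumes the gem is installed):

    require 'json'
    require 'heimdall_tools'

    # One finding with the twelve required ASFF keys; ASFFMapper wraps a bare
    # finding in the { 'Findings' => [...] } envelope itself.
    finding = {
      'AwsAccountId' => '123456789012',
      'CreatedAt' => '2021-10-01T00:00:00Z',
      'Description' => 'S3 bucket allows public read access',
      'GeneratorId' => 'custom/s3-public-read',
      'Id' => 'finding/001',
      'ProductArn' => 'arn:aws:securityhub:us-east-1:123456789012:product/example/example',
      'Resources' => [{ 'Type' => 'AwsS3Bucket', 'Id' => 'arn:aws:s3:::example-bucket' }],
      'SchemaVersion' => '2018-10-08',
      'Severity' => { 'Label' => 'HIGH' },
      'Title' => 'S3 bucket allows public read access',
      'Types' => ['Software and Configuration Checks'],
      'UpdatedAt' => '2021-10-02T00:00:00Z'
    }

    hdf = HeimdallTools::ASFFMapper.new(JSON.generate(finding)).to_hdf
    File.write('asff-results-hdf.json', hdf)

Because this ProductArn matches nothing in PRODUCT_ARN_MAPPING, every lookup falls back to the defaults: the control id comes from GeneratorId, the NIST tags from DEFAULT_NIST_TAG, and the 'HIGH' severity label maps to impact 0.7.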
@@ -8,7 +8,7 @@ RESOURCE_DIR = Pathname.new(__FILE__).join('../../data')
  AWS_CONFIG_MAPPING_FILE = File.join(RESOURCE_DIR, 'aws-config-mapping.csv')

  NOT_APPLICABLE_MSG = 'No AWS resources found to evaluate compliance for this rule'.freeze
- INSUFFICIENT_DATA_MSG = 'Not enough data has been collectd to determine compliance yet.'.freeze
+ INSUFFICIENT_DATA_MSG = 'Not enough data has been collected to determine compliance yet.'.freeze

  ##
  # HDF mapper for use with AWS Config rules.
@@ -57,10 +57,10 @@ module HeimdallTools

      results = HeimdallDataFormat.new(
        profile_name: 'AWS Config',
-       title: 'AWS Config',
-       summary: 'AWS Config',
-       controls: controls,
-       statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION },
+       title: 'AWS Config',
+       summary: 'AWS Config',
+       controls: controls,
+       statistics: { aws_config_sdk_version: Aws::ConfigService::GEM_VERSION },
      )
      results.to_hdf
    end
@@ -41,6 +41,15 @@ module HeimdallTools
        File.write(options[:output], hdf)
      end

+     desc 'xccdf_results_mapper', 'xccdf_results_mapper translates a SCAP client XCCDF-Results XML report to HDF format JSON to be viewed on Heimdall'
+     long_desc Help.text(:xccdf_results_mapper)
+     option :xml, required: true, aliases: '-x'
+     option :output, required: true, aliases: '-o'
+     def xccdf_results_mapper
+       hdf = HeimdallTools::XCCDFResultsMapper.new(File.read(options[:xml])).to_hdf
+       File.write(options[:output], hdf)
+     end
+
      desc 'nessus_mapper', 'nessus_mapper translates a Nessus XML report to HDF format JSON to be viewed on Heimdall'
      long_desc Help.text(:nessus_mapper)
      option :xml, required: true, aliases: '-x'
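
The new subcommand follows the same invocation pattern as the existing mappers, for example:

    heimdall_tools xccdf_results_mapper -x <xccdf-results-xml> -o <hdf-scan-results-json>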
@@ -61,7 +70,7 @@ module HeimdallTools
      option :output_prefix, required: true, aliases: '-o'
      def snyk_mapper
        hdfs = HeimdallTools::SnykMapper.new(File.read(options[:json]), options[:name]).to_hdf
-       puts "\r\HDF Generated:\n"
+       puts "\rHDF Generated:\n"
        hdfs.each_key do |host|
          File.write("#{options[:output_prefix]}-#{host}.json", hdfs[host])
          puts "#{options[:output_prefix]}-#{host}.json"
@@ -75,7 +84,7 @@ module HeimdallTools
      def nikto_mapper
        hdf = HeimdallTools::NiktoMapper.new(File.read(options[:json])).to_hdf
        File.write(options[:output], hdf)
-       puts "\r\HDF Generated:\n"
+       puts "\rHDF Generated:\n"
        puts options[:output].to_s
      end

@@ -86,7 +95,7 @@ module HeimdallTools
      def jfrog_xray_mapper
        hdf = HeimdallTools::JfrogXrayMapper.new(File.read(options[:json])).to_hdf
        File.write(options[:output], hdf)
-       puts "\r\HDF Generated:\n"
+       puts "\rHDF Generated:\n"
        puts options[:output].to_s
      end

@@ -97,7 +106,7 @@ module HeimdallTools
      def dbprotect_mapper
        hdf = HeimdallTools::DBProtectMapper.new(File.read(options[:xml])).to_hdf
        File.write(options[:output], hdf)
-       puts "\r\HDF Generated:\n"
+       puts "\rHDF Generated:\n"
        puts options[:output].to_s
      end

@@ -108,7 +117,7 @@ module HeimdallTools
      def aws_config_mapper
        hdf = HeimdallTools::AwsConfigMapper.new(options[:custom_mapping]).to_hdf
        File.write(options[:output], hdf)
-       puts "\r\HDF Generated:\n"
+       puts "\rHDF Generated:\n"
        puts options[:output].to_s
      end

@@ -119,7 +128,53 @@ module HeimdallTools
      def netsparker_mapper
        hdf = HeimdallTools::NetsparkerMapper.new(File.read(options[:xml])).to_hdf
        File.write(options[:output], hdf)
-       puts "\r\HDF Generated:\n"
+       puts "\rHDF Generated:\n"
+       puts options[:output].to_s
+     end
+
+     desc 'sarif_mapper', 'sarif_mapper translates a SARIF JSON file into HDF format JSON to be viewable in Heimdall'
+     long_desc Help.text(:sarif_mapper)
+     option :json, required: true, aliases: '-j'
+     option :output, required: true, aliases: '-o'
+     option :verbose, type: :boolean, aliases: '-V'
+     def sarif_mapper
+       hdf = HeimdallTools::SarifMapper.new(File.read(options[:json])).to_hdf
+       File.write(options[:output], hdf)
+       puts "\rHDF Generated:\n"
+       puts options[:output].to_s
+     end
+
+     desc 'scoutsuite_mapper', 'scoutsuite_mapper translates Scout Suite results from Javascript to HDF-formatted JSON so as to be viewable on Heimdall'
+     long_desc Help.text(:scoutsuite_mapper)
+     option :javascript, required: true, banner: 'SCOUTSUITE-RESULTS-JS', aliases: ['-i', '--input', '-j']
+     option :output, required: true, banner: 'HDF-SCAN-RESULTS-JSON', aliases: '-o'
+     def scoutsuite_mapper
+       hdf = HeimdallTools::ScoutSuiteMapper.new(File.read(options[:javascript])).to_hdf
+       File.write(options[:output], hdf)
+       puts "\rHDF Generated:\n"
+       puts options[:output].to_s
+     end
+
+     desc 'asff_mapper', 'asff_mapper translates AWS Security Finding Format results from JSON to HDF-formatted JSON so as to be viewable on Heimdall'
+     long_desc Help.text(:asff_mapper)
+     option :json, required: true, banner: 'ASFF-FINDING-JSON', aliases: ['-i', '--input', '-j']
+     option :securityhub_standards, required: false, type: :array, banner: 'ASFF-SECURITYHUB-STANDARDS-JSON', aliases: ['--sh', '--input-securityhub-standards']
+     option :output, required: true, banner: 'HDF-SCAN-RESULTS-JSON', aliases: '-o'
+     def asff_mapper
+       hdf = HeimdallTools::ASFFMapper.new(File.read(options[:json]), securityhub_standards_json_array: options[:securityhub_standards].nil? ? nil : options[:securityhub_standards].map { |filename| File.read(filename) }).to_hdf
+       File.write(options[:output], hdf)
+       puts "\rHDF Generated:\n"
+       puts options[:output].to_s
+     end
+
+     desc 'prowler_mapper', 'prowler_mapper translates Prowler-derived AWS Security Finding Format results from concatenated JSON blobs to HDF-formatted JSON so as to be viewable on Heimdall'
+     long_desc Help.text(:prowler_mapper)
+     option :json, required: true, banner: 'PROWLER-ASFF-JSON', aliases: ['-i', '--input', '-j']
+     option :output, required: true, banner: 'HDF-SCAN-RESULTS-JSON', aliases: '-o'
+     def prowler_mapper
+       hdf = HeimdallTools::ProwlerMapper.new(File.read(options[:json])).to_hdf
+       File.write(options[:output], hdf)
+       puts "\rHDF Generated:\n"
        puts options[:output].to_s
      end

@@ -58,9 +58,9 @@ module HeimdallTools
      def snippet(snippetid)
        snippet = @snippets.select { |x| x['id'].eql?(snippetid) }.first
        "\nPath: #{snippet['File']}\n" \
-         "StartLine: #{snippet['StartLine']}, " \
-         "EndLine: #{snippet['EndLine']}\n" \
-         "Code:\n#{snippet['Text']['#cdata-section'].strip}" \
+         "StartLine: #{snippet['StartLine']}, " \
+         "EndLine: #{snippet['EndLine']}\n" \
+         "Code:\n#{snippet['Text']['#cdata-section'].strip}" \
      end

      def nist_tag(rule)
@@ -0,0 +1,6 @@
+ asff_mapper translates AWS Security Finding Format results from JSON to HDF-formatted JSON so as to be viewable on Heimdall
+
+ Examples:
+
+ heimdall_tools asff_mapper -i <asff-finding-json> -o <hdf-scan-results-json>
+ heimdall_tools asff_mapper -i <asff-finding-json> --sh <standard-1-json> ... <standard-n-json> -o <hdf-scan-results-json>
@@ -0,0 +1,5 @@
+ prowler_mapper translates Prowler-derived AWS Security Finding Format results from concatenated JSON blobs to HDF-formatted JSON so as to be viewable on Heimdall
+
+ Examples:
+
+ heimdall_tools prowler_mapper -i <prowler-asff-json> -o <hdf-scan-results-json>
@@ -0,0 +1,12 @@
+ sarif_mapper translates a SARIF JSON file into HDF format JSON to be viewable in Heimdall
+
+ SARIF level to HDF impact Mapping:
+ SARIF level error -> HDF impact 0.7
+ SARIF level warning -> HDF impact 0.5
+ SARIF level note -> HDF impact 0.3
+ SARIF level none -> HDF impact 0.1
+ SARIF level not provided -> HDF impact 0.1 as default
+
+ Examples:
+
+ heimdall_tools sarif_mapper [OPTIONS] -j <sarif-results-json> -o <hdf-scan-results.json>
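
For reference, the documented level-to-impact table as a Ruby sketch (the SarifMapper implementation itself is not part of this diff):

    # SARIF level -> HDF impact, per the table above; 0.1 is the default
    # when no level is provided.
    def sarif_impact(level)
      { 'error' => 0.7, 'warning' => 0.5, 'note' => 0.3, 'none' => 0.1 }.fetch(level, 0.1)
    end

    sarif_impact('warning') # => 0.5
    sarif_impact(nil)       # => 0.1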