ai_root_shield 0.2.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +52 -3
- data/Gemfile.lock +1 -1
- data/README.md +109 -3
- data/examples/policies/banking_policy.json +79 -0
- data/examples/policies/development_policy.json +64 -0
- data/examples/policies/enterprise_policy.json +89 -0
- data/exe/ai_root_shield +157 -14
- data/lib/ai_root_shield/advanced_proxy_detector.rb +406 -0
- data/lib/ai_root_shield/certificate_pinning_helper.rb +258 -0
- data/lib/ai_root_shield/enterprise_policy_manager.rb +431 -0
- data/lib/ai_root_shield/rasp_protection.rb +359 -0
- data/lib/ai_root_shield/version.rb +1 -1
- data/lib/ai_root_shield.rb +171 -4
- metadata +16 -5
@@ -0,0 +1,258 @@
|
|
1
|
+
# frozen_string_literal: true

require "openssl"
require "net/http"
require "uri"
require "digest" # FIX: Digest::SHA256 is used in generate_cache_key; it was never required

module AiRootShield
  # Certificate pinning helper for TLS public-key pinning integration.
  #
  # Maintains a per-host registry of expected pins (base64-encoded hashes of
  # a certificate's public-key DER), validates live certificate chains
  # against that registry, and can fetch a remote endpoint's current chain
  # to generate pins from it.
  class CertificatePinningHelper
    # Supported hash algorithms for pinning.
    SUPPORTED_ALGORITHMS = %w[sha256 sha1].freeze

    # Common certificate authorities and their root pins.
    COMMON_CA_PINS = {
      "letsencrypt" => [
        "sha256/YLh1dUR9y6Kja30RrAn7JKnbQG/uEtLMkBgFF2Fuihg=", # ISRG Root X1
        "sha256/sRHdihwgkaib1P1gxX8HFszlD+7/gTfNvuAybgLPNis=" # ISRG Root X2
      ],
      "digicert" => [
        "sha256/WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18=", # DigiCert Global Root G2
        "sha256/RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho=" # DigiCert Global Root CA
      ],
      "google" => [
        "sha256/KwccWaCgrnaw6tsrrSO61FgLacNgG2MMLq8GE6+oP5I=", # GTS Root R1
        "sha256/FEzVOUp4dF3gI0ZVPRJhFbSD608T5Wx5Bp0+jBw/gQo=" # GTS Root R2
      ]
    }.freeze

    # @param config [Hash] optional overrides merged over the defaults below
    def initialize(config = {})
      @config = {
        algorithm: "sha256",
        backup_pins: [],
        pin_validation_enabled: true,
        allow_backup_pins: true,
        strict_mode: false
      }.merge(config)

      @pinned_hosts = {}     # normalized host => pin configuration
      @validation_cache = {} # cache key => cached validation result
    end

    # Add certificate pin for a host.
    # @param host [String] Hostname to pin (a URL is accepted; it is normalized)
    # @param pins [Array<String>] Array of certificate pins
    # @param options [Hash] Per-host overrides (:algorithm, :backup_pins, :strict_mode)
    def add_pin(host, pins, options = {})
      normalized_host = normalize_host(host)

      @pinned_hosts[normalized_host] = {
        pins: Array(pins),
        algorithm: options[:algorithm] || @config[:algorithm],
        backup_pins: options[:backup_pins] || [],
        strict_mode: options[:strict_mode] || @config[:strict_mode],
        added_at: Time.now
      }
    end

    # Validate certificate chain against pinned certificates.
    # Hosts with no configured pin are treated as valid ("no_pin_configured").
    # Results are memoized per (host, chain) pair.
    # @param host [String] Hostname being validated
    # @param cert_chain [Array<OpenSSL::X509::Certificate>] Certificate chain
    # @return [Hash] Validation result (:valid, :reason, ...)
    def validate_pin(host, cert_chain)
      normalized_host = normalize_host(host)
      pin_config = @pinned_hosts[normalized_host]

      return { valid: true, reason: "no_pin_configured" } unless pin_config

      # Check cache first
      cache_key = generate_cache_key(host, cert_chain)
      return @validation_cache[cache_key] if @validation_cache[cache_key]

      result = perform_pin_validation(pin_config, cert_chain)

      # Cache result for performance
      @validation_cache[cache_key] = result

      result
    end

    # Extract certificate pin from certificate.
    # The pin is "<algo>/<base64 digest of the public key DER>".
    # @param certificate [OpenSSL::X509::Certificate] Certificate to extract pin from
    # @param algorithm [String] Hash algorithm to use ("sha256" or "sha1")
    # @return [String] Certificate pin
    # @raise [ArgumentError] if the algorithm is unsupported
    def extract_pin(certificate, algorithm = "sha256")
      public_key_der = certificate.public_key.to_der

      case algorithm.downcase
      when "sha256"
        digest = OpenSSL::Digest::SHA256.digest(public_key_der)
        "sha256/#{[digest].pack('m0')}"
      when "sha1"
        digest = OpenSSL::Digest::SHA1.digest(public_key_der)
        "sha1/#{[digest].pack('m0')}"
      else
        raise ArgumentError, "Unsupported algorithm: #{algorithm}"
      end
    end

    # Get certificate chain from URL. Non-HTTPS URLs and connection failures
    # yield an empty array (failures are warned, not raised).
    # @param url [String] URL to get certificate chain from
    # @return [Array<OpenSSL::X509::Certificate>] Certificate chain
    def get_certificate_chain(url)
      uri = URI.parse(url)
      return [] unless uri.scheme == "https"

      cert_chain = []

      begin
        tcp_socket = TCPSocket.new(uri.host, uri.port || 443)
        ssl_context = OpenSSL::SSL::SSLContext.new
        # NOTE(review): VERIFY_NONE is deliberate here — we only want to read
        # the peer's chain in order to pin it, not to trust it. Do not reuse
        # this context for application traffic.
        ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE

        ssl_socket = OpenSSL::SSL::SSLSocket.new(tcp_socket, ssl_context)
        ssl_socket.hostname = uri.host # SNI
        ssl_socket.connect

        cert_chain = ssl_socket.peer_cert_chain || []

        ssl_socket.close
        tcp_socket.close
      rescue => e
        # Log error but don't raise to allow graceful handling
        warn "Certificate chain retrieval failed for #{url}: #{e.message}"
      end

      cert_chain
    end

    # Generate pins for a URL from its live certificate chain.
    # @param url [String] URL to generate pins for
    # @param algorithm [String] Hash algorithm to use
    # @return [Array<String>] Generated pins (empty if the chain is unavailable)
    def generate_pins_for_url(url, algorithm = "sha256")
      cert_chain = get_certificate_chain(url)
      return [] if cert_chain.empty?

      cert_chain.map { |cert| extract_pin(cert, algorithm) }
    end

    # Validate current pinning configuration by connecting to each pinned
    # host and checking its live chain against the configured pins.
    # @return [Hash] Validation report (:total_pins, :valid_pins, :invalid_pins, :issues)
    def validate_configuration
      report = {
        total_pins: @pinned_hosts.size,
        valid_pins: 0,
        invalid_pins: 0,
        issues: []
      }

      @pinned_hosts.each do |host, config|
        begin
          # Test connectivity and pin validation
          test_url = "https://#{host}"
          cert_chain = get_certificate_chain(test_url)

          if cert_chain.empty?
            report[:issues] << "Cannot retrieve certificate chain for #{host}"
            report[:invalid_pins] += 1
          else
            validation_result = perform_pin_validation(config, cert_chain)
            if validation_result[:valid]
              report[:valid_pins] += 1
            else
              report[:invalid_pins] += 1
              report[:issues] << "Pin validation failed for #{host}: #{validation_result[:reason]}"
            end
          end
        rescue => e
          report[:invalid_pins] += 1
          report[:issues] << "Error validating #{host}: #{e.message}"
        end
      end

      report
    end

    # Get pinning status for all configured hosts.
    # @return [Hash] Pinning status
    def pinning_status
      {
        enabled: @config[:pin_validation_enabled],
        total_hosts: @pinned_hosts.size,
        hosts: @pinned_hosts.keys,
        cache_size: @validation_cache.size,
        configuration: @config
      }
    end

    # Clear validation cache.
    def clear_cache
      @validation_cache.clear
    end

    # Remove pin for host (also evicts its cached validation results).
    # @param host [String] Host to remove pin for
    def remove_pin(host)
      normalized_host = normalize_host(host)
      @pinned_hosts.delete(normalized_host)

      # Clear related cache entries
      @validation_cache.delete_if { |key, _| key.include?(normalized_host) }
    end

    # Load pins from common CA configurations.
    # @param ca_name [String] CA name (letsencrypt, digicert, google)
    # @param hosts [Array<String>] Hosts to apply CA pins to
    # @raise [ArgumentError] if the CA name is unknown
    def load_ca_pins(ca_name, hosts)
      ca_pins = COMMON_CA_PINS[ca_name.downcase]
      raise ArgumentError, "Unknown CA: #{ca_name}" unless ca_pins

      Array(hosts).each do |host|
        add_pin(host, ca_pins, backup_pins: ca_pins)
      end
    end

    private

    # Strip protocol and path, keep only the lowercased hostname.
    def normalize_host(host)
      host.gsub(/^https?:\/\//, "").split("/").first.downcase
    end

    # Cache key is the normalized host plus a SHA-256 over the concatenated
    # DER of the whole chain, so any certificate change invalidates the entry.
    def generate_cache_key(host, cert_chain)
      cert_fingerprints = cert_chain.map { |cert| cert.to_der }.join
      "#{normalize_host(host)}_#{Digest::SHA256.hexdigest(cert_fingerprints)}"
    end

    # Compare pins extracted from the presented chain against the configured
    # primary + backup pins; any single match is accepted.
    def perform_pin_validation(pin_config, cert_chain)
      return { valid: false, reason: "empty_certificate_chain" } if cert_chain.empty?

      algorithm = pin_config[:algorithm]
      expected_pins = pin_config[:pins] + (pin_config[:backup_pins] || [])

      # Extract pins from certificate chain
      actual_pins = cert_chain.map { |cert| extract_pin(cert, algorithm) }

      # Check if any actual pin matches expected pins
      matching_pins = actual_pins & expected_pins

      if matching_pins.any?
        {
          valid: true,
          reason: "pin_match_found",
          matched_pins: matching_pins,
          algorithm: algorithm
        }
      else
        {
          valid: false,
          reason: "no_pin_match",
          expected_pins: expected_pins,
          actual_pins: actual_pins,
          algorithm: algorithm
        }
      end
    end
  end
end
|
@@ -0,0 +1,431 @@
|
|
1
|
+
# frozen_string_literal: true

require "json"

module AiRootShield
  # Enterprise policy management for customizable security rules and compliance.
  #
  # A policy (JSON file, Hash, or the built-in default) is deep-merged over
  # DEFAULT_POLICY; scan results from AI Root Shield are then checked against
  # its compliance rules and risk thresholds, with violations and audit logs
  # accumulated on the instance.
  class EnterprisePolicyManager
    # Default enterprise policy template. Custom policies are deep-merged
    # over this, so every key below is normally guaranteed to exist.
    DEFAULT_POLICY = {
      "version" => "1.0",
      "name" => "Default Enterprise Policy",
      "description" => "Standard enterprise security policy",
      "minimum_security_level" => 70,
      "compliance_rules" => {
        "device_requirements" => {
          "allow_rooted_devices" => false,
          "allow_jailbroken_devices" => false,
          "allow_emulators" => false,
          "require_screen_lock" => true,
          "minimum_os_version" => {
            "android" => "8.0",
            "ios" => "12.0"
          }
        },
        "network_security" => {
          "allow_vpn" => true,
          "allow_proxy" => false,
          "allow_tor" => false,
          "require_certificate_pinning" => true,
          "allowed_dns_servers" => [],
          "blocked_dns_servers" => []
        },
        "application_integrity" => {
          "allow_debug_builds" => false,
          "allow_repackaged_apps" => false,
          "require_code_signing" => true,
          "allowed_certificate_issuers" => []
        },
        "runtime_protection" => {
          "enable_rasp" => true,
          "allow_debugging" => false,
          "allow_hooking_frameworks" => false,
          "enable_tamper_detection" => true
        }
      },
      "risk_thresholds" => {
        "low" => 20,
        "medium" => 50,
        "high" => 70,
        "critical" => 90
      },
      "actions" => {
        "on_policy_violation" => "block",
        "on_high_risk" => "alert",
        "on_critical_risk" => "block",
        "custom_actions" => {}
      },
      "reporting" => {
        "enable_audit_logs" => true,
        "log_level" => "info",
        "retention_days" => 90
      }
    }.freeze

    # Policy violation severity levels.
    VIOLATION_LEVELS = %w[info warning critical].freeze

    # @param policy_config [String, Hash, nil] file path, policy hash, or nil for the default
    def initialize(policy_config = nil)
      @policy = load_policy(policy_config)
      @violations = []        # all violations ever recorded by this instance
      @compliance_cache = {}  # device_id => last compliance result
      @audit_logs = []
    end

    # Load policy from file or use provided configuration.
    # @param policy_source [String, Hash, nil] Policy file path, hash, or nil for default
    # @return [Hash] Loaded policy
    # @raise [ArgumentError] on an unsupported source type
    def load_policy(policy_source = nil)
      case policy_source
      when String
        load_policy_from_file(policy_source)
      when Hash
        merge_with_default_policy(policy_source)
      when nil
        DEFAULT_POLICY.dup
      else
        raise ArgumentError, "Invalid policy source type: #{policy_source.class}"
      end
    end

    # Validate device compliance against enterprise policy.
    # @param scan_result [Hash] Device scan result from AI Root Shield
    # @return [Hash] Compliance validation result
    def validate_compliance(scan_result)
      compliance_result = {
        compliant: true,
        violations: [],
        risk_assessment: {},
        recommended_actions: [],
        policy_version: @policy["version"],
        validation_timestamp: Time.now.to_f
      }

      # Check device requirements
      compliance_result = validate_device_requirements(scan_result, compliance_result)

      # Check network security
      compliance_result = validate_network_security(scan_result, compliance_result)

      # Check application integrity
      compliance_result = validate_application_integrity(scan_result, compliance_result)

      # Check runtime protection
      compliance_result = validate_runtime_protection(scan_result, compliance_result)

      # Assess overall risk against thresholds
      compliance_result = assess_risk_thresholds(scan_result, compliance_result)

      # Determine final compliance status
      compliance_result[:compliant] = compliance_result[:violations].empty?

      # Log compliance check
      log_compliance_check(scan_result, compliance_result)

      compliance_result
    end

    # Check if minimum security level is met. Security level is defined as
    # the inverse of the risk score (100 - risk).
    # @param risk_score [Integer] Current risk score
    # @return [Boolean] True if minimum security level is met
    def meets_minimum_security_level?(risk_score)
      minimum_level = @policy["minimum_security_level"]
      (100 - risk_score) >= minimum_level
    end

    # Get policy configuration.
    # @return [Hash] Current policy configuration (shallow copy)
    def policy_configuration
      @policy.dup
    end

    # Update policy configuration (deep-merged over the defaults).
    # @param new_policy [Hash] New policy configuration
    def update_policy(new_policy)
      @policy = merge_with_default_policy(new_policy)
      clear_compliance_cache
      log_audit_event("policy_updated", "Enterprise policy configuration updated")
    end

    # Get compliance violations.
    # @return [Array<Hash>] List of compliance violations
    def compliance_violations
      @violations.dup
    end

    # Get audit logs.
    # @return [Array<Hash>] Audit log entries
    def audit_logs
      @audit_logs.dup
    end

    # Clear compliance cache.
    def clear_compliance_cache
      @compliance_cache.clear
    end

    # Export policy to JSON.
    # @return [String] Policy as JSON string
    def export_policy
      JSON.pretty_generate(@policy)
    end

    # Import policy from JSON.
    # @param json_policy [String] Policy as JSON string
    # @raise [JSON::ParserError] on malformed JSON
    def import_policy(json_policy)
      policy_hash = JSON.parse(json_policy)
      update_policy(policy_hash)
    end

    # Get policy statistics.
    # @return [Hash] Policy usage statistics
    def policy_statistics
      {
        policy_version: @policy["version"],
        total_violations: @violations.size,
        violation_types: @violations.group_by { |v| v[:type] }.transform_values(&:size),
        compliance_checks: @compliance_cache.size,
        audit_log_entries: @audit_logs.size,
        last_policy_update: @audit_logs.reverse.find { |log| log[:event] == "policy_updated" }&.dig(:timestamp)
      }
    end

    private

    # Read and parse a JSON policy file, merging it over the defaults.
    def load_policy_from_file(file_path)
      unless File.exist?(file_path)
        raise ArgumentError, "Policy file not found: #{file_path}"
      end

      policy_content = File.read(file_path)
      policy_hash = JSON.parse(policy_content)
      merge_with_default_policy(policy_hash)
    rescue JSON::ParserError => e
      raise ArgumentError, "Invalid JSON in policy file: #{e.message}"
    end

    def merge_with_default_policy(custom_policy)
      # Deep merge custom policy with default policy
      deep_merge(DEFAULT_POLICY.dup, custom_policy)
    end

    # Recursive, non-mutating merge: nested hashes merge, everything else
    # (including arrays and nil) is replaced wholesale by the custom value.
    def deep_merge(hash1, hash2)
      hash1.merge(hash2) do |key, old_val, new_val|
        if old_val.is_a?(Hash) && new_val.is_a?(Hash)
          deep_merge(old_val, new_val)
        else
          new_val
        end
      end
    end

    def validate_device_requirements(scan_result, compliance_result)
      device_rules = @policy.dig("compliance_rules", "device_requirements") || {}

      # Check rooted/jailbroken devices
      if !device_rules["allow_rooted_devices"] && has_root_indicators?(scan_result)
        add_violation(compliance_result, "device_rooted", "critical", "Rooted device detected")
      end

      if !device_rules["allow_jailbroken_devices"] && has_jailbreak_indicators?(scan_result)
        add_violation(compliance_result, "device_jailbroken", "critical", "Jailbroken device detected")
      end

      # Check emulators
      if !device_rules["allow_emulators"] && has_emulator_indicators?(scan_result)
        add_violation(compliance_result, "emulator_detected", "warning", "Emulator/simulator detected")
      end

      compliance_result
    end

    def validate_network_security(scan_result, compliance_result)
      network_rules = @policy.dig("compliance_rules", "network_security") || {}

      # Check VPN usage
      if !network_rules["allow_vpn"] && has_vpn_indicators?(scan_result)
        add_violation(compliance_result, "vpn_detected", "warning", "VPN usage detected")
      end

      # Check proxy usage
      if !network_rules["allow_proxy"] && has_proxy_indicators?(scan_result)
        add_violation(compliance_result, "proxy_detected", "warning", "Proxy usage detected")
      end

      # Check Tor usage
      if !network_rules["allow_tor"] && has_tor_indicators?(scan_result)
        add_violation(compliance_result, "tor_detected", "critical", "Tor usage detected")
      end

      compliance_result
    end

    def validate_application_integrity(scan_result, compliance_result)
      app_rules = @policy.dig("compliance_rules", "application_integrity") || {}

      # Check debug builds
      if !app_rules["allow_debug_builds"] && has_debug_indicators?(scan_result)
        add_violation(compliance_result, "debug_build", "warning", "Debug build detected")
      end

      # Check repackaged apps
      if !app_rules["allow_repackaged_apps"] && has_repackaging_indicators?(scan_result)
        add_violation(compliance_result, "app_repackaged", "critical", "Repackaged application detected")
      end

      compliance_result
    end

    def validate_runtime_protection(scan_result, compliance_result)
      runtime_rules = @policy.dig("compliance_rules", "runtime_protection") || {}

      # Check debugging
      if !runtime_rules["allow_debugging"] && has_debugging_indicators?(scan_result)
        add_violation(compliance_result, "debugging_detected", "critical", "Runtime debugging detected")
      end

      # Check hooking frameworks
      if !runtime_rules["allow_hooking_frameworks"] && has_hooking_indicators?(scan_result)
        add_violation(compliance_result, "hooking_detected", "critical", "Hooking framework detected")
      end

      compliance_result
    end

    def assess_risk_thresholds(scan_result, compliance_result)
      risk_score = scan_result[:risk_score] || 0
      configured = @policy["risk_thresholds"] || {}
      # FIX: backfill missing or nil threshold values from the defaults.
      # deep_merge replaces non-hash values wholesale, so a custom policy
      # could previously leave nil endpoints here and the ranges below
      # would raise TypeError mid-validation.
      thresholds = DEFAULT_POLICY["risk_thresholds"].merge(configured) do |_key, default_val, custom_val|
        custom_val.nil? ? default_val : custom_val
      end

      risk_level = case risk_score
                   when 0..thresholds["low"]
                     "low"
                   when thresholds["low"]..thresholds["medium"]
                     "medium"
                   when thresholds["medium"]..thresholds["high"]
                     "high"
                   else
                     "critical"
                   end

      compliance_result[:risk_assessment] = {
        score: risk_score,
        level: risk_level,
        threshold_exceeded: risk_score > thresholds["high"]
      }

      # Add violation if risk threshold exceeded
      if risk_score > thresholds["critical"]
        add_violation(compliance_result, "critical_risk", "critical",
                      "Risk score #{risk_score} exceeds critical threshold #{thresholds['critical']}")
      elsif risk_score > thresholds["high"]
        add_violation(compliance_result, "high_risk", "warning",
                      "Risk score #{risk_score} exceeds high threshold #{thresholds['high']}")
      end

      compliance_result
    end

    # Record a violation both on the per-check result and on the instance.
    def add_violation(compliance_result, type, severity, message)
      violation = {
        type: type,
        severity: severity,
        message: message,
        timestamp: Time.now.to_f
      }

      compliance_result[:violations] << violation
      @violations << violation
    end

    # The has_*_indicators? predicates below do substring matching on the
    # upcased factor names reported by the scanner.

    def has_root_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("ROOT") }
    end

    def has_jailbreak_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("JAILBREAK") || factor.to_s.upcase.include?("CYDIA") }
    end

    def has_emulator_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("EMULATOR") || factor.to_s.upcase.include?("SIMULATOR") }
    end

    def has_vpn_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor|
        factor_str = factor.to_s.upcase
        factor_str.include?("VPN") || factor_str.include?("NETWORK_VPN_SERVICE_DETECTED")
      }
    end

    def has_proxy_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("PROXY") }
    end

    def has_tor_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("TOR") }
    end

    def has_debug_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("DEBUG") }
    end

    def has_repackaging_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("REPACKAG") }
    end

    def has_debugging_indicators?(scan_result)
      rasp_status = scan_result[:rasp_status] || {}
      rasp_status.dig(:events_detected).to_i > 0
    end

    def has_hooking_indicators?(scan_result)
      factors = scan_result[:factors] || []
      factors.any? { |factor| factor.to_s.upcase.include?("FRIDA") || factor.to_s.upcase.include?("XPOSED") }
    end

    def log_compliance_check(scan_result, compliance_result)
      return unless @policy.dig("reporting", "enable_audit_logs")

      log_audit_event("compliance_check", "Device compliance validation performed", {
        device_id: scan_result[:device_id],
        risk_score: scan_result[:risk_score],
        compliant: compliance_result[:compliant],
        violations_count: compliance_result[:violations].size
      })

      # Update compliance cache for statistics
      @compliance_cache[scan_result[:device_id]] = compliance_result
    end

    # Append an audit entry (no-op when audit logging is disabled) and
    # prune entries older than the retention window.
    def log_audit_event(event_type, message, details = {})
      return unless @policy.dig("reporting", "enable_audit_logs")

      audit_entry = {
        event: event_type,
        message: message,
        details: details,
        timestamp: Time.now.to_f,
        policy_version: @policy["version"]
      }

      @audit_logs << audit_entry

      # Clean up old logs based on retention policy
      cleanup_old_logs
    end

    def cleanup_old_logs
      retention_days = @policy.dig("reporting", "retention_days") || 90
      cutoff_time = Time.now.to_f - (retention_days * 24 * 60 * 60)

      @audit_logs.reject! { |log| log[:timestamp] < cutoff_time }
    end
  end
end
|