aws-cis-controls-assessment 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aws_cis_assessment/__init__.py +11 -0
- aws_cis_assessment/cli/__init__.py +3 -0
- aws_cis_assessment/cli/examples.py +274 -0
- aws_cis_assessment/cli/main.py +1259 -0
- aws_cis_assessment/cli/utils.py +356 -0
- aws_cis_assessment/config/__init__.py +1 -0
- aws_cis_assessment/config/config_loader.py +328 -0
- aws_cis_assessment/config/rules/cis_controls_ig1.yaml +590 -0
- aws_cis_assessment/config/rules/cis_controls_ig2.yaml +412 -0
- aws_cis_assessment/config/rules/cis_controls_ig3.yaml +100 -0
- aws_cis_assessment/controls/__init__.py +1 -0
- aws_cis_assessment/controls/base_control.py +400 -0
- aws_cis_assessment/controls/ig1/__init__.py +239 -0
- aws_cis_assessment/controls/ig1/control_1_1.py +586 -0
- aws_cis_assessment/controls/ig1/control_2_2.py +231 -0
- aws_cis_assessment/controls/ig1/control_3_3.py +718 -0
- aws_cis_assessment/controls/ig1/control_3_4.py +235 -0
- aws_cis_assessment/controls/ig1/control_4_1.py +461 -0
- aws_cis_assessment/controls/ig1/control_access_keys.py +310 -0
- aws_cis_assessment/controls/ig1/control_advanced_security.py +512 -0
- aws_cis_assessment/controls/ig1/control_backup_recovery.py +510 -0
- aws_cis_assessment/controls/ig1/control_cloudtrail_logging.py +197 -0
- aws_cis_assessment/controls/ig1/control_critical_security.py +422 -0
- aws_cis_assessment/controls/ig1/control_data_protection.py +898 -0
- aws_cis_assessment/controls/ig1/control_iam_advanced.py +573 -0
- aws_cis_assessment/controls/ig1/control_iam_governance.py +493 -0
- aws_cis_assessment/controls/ig1/control_iam_policies.py +383 -0
- aws_cis_assessment/controls/ig1/control_instance_optimization.py +100 -0
- aws_cis_assessment/controls/ig1/control_network_enhancements.py +203 -0
- aws_cis_assessment/controls/ig1/control_network_security.py +672 -0
- aws_cis_assessment/controls/ig1/control_s3_enhancements.py +173 -0
- aws_cis_assessment/controls/ig1/control_s3_security.py +422 -0
- aws_cis_assessment/controls/ig1/control_vpc_security.py +235 -0
- aws_cis_assessment/controls/ig2/__init__.py +172 -0
- aws_cis_assessment/controls/ig2/control_3_10.py +698 -0
- aws_cis_assessment/controls/ig2/control_3_11.py +1330 -0
- aws_cis_assessment/controls/ig2/control_5_2.py +393 -0
- aws_cis_assessment/controls/ig2/control_advanced_encryption.py +355 -0
- aws_cis_assessment/controls/ig2/control_codebuild_security.py +263 -0
- aws_cis_assessment/controls/ig2/control_encryption_rest.py +382 -0
- aws_cis_assessment/controls/ig2/control_encryption_transit.py +382 -0
- aws_cis_assessment/controls/ig2/control_network_ha.py +467 -0
- aws_cis_assessment/controls/ig2/control_remaining_encryption.py +426 -0
- aws_cis_assessment/controls/ig2/control_remaining_rules.py +363 -0
- aws_cis_assessment/controls/ig2/control_service_logging.py +402 -0
- aws_cis_assessment/controls/ig3/__init__.py +49 -0
- aws_cis_assessment/controls/ig3/control_12_8.py +395 -0
- aws_cis_assessment/controls/ig3/control_13_1.py +467 -0
- aws_cis_assessment/controls/ig3/control_3_14.py +523 -0
- aws_cis_assessment/controls/ig3/control_7_1.py +359 -0
- aws_cis_assessment/core/__init__.py +1 -0
- aws_cis_assessment/core/accuracy_validator.py +425 -0
- aws_cis_assessment/core/assessment_engine.py +1266 -0
- aws_cis_assessment/core/audit_trail.py +491 -0
- aws_cis_assessment/core/aws_client_factory.py +313 -0
- aws_cis_assessment/core/error_handler.py +607 -0
- aws_cis_assessment/core/models.py +166 -0
- aws_cis_assessment/core/scoring_engine.py +459 -0
- aws_cis_assessment/reporters/__init__.py +8 -0
- aws_cis_assessment/reporters/base_reporter.py +454 -0
- aws_cis_assessment/reporters/csv_reporter.py +835 -0
- aws_cis_assessment/reporters/html_reporter.py +2162 -0
- aws_cis_assessment/reporters/json_reporter.py +561 -0
- aws_cis_controls_assessment-1.0.3.dist-info/METADATA +248 -0
- aws_cis_controls_assessment-1.0.3.dist-info/RECORD +77 -0
- aws_cis_controls_assessment-1.0.3.dist-info/WHEEL +5 -0
- aws_cis_controls_assessment-1.0.3.dist-info/entry_points.txt +2 -0
- aws_cis_controls_assessment-1.0.3.dist-info/licenses/LICENSE +21 -0
- aws_cis_controls_assessment-1.0.3.dist-info/top_level.txt +2 -0
- docs/README.md +94 -0
- docs/assessment-logic.md +766 -0
- docs/cli-reference.md +698 -0
- docs/config-rule-mappings.md +393 -0
- docs/developer-guide.md +858 -0
- docs/installation.md +299 -0
- docs/troubleshooting.md +634 -0
- docs/user-guide.md +487 -0
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
"""Control 3.3: Configure Data Access Control Lists - S3 enhancements."""
|
|
2
|
+
|
|
3
|
+
from typing import Dict, List, Any
|
|
4
|
+
import logging
|
|
5
|
+
from botocore.exceptions import ClientError
|
|
6
|
+
|
|
7
|
+
from aws_cis_assessment.controls.base_control import BaseConfigRuleAssessment
|
|
8
|
+
from aws_cis_assessment.core.models import ComplianceResult, ComplianceStatus
|
|
9
|
+
from aws_cis_assessment.core.aws_client_factory import AWSClientFactory
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class S3AccountLevelPublicAccessBlocksPeriodicAssessment(BaseConfigRuleAssessment):
    """Assessment for s3-account-level-public-access-blocks-periodic AWS Config rule.

    Verifies that the account-wide S3 Block Public Access configuration has all
    four of its settings enabled.
    """

    def __init__(self):
        super().__init__(
            rule_name="s3-account-level-public-access-blocks-periodic",
            control_id="3.3",
            resource_types=["AWS::::Account"]
        )

    def _get_resources(self, aws_factory: AWSClientFactory, resource_type: str, region: str) -> List[Dict[str, Any]]:
        """Get account-level resource for S3 public access block check."""
        # This rule targets the account itself, so the only "resource" is the
        # current account id.
        if resource_type == "AWS::::Account":
            return [{'AccountId': aws_factory.account_id}]
        return []

    def _evaluate_resource_compliance(self, resource: Dict[str, Any], aws_factory: AWSClientFactory, region: str) -> ComplianceResult:
        """Evaluate if account has S3 public access blocks configured."""
        account_id = resource.get('AccountId', 'unknown')

        try:
            client = aws_factory.get_client('s3control', region)
            response = aws_factory.aws_api_call_with_retry(
                lambda: client.get_public_access_block(AccountId=account_id)
            )
            settings = response.get('PublicAccessBlockConfiguration', {})

            # Check the four flags in a fixed order so the report text is stable.
            flag_names = ('BlockPublicAcls', 'IgnorePublicAcls',
                          'BlockPublicPolicy', 'RestrictPublicBuckets')
            disabled = [name for name in flag_names if not settings.get(name, False)]

            if disabled:
                compliance_status = ComplianceStatus.NON_COMPLIANT
                evaluation_reason = f"Account {account_id} is missing S3 public access blocks: {', '.join(disabled)}"
            else:
                compliance_status = ComplianceStatus.COMPLIANT
                evaluation_reason = f"Account {account_id} has all S3 public access blocks enabled"

        except ClientError as e:
            # An entirely absent configuration is an expected, distinct failure mode.
            if e.response.get('Error', {}).get('Code') == 'NoSuchPublicAccessBlockConfiguration':
                compliance_status = ComplianceStatus.NON_COMPLIANT
                evaluation_reason = f"Account {account_id} does not have S3 public access blocks configured"
            else:
                compliance_status = ComplianceStatus.ERROR
                evaluation_reason = f"Error checking S3 public access blocks for account {account_id}: {str(e)}"

        return ComplianceResult(
            resource_id=account_id,
            resource_type="AWS::::Account",
            compliance_status=compliance_status,
            evaluation_reason=evaluation_reason,
            config_rule_name=self.rule_name,
            region=region
        )
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class S3BucketPublicWriteProhibitedAssessment(BaseConfigRuleAssessment):
    """Assessment for s3-bucket-public-write-prohibited AWS Config rule.

    Flags buckets whose ACL grants write-capable permissions to the public
    (AllUsers) or to any authenticated AWS user (AuthenticatedUsers).
    """

    def __init__(self):
        super().__init__(
            rule_name="s3-bucket-public-write-prohibited",
            control_id="3.3",
            resource_types=["AWS::S3::Bucket"]
        )

    def _get_resources(self, aws_factory: AWSClientFactory, resource_type: str, region: str) -> List[Dict[str, Any]]:
        """Get S3 buckets."""
        if resource_type != "AWS::S3::Bucket":
            return []

        try:
            s3_client = aws_factory.get_client('s3', region)
            listing = aws_factory.aws_api_call_with_retry(s3_client.list_buckets)
            return [
                {'Name': entry.get('Name'), 'CreationDate': entry.get('CreationDate')}
                for entry in listing.get('Buckets', [])
            ]
        except ClientError as e:
            logger.error(f"Error retrieving S3 buckets: {e}")
            raise

    def _evaluate_resource_compliance(self, resource: Dict[str, Any], aws_factory: AWSClientFactory, region: str) -> ComplianceResult:
        """Evaluate if S3 bucket prohibits public write access."""
        bucket_name = resource.get('Name', 'unknown')

        # ACL grantee groups that represent the public / any authenticated user,
        # and the permissions that allow writing.
        public_groups = (
            'http://acs.amazonaws.com/groups/global/AllUsers',
            'http://acs.amazonaws.com/groups/global/AuthenticatedUsers',
        )
        write_permissions = ('WRITE', 'WRITE_ACP', 'FULL_CONTROL')

        def _grants_public_write(grant: Dict[str, Any]) -> bool:
            grantee = grant.get('Grantee', {})
            return (grantee.get('Type') == 'Group'
                    and grantee.get('URI') in public_groups
                    and grant.get('Permission', '') in write_permissions)

        try:
            s3_client = aws_factory.get_client('s3', region)

            try:
                acl_response = aws_factory.aws_api_call_with_retry(
                    lambda: s3_client.get_bucket_acl(Bucket=bucket_name)
                )

                if any(_grants_public_write(g) for g in acl_response.get('Grants', [])):
                    compliance_status = ComplianceStatus.NON_COMPLIANT
                    evaluation_reason = f"S3 bucket {bucket_name} allows public write access"
                else:
                    compliance_status = ComplianceStatus.COMPLIANT
                    evaluation_reason = f"S3 bucket {bucket_name} does not allow public write access"

            except ClientError as e:
                # Inaccessible or vanished buckets are reported as ERROR; anything
                # else bubbles up to the outer handler.
                if e.response.get('Error', {}).get('Code') in ['AccessDenied', 'NoSuchBucket']:
                    compliance_status = ComplianceStatus.ERROR
                    evaluation_reason = f"Cannot access ACL for bucket {bucket_name}: {str(e)}"
                else:
                    raise

        except ClientError as e:
            compliance_status = ComplianceStatus.ERROR
            evaluation_reason = f"Error checking public write access for bucket {bucket_name}: {str(e)}"

        return ComplianceResult(
            resource_id=bucket_name,
            resource_type="AWS::S3::Bucket",
            compliance_status=compliance_status,
            evaluation_reason=evaluation_reason,
            config_rule_name=self.rule_name,
            region=region
        )
|
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
"""
|
|
2
|
+
CIS Control 3.3 - S3 Security Controls
|
|
3
|
+
Critical S3 security rules for data protection and access control.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import logging
|
|
7
|
+
from typing import List, Dict, Any, Optional
|
|
8
|
+
import boto3
|
|
9
|
+
import json
|
|
10
|
+
from botocore.exceptions import ClientError, NoCredentialsError
|
|
11
|
+
|
|
12
|
+
from aws_cis_assessment.controls.base_control import BaseConfigRuleAssessment
|
|
13
|
+
from aws_cis_assessment.core.models import ComplianceResult, ComplianceStatus
|
|
14
|
+
from aws_cis_assessment.core.aws_client_factory import AWSClientFactory
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class S3BucketSSLRequestsOnlyAssessment(BaseConfigRuleAssessment):
    """
    CIS Control 3.3 - Configure Data Access Control Lists
    AWS Config Rule: s3-bucket-ssl-requests-only

    Ensures S3 buckets require SSL/TLS for all requests to protect data in transit.

    A bucket is compliant when its bucket policy contains a Deny statement
    conditioned on ``aws:SecureTransport`` being false.
    """

    def __init__(self):
        super().__init__(
            rule_name="s3-bucket-ssl-requests-only",
            control_id="3.3",
            resource_types=["AWS::S3::Bucket"]
        )

    @staticmethod
    def _denies_insecure_transport(statement: Dict[str, Any]) -> bool:
        """Return True if *statement* is a Deny gated on aws:SecureTransport == false.

        Per the IAM policy grammar, a condition value may be a string, a JSON
        boolean, or a list of strings, and string booleans are case-insensitive;
        all of these forms are accepted here.
        """
        if statement.get('Effect', '') != 'Deny':
            return False
        bool_conditions = statement.get('Condition', {}).get('Bool', {})
        if 'aws:SecureTransport' not in bool_conditions:
            return False
        value = bool_conditions['aws:SecureTransport']
        values = value if isinstance(value, list) else [value]
        return any(v is False or str(v).lower() == 'false' for v in values)

    def _get_resources(self, aws_factory: AWSClientFactory, resource_type: str, region: str) -> List[Dict[str, Any]]:
        """Get all S3 buckets (only from us-east-1 to avoid duplicates)."""
        if resource_type != "AWS::S3::Bucket":
            return []

        # S3 bucket listing is global; enumerating only from us-east-1 ensures
        # each bucket is assessed exactly once.
        if region != 'us-east-1':
            return []

        try:
            s3_client = aws_factory.get_client('s3', region)

            response = s3_client.list_buckets()
            buckets = []

            for bucket in response.get('Buckets', []):
                bucket_name = bucket['Name']

                try:
                    # Inspect the bucket policy for an SSL-enforcement statement.
                    has_ssl_policy = False
                    ssl_policy_statements = []

                    try:
                        policy_response = s3_client.get_bucket_policy(Bucket=bucket_name)
                        policy_doc = json.loads(policy_response['Policy'])
                        statements = policy_doc.get('Statement', [])
                        # 'Statement' may legally be a single object rather than a list.
                        if isinstance(statements, dict):
                            statements = [statements]

                        for statement in statements:
                            if isinstance(statement, dict) and self._denies_insecure_transport(statement):
                                has_ssl_policy = True
                                ssl_policy_statements.append(statement)

                    except ClientError as e:
                        # A bucket with no policy simply has no SSL enforcement.
                        if e.response.get('Error', {}).get('Code') != 'NoSuchBucketPolicy':
                            raise  # bare raise preserves the original traceback

                    buckets.append({
                        'BucketName': bucket_name,
                        'HasSSLPolicy': has_ssl_policy,
                        'SSLPolicyStatements': ssl_policy_statements
                    })

                except ClientError as e:
                    error_code = e.response.get('Error', {}).get('Code', '')
                    if error_code in ['NoSuchBucket', 'AccessDenied']:
                        # Bucket vanished or is unreadable; skip it silently.
                        continue
                    else:
                        logger.warning(f"Error checking S3 bucket {bucket_name}: {e}")
                        continue

            logger.debug(f"Found {len(buckets)} S3 buckets from {region}")
            return buckets

        except ClientError as e:
            logger.error(f"Error retrieving S3 buckets from {region}: {e}")
            raise
        except Exception as e:
            logger.error(f"Unexpected error retrieving S3 buckets from {region}: {e}")
            raise

    def _evaluate_resource_compliance(self, resource: Dict[str, Any], aws_factory: AWSClientFactory, region: str) -> ComplianceResult:
        """Evaluate if S3 bucket requires SSL/TLS for requests."""
        bucket_name = resource.get('BucketName', 'unknown')
        has_ssl_policy = resource.get('HasSSLPolicy', False)

        if has_ssl_policy:
            return ComplianceResult(
                resource_id=bucket_name,
                resource_type="AWS::S3::Bucket",
                compliance_status=ComplianceStatus.COMPLIANT,
                evaluation_reason="S3 bucket has policy requiring SSL/TLS for requests",
                config_rule_name=self.rule_name,
                region=region
            )
        else:
            return ComplianceResult(
                resource_id=bucket_name,
                resource_type="AWS::S3::Bucket",
                compliance_status=ComplianceStatus.NON_COMPLIANT,
                evaluation_reason="S3 bucket does not require SSL/TLS for requests",
                config_rule_name=self.rule_name,
                region=region
            )
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class S3BucketServerSideEncryptionEnabledAssessment(BaseConfigRuleAssessment):
    """
    CIS Control 3.3 - Configure Data Access Control Lists
    AWS Config Rule: s3-bucket-server-side-encryption-enabled

    Ensures S3 buckets have server-side encryption enabled to protect data at rest.
    """

    def __init__(self):
        super().__init__(
            rule_name="s3-bucket-server-side-encryption-enabled",
            control_id="3.3",
            resource_types=["AWS::S3::Bucket"]
        )

    def _get_resources(self, aws_factory: AWSClientFactory, resource_type: str, region: str) -> List[Dict[str, Any]]:
        """Get all S3 buckets with encryption configuration."""
        if resource_type != "AWS::S3::Bucket":
            return []

        # S3 bucket listing is global; only enumerate from us-east-1 so each
        # bucket is assessed exactly once.
        if region != 'us-east-1':
            return []

        try:
            s3_client = aws_factory.get_client('s3', region)
            listing = s3_client.list_buckets()
            buckets = []

            for entry in listing.get('Buckets', []):
                name = entry['Name']

                try:
                    rules = []
                    try:
                        enc_response = s3_client.get_bucket_encryption(Bucket=name)
                        rules = enc_response.get('ServerSideEncryptionConfiguration', {}).get('Rules', [])
                    except ClientError as e:
                        # A missing encryption configuration is the "not encrypted"
                        # case, not an error.
                        if e.response.get('Error', {}).get('Code') != 'ServerSideEncryptionConfigurationNotFoundError':
                            raise e

                    encryption_rules = [
                        {
                            'SSEAlgorithm': rule.get('ApplyServerSideEncryptionByDefault', {}).get('SSEAlgorithm', ''),
                            'KMSMasterKeyID': rule.get('ApplyServerSideEncryptionByDefault', {}).get('KMSMasterKeyID', ''),
                        }
                        for rule in rules
                    ]

                    buckets.append({
                        'BucketName': name,
                        'HasEncryption': bool(rules),
                        'EncryptionRules': encryption_rules
                    })

                except ClientError as e:
                    code = e.response.get('Error', {}).get('Code', '')
                    if code not in ['NoSuchBucket', 'AccessDenied']:
                        logger.warning(f"Error checking S3 bucket {name}: {e}")
                    continue

            logger.debug(f"Found {len(buckets)} S3 buckets from {region}")
            return buckets

        except ClientError as e:
            logger.error(f"Error retrieving S3 buckets from {region}: {e}")
            raise
        except Exception as e:
            logger.error(f"Unexpected error retrieving S3 buckets from {region}: {e}")
            raise

    def _evaluate_resource_compliance(self, resource: Dict[str, Any], aws_factory: AWSClientFactory, region: str) -> ComplianceResult:
        """Evaluate if S3 bucket has server-side encryption enabled."""
        bucket_name = resource.get('BucketName', 'unknown')

        if resource.get('HasEncryption', False):
            algorithms = [rule.get('SSEAlgorithm', 'Unknown')
                          for rule in resource.get('EncryptionRules', [])]
            return ComplianceResult(
                resource_id=bucket_name,
                resource_type="AWS::S3::Bucket",
                compliance_status=ComplianceStatus.COMPLIANT,
                evaluation_reason=f"S3 bucket has server-side encryption enabled: {', '.join(algorithms)}",
                config_rule_name=self.rule_name,
                region=region
            )

        return ComplianceResult(
            resource_id=bucket_name,
            resource_type="AWS::S3::Bucket",
            compliance_status=ComplianceStatus.NON_COMPLIANT,
            evaluation_reason="S3 bucket does not have server-side encryption enabled",
            config_rule_name=self.rule_name,
            region=region
        )
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
class S3BucketLoggingEnabledAssessment(BaseConfigRuleAssessment):
    """
    CIS Control 3.3 - Configure Data Access Control Lists
    AWS Config Rule: s3-bucket-logging-enabled

    Ensures S3 buckets have access logging enabled for audit and compliance.
    """

    def __init__(self):
        super().__init__(
            rule_name="s3-bucket-logging-enabled",
            control_id="3.3",
            resource_types=["AWS::S3::Bucket"]
        )

    def _get_resources(self, aws_factory: AWSClientFactory, resource_type: str, region: str) -> List[Dict[str, Any]]:
        """Get all S3 buckets with logging configuration."""
        if resource_type != "AWS::S3::Bucket":
            return []

        # S3 bucket listing is global; only enumerate from us-east-1 so each
        # bucket is assessed exactly once.
        if region != 'us-east-1':
            return []

        try:
            s3_client = aws_factory.get_client('s3', region)
            listing = s3_client.list_buckets()
            buckets = []

            for entry in listing.get('Buckets', []):
                name = entry['Name']

                try:
                    # 'LoggingEnabled' is absent entirely when logging is off.
                    enabled = s3_client.get_bucket_logging(Bucket=name).get('LoggingEnabled', {})
                    buckets.append({
                        'BucketName': name,
                        'HasLogging': bool(enabled),
                        'TargetBucket': enabled.get('TargetBucket', '') if enabled else '',
                        'TargetPrefix': enabled.get('TargetPrefix', '') if enabled else ''
                    })

                except ClientError as e:
                    code = e.response.get('Error', {}).get('Code', '')
                    if code not in ['NoSuchBucket', 'AccessDenied']:
                        logger.warning(f"Error checking S3 bucket {name}: {e}")
                    continue

            logger.debug(f"Found {len(buckets)} S3 buckets from {region}")
            return buckets

        except ClientError as e:
            logger.error(f"Error retrieving S3 buckets from {region}: {e}")
            raise
        except Exception as e:
            logger.error(f"Unexpected error retrieving S3 buckets from {region}: {e}")
            raise

    def _evaluate_resource_compliance(self, resource: Dict[str, Any], aws_factory: AWSClientFactory, region: str) -> ComplianceResult:
        """Evaluate if S3 bucket has access logging enabled."""
        bucket_name = resource.get('BucketName', 'unknown')

        if resource.get('HasLogging', False):
            target_bucket = resource.get('TargetBucket', '')
            return ComplianceResult(
                resource_id=bucket_name,
                resource_type="AWS::S3::Bucket",
                compliance_status=ComplianceStatus.COMPLIANT,
                evaluation_reason=f"S3 bucket has access logging enabled (target: {target_bucket})",
                config_rule_name=self.rule_name,
                region=region
            )

        return ComplianceResult(
            resource_id=bucket_name,
            resource_type="AWS::S3::Bucket",
            compliance_status=ComplianceStatus.NON_COMPLIANT,
            evaluation_reason="S3 bucket does not have access logging enabled",
            config_rule_name=self.rule_name,
            region=region
        )
|
|
333
|
+
|
|
334
|
+
|
|
335
|
+
class S3BucketVersioningEnabledAssessment(BaseConfigRuleAssessment):
    """
    CIS Control 3.4 - Enforce Data Retention
    AWS Config Rule: s3-bucket-versioning-enabled

    Ensures S3 buckets have versioning enabled for data protection and recovery.
    """

    def __init__(self):
        super().__init__(
            rule_name="s3-bucket-versioning-enabled",
            control_id="3.4",
            resource_types=["AWS::S3::Bucket"]
        )

    def _get_resources(self, aws_factory: AWSClientFactory, resource_type: str, region: str) -> List[Dict[str, Any]]:
        """Get all S3 buckets with versioning configuration."""
        if resource_type != "AWS::S3::Bucket":
            return []

        # S3 is global, only check from us-east-1 to avoid duplicate checks
        if region != 'us-east-1':
            return []

        try:
            s3_client = aws_factory.get_client('s3', region)

            response = s3_client.list_buckets()
            buckets = []

            for bucket in response.get('Buckets', []):
                bucket_name = bucket['Name']

                try:
                    # Get bucket versioning configuration. Both 'Status' and
                    # 'MFADelete' are omitted from the response when never set.
                    versioning_response = s3_client.get_bucket_versioning(Bucket=bucket_name)
                    versioning_status = versioning_response.get('Status', 'Disabled')
                    # BUGFIX: the boto3 response key is 'MFADelete', not
                    # 'MfaDelete' — the old lookup always fell back to 'Disabled'.
                    # The resource-dict key stays 'MfaDelete' for compatibility.
                    mfa_delete = versioning_response.get('MFADelete', 'Disabled')

                    buckets.append({
                        'BucketName': bucket_name,
                        'VersioningStatus': versioning_status,
                        'MfaDelete': mfa_delete,
                        'IsVersioningEnabled': versioning_status == 'Enabled'
                    })

                except ClientError as e:
                    error_code = e.response.get('Error', {}).get('Code', '')
                    if error_code in ['NoSuchBucket', 'AccessDenied']:
                        # Bucket vanished or is unreadable; skip it silently.
                        continue
                    else:
                        logger.warning(f"Error checking S3 bucket {bucket_name}: {e}")
                        continue

            logger.debug(f"Found {len(buckets)} S3 buckets from {region}")
            return buckets

        except ClientError as e:
            logger.error(f"Error retrieving S3 buckets from {region}: {e}")
            raise
        except Exception as e:
            logger.error(f"Unexpected error retrieving S3 buckets from {region}: {e}")
            raise

    def _evaluate_resource_compliance(self, resource: Dict[str, Any], aws_factory: AWSClientFactory, region: str) -> ComplianceResult:
        """Evaluate if S3 bucket has versioning enabled."""
        bucket_name = resource.get('BucketName', 'unknown')
        versioning_status = resource.get('VersioningStatus', 'Disabled')
        is_versioning_enabled = resource.get('IsVersioningEnabled', False)

        if is_versioning_enabled:
            return ComplianceResult(
                resource_id=bucket_name,
                resource_type="AWS::S3::Bucket",
                compliance_status=ComplianceStatus.COMPLIANT,
                evaluation_reason=f"S3 bucket has versioning enabled (status: {versioning_status})",
                config_rule_name=self.rule_name,
                region=region
            )
        else:
            return ComplianceResult(
                resource_id=bucket_name,
                resource_type="AWS::S3::Bucket",
                compliance_status=ComplianceStatus.NON_COMPLIANT,
                evaluation_reason=f"S3 bucket does not have versioning enabled (status: {versioning_status})",
                config_rule_name=self.rule_name,
                region=region
            )
|