runbooks 0.7.0__py3-none-any.whl → 0.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. runbooks/__init__.py +87 -37
  2. runbooks/cfat/README.md +300 -49
  3. runbooks/cfat/__init__.py +2 -2
  4. runbooks/finops/__init__.py +1 -1
  5. runbooks/finops/cli.py +1 -1
  6. runbooks/inventory/collectors/__init__.py +8 -0
  7. runbooks/inventory/collectors/aws_management.py +791 -0
  8. runbooks/inventory/collectors/aws_networking.py +3 -3
  9. runbooks/main.py +3389 -782
  10. runbooks/operate/__init__.py +207 -0
  11. runbooks/operate/base.py +311 -0
  12. runbooks/operate/cloudformation_operations.py +619 -0
  13. runbooks/operate/cloudwatch_operations.py +496 -0
  14. runbooks/operate/dynamodb_operations.py +812 -0
  15. runbooks/operate/ec2_operations.py +926 -0
  16. runbooks/operate/iam_operations.py +569 -0
  17. runbooks/operate/s3_operations.py +1211 -0
  18. runbooks/operate/tagging_operations.py +655 -0
  19. runbooks/remediation/CLAUDE.md +100 -0
  20. runbooks/remediation/DOME9.md +218 -0
  21. runbooks/remediation/README.md +26 -0
  22. runbooks/remediation/Tests/__init__.py +0 -0
  23. runbooks/remediation/Tests/update_policy.py +74 -0
  24. runbooks/remediation/__init__.py +95 -0
  25. runbooks/remediation/acm_cert_expired_unused.py +98 -0
  26. runbooks/remediation/acm_remediation.py +875 -0
  27. runbooks/remediation/api_gateway_list.py +167 -0
  28. runbooks/remediation/base.py +643 -0
  29. runbooks/remediation/cloudtrail_remediation.py +908 -0
  30. runbooks/remediation/cloudtrail_s3_modifications.py +296 -0
  31. runbooks/remediation/cognito_active_users.py +78 -0
  32. runbooks/remediation/cognito_remediation.py +856 -0
  33. runbooks/remediation/cognito_user_password_reset.py +163 -0
  34. runbooks/remediation/commons.py +455 -0
  35. runbooks/remediation/dynamodb_optimize.py +155 -0
  36. runbooks/remediation/dynamodb_remediation.py +744 -0
  37. runbooks/remediation/dynamodb_server_side_encryption.py +108 -0
  38. runbooks/remediation/ec2_public_ips.py +134 -0
  39. runbooks/remediation/ec2_remediation.py +892 -0
  40. runbooks/remediation/ec2_subnet_disable_auto_ip_assignment.py +72 -0
  41. runbooks/remediation/ec2_unattached_ebs_volumes.py +448 -0
  42. runbooks/remediation/ec2_unused_security_groups.py +202 -0
  43. runbooks/remediation/kms_enable_key_rotation.py +651 -0
  44. runbooks/remediation/kms_remediation.py +717 -0
  45. runbooks/remediation/lambda_list.py +243 -0
  46. runbooks/remediation/lambda_remediation.py +971 -0
  47. runbooks/remediation/multi_account.py +569 -0
  48. runbooks/remediation/rds_instance_list.py +199 -0
  49. runbooks/remediation/rds_remediation.py +873 -0
  50. runbooks/remediation/rds_snapshot_list.py +192 -0
  51. runbooks/remediation/requirements.txt +118 -0
  52. runbooks/remediation/s3_block_public_access.py +159 -0
  53. runbooks/remediation/s3_bucket_public_access.py +143 -0
  54. runbooks/remediation/s3_disable_static_website_hosting.py +74 -0
  55. runbooks/remediation/s3_downloader.py +215 -0
  56. runbooks/remediation/s3_enable_access_logging.py +562 -0
  57. runbooks/remediation/s3_encryption.py +526 -0
  58. runbooks/remediation/s3_force_ssl_secure_policy.py +143 -0
  59. runbooks/remediation/s3_list.py +141 -0
  60. runbooks/remediation/s3_object_search.py +201 -0
  61. runbooks/remediation/s3_remediation.py +816 -0
  62. runbooks/remediation/scan_for_phrase.py +425 -0
  63. runbooks/remediation/workspaces_list.py +220 -0
  64. runbooks/security/__init__.py +9 -10
  65. runbooks/security/security_baseline_tester.py +4 -2
  66. runbooks-0.7.6.dist-info/METADATA +608 -0
  67. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/RECORD +84 -76
  68. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/entry_points.txt +0 -1
  69. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/top_level.txt +0 -1
  70. jupyter-agent/.env +0 -2
  71. jupyter-agent/.env.template +0 -2
  72. jupyter-agent/.gitattributes +0 -35
  73. jupyter-agent/.gradio/certificate.pem +0 -31
  74. jupyter-agent/README.md +0 -16
  75. jupyter-agent/__main__.log +0 -8
  76. jupyter-agent/app.py +0 -256
  77. jupyter-agent/cloudops-agent.png +0 -0
  78. jupyter-agent/ds-system-prompt.txt +0 -154
  79. jupyter-agent/jupyter-agent.png +0 -0
  80. jupyter-agent/llama3_template.jinja +0 -123
  81. jupyter-agent/requirements.txt +0 -9
  82. jupyter-agent/tmp/4ojbs8a02ir/jupyter-agent.ipynb +0 -68
  83. jupyter-agent/tmp/cm5iasgpm3p/jupyter-agent.ipynb +0 -91
  84. jupyter-agent/tmp/crqbsseag5/jupyter-agent.ipynb +0 -91
  85. jupyter-agent/tmp/hohanq1u097/jupyter-agent.ipynb +0 -57
  86. jupyter-agent/tmp/jns1sam29wm/jupyter-agent.ipynb +0 -53
  87. jupyter-agent/tmp/jupyter-agent.ipynb +0 -27
  88. jupyter-agent/utils.py +0 -409
  89. runbooks/aws/__init__.py +0 -58
  90. runbooks/aws/dynamodb_operations.py +0 -231
  91. runbooks/aws/ec2_copy_image_cross-region.py +0 -195
  92. runbooks/aws/ec2_describe_instances.py +0 -202
  93. runbooks/aws/ec2_ebs_snapshots_delete.py +0 -186
  94. runbooks/aws/ec2_run_instances.py +0 -213
  95. runbooks/aws/ec2_start_stop_instances.py +0 -212
  96. runbooks/aws/ec2_terminate_instances.py +0 -143
  97. runbooks/aws/ec2_unused_eips.py +0 -196
  98. runbooks/aws/ec2_unused_volumes.py +0 -188
  99. runbooks/aws/s3_create_bucket.py +0 -142
  100. runbooks/aws/s3_list_buckets.py +0 -152
  101. runbooks/aws/s3_list_objects.py +0 -156
  102. runbooks/aws/s3_object_operations.py +0 -183
  103. runbooks/aws/tagging_lambda_handler.py +0 -183
  104. runbooks/inventory/FAILED_SCRIPTS_TROUBLESHOOTING.md +0 -619
  105. runbooks/inventory/PASSED_SCRIPTS_GUIDE.md +0 -738
  106. runbooks/inventory/aws_organization.png +0 -0
  107. runbooks/inventory/cfn_move_stack_instances.py +0 -1526
  108. runbooks/inventory/delete_s3_buckets_objects.py +0 -169
  109. runbooks/inventory/lockdown_cfn_stackset_role.py +0 -224
  110. runbooks/inventory/update_aws_actions.py +0 -173
  111. runbooks/inventory/update_cfn_stacksets.py +0 -1215
  112. runbooks/inventory/update_cloudwatch_logs_retention_policy.py +0 -294
  113. runbooks/inventory/update_iam_roles_cross_accounts.py +0 -478
  114. runbooks/inventory/update_s3_public_access_block.py +0 -539
  115. runbooks/organizations/__init__.py +0 -12
  116. runbooks/organizations/manager.py +0 -374
  117. runbooks-0.7.0.dist-info/METADATA +0 -375
  118. /runbooks/inventory/{tests → Tests}/common_test_data.py +0 -0
  119. /runbooks/inventory/{tests → Tests}/common_test_functions.py +0 -0
  120. /runbooks/inventory/{tests → Tests}/script_test_data.py +0 -0
  121. /runbooks/inventory/{tests → Tests}/setup.py +0 -0
  122. /runbooks/inventory/{tests → Tests}/src.py +0 -0
  123. /runbooks/inventory/{tests/test_inventory_modules.py → Tests/test_Inventory_Modules.py} +0 -0
  124. /runbooks/inventory/{tests → Tests}/test_cfn_describe_stacks.py +0 -0
  125. /runbooks/inventory/{tests → Tests}/test_ec2_describe_instances.py +0 -0
  126. /runbooks/inventory/{tests → Tests}/test_lambda_list_functions.py +0 -0
  127. /runbooks/inventory/{tests → Tests}/test_moto_integration_example.py +0 -0
  128. /runbooks/inventory/{tests → Tests}/test_org_list_accounts.py +0 -0
  129. /runbooks/inventory/{Inventory_Modules.py → inventory_modules.py} +0 -0
  130. /runbooks/{aws → operate}/tags.json +0 -0
  131. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/WHEEL +0 -0
  132. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,562 @@
+ """
+ Enterprise S3 Access Logging Management - Automated Security Audit Trail
+
+ ## Overview
+
+ This module provides comprehensive S3 server access logging management to enhance
+ security monitoring and compliance posture. S3 access logging is a critical
+ security requirement for tracking bucket access patterns and detecting unauthorized
+ activities.
+
+ ## Key Features
+
+ - **Comprehensive Detection**: Identifies buckets without access logging enabled
+ - **Safe Configuration**: Enables server access logging with optimal settings
+ - **Bulk Operations**: Efficiently processes all buckets in an account
+ - **Compliance Integration**: Supports CIS, NIST, SOC2, and PCI DSS requirements
+ - **Audit Trail**: Comprehensive logging of all configuration operations
+ - **Cost Optimization**: Configurable log retention and storage options
+
+ ## Security Benefits
+
+ - **Access Monitoring**: Tracks all requests to S3 buckets for security analysis
+ - **Compliance Adherence**: Meets regulatory requirements for access logging
+ - **Incident Response**: Provides detailed audit trails for security investigations
+ - **Threat Detection**: Enables detection of unauthorized access patterns
+ - **Forensic Analysis**: Supports detailed investigation of security events
+
+ ## Access Log Format
+
+ S3 server access logs contain:
+ - Request timestamp and remote IP address
+ - Requester identity and authentication status
+ - Request type, target resource, and response details
+ - Bytes transferred and processing time
+ - Error codes and referrer information
+
+ ## Usage Examples
+
+ ```python
+ # Audit mode - detect buckets without logging (safe)
+ python s3_enable_access_logging.py --dry-run
+
+ # Enable access logging with default settings
+ python s3_enable_access_logging.py
+
+ # Enable with custom log bucket and prefix
+ python s3_enable_access_logging.py --log-bucket audit-logs --log-prefix access-logs/
+ ```
+
+ ## Important Configuration Notes
+
+ ⚠️ **LOG STORAGE**: Logs are stored in the same bucket by default
+ ⚠️ **COST IMPACT**: Access logging incurs additional storage costs
+ ⚠️ **RETENTION**: Consider lifecycle policies for log management
+
+ Version: 0.7.6 - Enterprise Production Ready
+ Compliance: CIS AWS Foundations 3.1, SOC2 A1.1, PCI DSS 10.2
+ """
+
+ import logging
+ from typing import Any, Dict, List, Optional, Tuple
+
+ import click
+ from botocore.exceptions import BotoCoreError, ClientError
+
+ from .commons import display_aws_account_info, get_client
+
+ # Configure enterprise logging
+ logger = logging.getLogger(__name__)
+ logger.setLevel(logging.INFO)
+
+
+ def check_bucket_logging_status(bucket_name: str, s3_client) -> Tuple[bool, Optional[Dict[str, Any]]]:
+     """
+     Check the current access logging configuration for an S3 bucket.
+
+     This function queries the S3 service to determine whether server access logging
+     is currently enabled for the specified bucket. It handles various edge cases
+     and permission scenarios that may occur in enterprise environments.
+
+     ## Implementation Details
+
+     - Uses S3 GetBucketLogging API
+     - Handles permission and access errors gracefully
+     - Returns both status and configuration details
+     - Provides structured error logging for troubleshooting
+
+     ## Security Considerations
+
+     - Requires s3:GetBucketLogging permission
+     - May encounter cross-region access restrictions
+     - Bucket policies may deny logging configuration access
+
+     Args:
+         bucket_name (str): S3 bucket name to check
+             Must be a valid S3 bucket name format
+         s3_client: Initialized boto3 S3 client instance
+
+     Returns:
+         Tuple[bool, Optional[Dict[str, Any]]]: (is_enabled, logging_config)
+         - is_enabled: True if access logging is configured
+         - logging_config: Current logging configuration or None
+
+     Raises:
+         ClientError: If S3 API access fails with unexpected errors
+         ValueError: If bucket_name is invalid
+
+     Example:
+         >>> s3_client = boto3.client('s3')
+         >>> is_enabled, config = check_bucket_logging_status('my-bucket', s3_client)
+         >>> if is_enabled:
+         ...     print(f"Logging enabled to {config['LoggingEnabled']['TargetBucket']}")
+         ... else:
+         ...     print("Access logging is not configured")
+     """
+
+     # Input validation
+     if not bucket_name or not isinstance(bucket_name, str):
+         raise ValueError(f"Invalid bucket_name: {bucket_name}. Must be a non-empty string.")
+
+     logger.debug(f"Checking access logging status for bucket: {bucket_name}")
+
+     try:
+         # Query bucket logging configuration
+         logging_response = s3_client.get_bucket_logging(Bucket=bucket_name)
+
+         # Check if logging is enabled
+         is_logging_enabled = "LoggingEnabled" in logging_response
+         logging_config = logging_response.get("LoggingEnabled", None)
+
+         if is_logging_enabled:
+             target_bucket = logging_config.get("TargetBucket", "Unknown")
+             target_prefix = logging_config.get("TargetPrefix", "")
+             logger.debug(f"Bucket {bucket_name} has logging enabled to {target_bucket} with prefix '{target_prefix}'")
+         else:
+             logger.debug(f"Bucket {bucket_name} does not have access logging enabled")
+
+         return is_logging_enabled, logging_config
+
+     except ClientError as e:
+         error_code = e.response.get("Error", {}).get("Code", "Unknown")
+         error_message = e.response.get("Error", {}).get("Message", str(e))
+
+         # Handle specific S3 errors gracefully
+         if error_code == "NoSuchBucket":
+             logger.warning(f"Bucket not found: {bucket_name}")
+             return False, None
+         elif error_code in ["AccessDenied", "Forbidden"]:
+             logger.warning(f"Insufficient permissions to check logging for bucket: {bucket_name}")
+             return False, None
+         elif error_code == "NoSuchLoggingConfiguration":
+             logger.debug(f"No logging configuration found for bucket: {bucket_name}")
+             return False, None
+         else:
+             logger.error(f"S3 API error checking logging for {bucket_name}: {error_code} - {error_message}")
+             raise
+
+     except BotoCoreError as e:
+         logger.error(f"AWS service error checking logging for {bucket_name}: {e}")
+         raise
+
+     except Exception as e:
+         logger.error(f"Unexpected error checking logging for {bucket_name}: {e}")
+         raise
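
The `"LoggingEnabled" in logging_response` check above relies on how boto3 reports the logging state: `get_bucket_logging` returns a `LoggingEnabled` block only when logging is configured, and omits the key entirely otherwise. A minimal standalone sketch for reference (the bucket name is a placeholder, not part of the package):

```python
import boto3

s3 = boto3.client("s3")

# A LoggingEnabled block is present only when access logging is configured;
# a bucket without logging returns a response with no such key.
resp = s3.get_bucket_logging(Bucket="example-source-bucket")
if "LoggingEnabled" in resp:
    cfg = resp["LoggingEnabled"]
    print(f"logs go to {cfg['TargetBucket']} with prefix {cfg.get('TargetPrefix', '')}")
else:
    print("access logging is not configured")
```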
+
+
+ def enable_bucket_access_logging(bucket_name: str, target_bucket: str, target_prefix: str, s3_client) -> bool:
+     """
+     Enable server access logging for a specific S3 bucket.
+
+     This function configures S3 server access logging for the specified bucket,
+     directing log files to a target bucket with the specified prefix. This is
+     essential for security monitoring and compliance requirements.
+
+     ## Implementation Details
+
+     - Uses S3 PutBucketLogging API
+     - Validates target bucket accessibility
+     - Configures optimal logging settings
+     - Provides comprehensive error handling and logging
+
+     ## Security Benefits
+
+     - **Audit Trail**: Creates detailed access logs for security analysis
+     - **Compliance**: Meets regulatory requirements for access monitoring
+     - **Threat Detection**: Enables identification of suspicious access patterns
+     - **Forensics**: Supports incident investigation and response
+
+     Args:
+         bucket_name (str): Source bucket to enable logging for
+         target_bucket (str): Destination bucket for log files
+             Must have appropriate ACL permissions
+         target_prefix (str): Prefix for log files (e.g., 'access-logs/')
+             Helps organize logs by source or date
+         s3_client: Initialized boto3 S3 client instance
+
+     Returns:
+         bool: True if logging was successfully enabled, False otherwise
+
+     Raises:
+         ValueError: If parameters are invalid
+         ClientError: If S3 API access fails with unexpected errors
+
+     Example:
+         >>> s3_client = boto3.client('s3')
+         >>> success = enable_bucket_access_logging('my-bucket', 'logs-bucket', 'access-logs/', s3_client)
+         >>> if success:
+         ...     print("Access logging enabled successfully")
+         ... else:
+         ...     print("Failed to enable access logging - check logs")
+     """
+
+     # Input validation
+     if not bucket_name or not isinstance(bucket_name, str):
+         raise ValueError(f"Invalid bucket_name: {bucket_name}. Must be a non-empty string.")
+
+     if not target_bucket or not isinstance(target_bucket, str):
+         raise ValueError(f"Invalid target_bucket: {target_bucket}. Must be a non-empty string.")
+
+     if not isinstance(target_prefix, str):
+         raise ValueError(f"Invalid target_prefix: {target_prefix}. Must be a string.")
+
+     logger.info(f"🔧 Enabling access logging for bucket: {bucket_name}")
+     logger.info(f" 📝 Target bucket: {target_bucket}")
+     logger.info(f" 📁 Log prefix: {target_prefix}")
+
+     try:
+         # Configure bucket logging
+         logging_configuration = {"LoggingEnabled": {"TargetBucket": target_bucket, "TargetPrefix": target_prefix}}
+
+         # Apply logging configuration
+         s3_client.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus=logging_configuration)
+
+         logger.info(f"✅ Successfully enabled access logging for bucket: {bucket_name}")
+         return True
+
+     except ClientError as e:
+         error_code = e.response.get("Error", {}).get("Code", "Unknown")
+         error_message = e.response.get("Error", {}).get("Message", str(e))
+
+         # Handle specific S3 errors with informative messages
+         if error_code == "NoSuchBucket":
+             logger.error(f"❌ Source bucket not found: {bucket_name}")
+         elif error_code == "InvalidTargetBucketForLogging":
+             logger.error(f"❌ Invalid target bucket for logging: {target_bucket}")
+             logger.error(" Target bucket must be in the same region and have proper ACL permissions")
+         elif error_code in ["AccessDenied", "Forbidden"]:
+             logger.error(f"🔒 Insufficient permissions to enable logging for bucket: {bucket_name}")
+             logger.error(" Required permissions: s3:PutBucketLogging")
+         elif error_code == "InvalidRequest":
+             logger.error(f"❌ Invalid logging configuration for bucket: {bucket_name}")
+             logger.error(f" Check target bucket permissions and prefix format: {target_prefix}")
+         else:
+             logger.error(f"❌ S3 API error enabling logging for {bucket_name}: {error_code} - {error_message}")
+
+         return False
+
+     except BotoCoreError as e:
+         logger.error(f"❌ AWS service error enabling logging for {bucket_name}: {e}")
+         return False
+
+     except Exception as e:
+         logger.error(f"❌ Unexpected error enabling logging for {bucket_name}: {e}")
+         raise
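
The `InvalidTargetBucketForLogging` branch above points at the usual prerequisite this module does not handle: the target bucket must allow the S3 logging service to deliver objects. A hedged sketch of one way to grant that with a bucket policy (bucket names, account ID, and prefix are placeholders; adjust to your environment):

```python
import json

import boto3

s3 = boto3.client("s3")

# Placeholder identifiers - substitute your own log bucket, source bucket, and account ID.
target_bucket = "example-audit-logs"
source_bucket = "example-source-bucket"
account_id = "111122223333"

policy = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "S3ServerAccessLogsDelivery",
            "Effect": "Allow",
            "Principal": {"Service": "logging.s3.amazonaws.com"},
            "Action": "s3:PutObject",
            "Resource": f"arn:aws:s3:::{target_bucket}/access-logs/*",
            "Condition": {
                "StringEquals": {"aws:SourceAccount": account_id},
                "ArnLike": {"aws:SourceArn": f"arn:aws:s3:::{source_bucket}"},
            },
        }
    ],
}

# Grant the log delivery permission before calling put_bucket_logging on the source bucket.
s3.put_bucket_policy(Bucket=target_bucket, Policy=json.dumps(policy))
```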
+
+
+ @click.command()
+ @click.option(
+     "--dry-run", is_flag=True, default=True, help="Preview mode - show buckets that need logging without enabling it"
+ )
+ @click.option("--target-bucket", type=str, help="Destination bucket for access logs (defaults to same bucket)")
+ @click.option("--log-prefix", type=str, default="access-logs/", help="Prefix for log files (default: access-logs/)")
+ @click.option("--region", type=str, help="AWS region to scan (defaults to current region)")
+ @click.option("--bucket-filter", type=str, help="Filter buckets by name pattern (supports wildcards)")
+ @click.option("--output-file", type=str, help="Save results to CSV file")
+ def enable_s3_access_logging(
+     dry_run: bool,
+     target_bucket: Optional[str],
+     log_prefix: str,
+     region: Optional[str],
+     bucket_filter: Optional[str],
+     output_file: Optional[str],
+ ):
+     """
+     Enterprise S3 Access Logging Management - Bulk logging enablement for security monitoring.
+
+     This command provides comprehensive detection and enablement of S3 server access logging
+     across all buckets in your AWS account. Access logging is a critical security requirement
+     for monitoring bucket access patterns and detecting unauthorized activities.
+
+     ## Operation Modes
+
+     **Dry-Run Mode (Default - SAFE):**
+     - Scans and reports buckets without access logging
+     - No configuration changes are made
+     - Generates detailed compliance reports
+     - Safe for production environments
+
+     **Enablement Mode (CONFIGURATION CHANGE):**
+     - Actually enables access logging on eligible buckets
+     - Requires explicit --no-dry-run flag
+     - Creates comprehensive audit trail
+     - Enhances security monitoring capabilities
+
+     ## Logging Configuration
+
+     **Default Settings:**
+     - Logs stored in the same bucket as the source
+     - Log prefix: 'access-logs/' for organization
+     - Standard S3 server access log format
+
+     **Custom Settings:**
+     - Specify dedicated log bucket with --target-bucket
+     - Custom log prefix with --log-prefix
+     - Regional configuration with --region
+
+     ## Compliance Benefits
+
+     - **CIS AWS Foundations 3.1**: Ensures S3 access logging is enabled
+     - **SOC2 A1.1**: Demonstrates access monitoring controls
+     - **PCI DSS 10.2**: Implements audit trail requirements
+     - **Cost Optimization**: Centralized log management options
+
+     Args:
+         dry_run (bool): When True (default), only reports findings without changes
+         target_bucket (str): Destination bucket for log files (optional)
+         log_prefix (str): Prefix for organizing log files
+         region (str): AWS region to scan (defaults to configured region)
+         bucket_filter (str): Filter buckets by name pattern
+         output_file (str): Optional CSV file path for saving detailed results
+
+     Returns:
+         None: Results are logged and optionally saved to CSV
+
+     Examples:
+         # Safe audit of all buckets (recommended first step)
+         python s3_enable_access_logging.py --dry-run
+
+         # Audit with filtering and output
+         python s3_enable_access_logging.py --dry-run --bucket-filter "*prod*" --output-file s3-logging-audit.csv
+
+         # Enable logging with default settings
+         python s3_enable_access_logging.py --no-dry-run
+
+         # Enable with custom log bucket and prefix
+         python s3_enable_access_logging.py --no-dry-run --target-bucket audit-logs --log-prefix s3-access/
+     """
+
+     # Input validation
+     if log_prefix and not isinstance(log_prefix, str):
+         raise ValueError(f"Invalid log_prefix: {log_prefix}. Must be a string.")
+
+     # Enhanced logging for operation start
+     operation_mode = "DRY-RUN (Safe Audit)" if dry_run else "ENABLEMENT (Configuration Change)"
+     logger.info(f"📝 Starting S3 Access Logging Analysis - Mode: {operation_mode}")
+     logger.info(f"📊 Configuration: region={region or 'default'}, filter={bucket_filter or 'none'}")
+     logger.info(f"📁 Log settings: target_bucket={target_bucket or 'same bucket'}, prefix={log_prefix}")
+
+     # Display account information for verification
+     account_info = display_aws_account_info()
+     logger.info(f"🏢 {account_info}")
+
+     if not dry_run:
+         logger.warning("⚠️ CONFIGURATION MODE ENABLED - Access logging will be enabled!")
+         logger.warning("⚠️ This will incur additional storage costs for log files!")
+
+     try:
+         # Initialize S3 client with region support
+         s3_client = get_client("s3", region_name=region)
+         logger.debug(f"Initialized S3 client for region: {region or 'default'}")
+
+         # Collect comprehensive bucket analysis data
+         bucket_analysis_results = []
+         buckets_without_logging = []
+         buckets_with_logging = []
+         skipped_buckets = []
+         total_buckets_scanned = 0
+
+         logger.info("🔍 Scanning S3 buckets in account...")
+
+         # List all buckets
+         try:
+             response = s3_client.list_buckets()
+             all_buckets = response.get("Buckets", [])
+         except ClientError as e:
+             error_code = e.response.get("Error", {}).get("Code", "Unknown")
+             logger.error(f"❌ Failed to list S3 buckets: {error_code}")
+             raise
+
+         logger.info(f"📋 Found {len(all_buckets)} total buckets to analyze")
+
+         for bucket_index, bucket in enumerate(all_buckets, 1):
+             bucket_name = bucket["Name"]
+             bucket_creation_date = bucket.get("CreationDate")
+             total_buckets_scanned += 1
+
+             logger.debug(f"Analyzing bucket {bucket_index}/{len(all_buckets)}: {bucket_name}")
+
+             # Apply bucket filtering if specified
+             if bucket_filter:
+                 if bucket_filter.replace("*", "") not in bucket_name:
+                     logger.debug(f"Bucket {bucket_name} filtered out by pattern: {bucket_filter}")
+                     continue
+
+             try:
+                 # Check current logging status
+                 is_logging_enabled, logging_config = check_bucket_logging_status(bucket_name, s3_client)
+
+                 # Determine target bucket for this bucket's logs
+                 effective_target_bucket = target_bucket or bucket_name
+
+                 # Build comprehensive bucket analysis
+                 bucket_analysis = {
+                     "BucketName": bucket_name,
+                     "CreationDate": bucket_creation_date.isoformat() if bucket_creation_date else "Unknown",
+                     "CurrentLoggingEnabled": is_logging_enabled,
+                     "CurrentTargetBucket": logging_config.get("TargetBucket", "") if logging_config else "",
+                     "CurrentLogPrefix": logging_config.get("TargetPrefix", "") if logging_config else "",
+                     "NeedsLoggingEnabled": not is_logging_enabled,
+                     "ProposedTargetBucket": effective_target_bucket,
+                     "ProposedLogPrefix": log_prefix,
+                     "Region": region or "default",
+                 }
+
+                 bucket_analysis_results.append(bucket_analysis)
+
+                 # Categorize buckets for processing
+                 if is_logging_enabled:
+                     buckets_with_logging.append(bucket_analysis)
+                     target_info = f"→ {logging_config.get('TargetBucket', 'Unknown')}"
+                     prefix_info = f"(prefix: {logging_config.get('TargetPrefix', 'none')})"
+                     logger.debug(f"✅ LOGGING ENABLED: {bucket_name} {target_info} {prefix_info}")
+                 else:
+                     buckets_without_logging.append(bucket_analysis)
+                     logger.info(f"🎯 NEEDS LOGGING: {bucket_name} → {effective_target_bucket}/{log_prefix}")
+
+             except Exception as e:
+                 logger.warning(f"⚠️ Could not analyze bucket {bucket_name}: {e}")
+                 skipped_buckets.append({"bucket_name": bucket_name, "error": str(e)})
+                 continue
+
+         # Generate comprehensive analysis summary
+         needs_logging_count = len(buckets_without_logging)
+         already_enabled_count = len(buckets_with_logging)
+         skipped_count = len(skipped_buckets)
+
+         logger.info("📊 S3 ACCESS LOGGING ANALYSIS SUMMARY:")
+         logger.info(f" 📋 Total buckets scanned: {total_buckets_scanned}")
+         logger.info(f" ✅ Buckets with logging enabled: {already_enabled_count}")
+         logger.info(f" 🎯 Buckets needing logging: {needs_logging_count}")
+         logger.info(f" ⚠️ Skipped buckets: {skipped_count}")
+
+         # Calculate compliance percentage
+         analyzable_total = already_enabled_count + needs_logging_count
+         if analyzable_total > 0:
+             compliance_percentage = (already_enabled_count / analyzable_total) * 100
+             logger.info(f" 📈 Current compliance rate: {compliance_percentage:.1f}%")
+
+         # Execute logging enablement if not in dry-run mode
+         if not dry_run and buckets_without_logging:
+             logger.warning(f"📝 ENABLING ACCESS LOGGING: Processing {needs_logging_count} buckets...")
+
+             successful_enablements = 0
+             failed_enablements = []
+
+             for bucket_data in buckets_without_logging:
+                 bucket_name = bucket_data["BucketName"]
+                 effective_target_bucket = bucket_data["ProposedTargetBucket"]
+
+                 try:
+                     logger.info(f"📝 Enabling access logging for bucket: {bucket_name}")
+
+                     success = enable_bucket_access_logging(bucket_name, effective_target_bucket, log_prefix, s3_client)
+
+                     if success:
+                         successful_enablements += 1
+                         logger.info(f"✅ Successfully enabled logging for: {bucket_name}")
+                     else:
+                         failed_enablements.append(
+                             {"bucket_name": bucket_name, "error": "Enable function returned False"}
+                         )
+
+                 except Exception as e:
+                     error_message = str(e)
+                     logger.error(f"❌ Failed to enable logging for {bucket_name}: {error_message}")
+                     failed_enablements.append({"bucket_name": bucket_name, "error": error_message})
+
+             # Enablement summary
+             logger.info("🏁 ACCESS LOGGING ENABLEMENT COMPLETE:")
+             logger.info(f" ✅ Successfully enabled: {successful_enablements} buckets")
+             logger.info(f" ❌ Failed enablements: {len(failed_enablements)} buckets")
+
+             if failed_enablements:
+                 logger.warning("❌ Failed enablement details:")
+                 for failure in failed_enablements:
+                     logger.warning(f" - {failure['bucket_name']}: {failure['error']}")
+
+             # Calculate final compliance rate
+             final_enabled_count = already_enabled_count + successful_enablements
+             final_compliance_percentage = (final_enabled_count / analyzable_total) * 100 if analyzable_total > 0 else 0
+             logger.info(f" 📈 Final compliance rate: {final_compliance_percentage:.1f}%")
+
+         # Save results to CSV if requested
+         if output_file and bucket_analysis_results:
+             try:
+                 # Use the commons write_to_csv function if available
+                 from .commons import write_to_csv
+
+                 write_to_csv(bucket_analysis_results, output_file)
+                 logger.info(f"💾 Results saved to: {output_file}")
+             except Exception as e:
+                 logger.error(f"❌ Failed to save results to {output_file}: {e}")
+
+         # Display skipped buckets if any
+         if skipped_buckets:
+             logger.warning("⚠️ Skipped buckets due to errors:")
+             for skipped in skipped_buckets:
+                 logger.warning(f" - {skipped['bucket_name']}: {skipped['error']}")
+
+         # Final operation summary with actionable recommendations
+         if dry_run:
+             logger.info("✅ DRY-RUN COMPLETE - No buckets were modified")
+             if needs_logging_count > 0:
+                 logger.info(f"💡 To enable access logging on {needs_logging_count} buckets, run with --no-dry-run")
+                 logger.info(f"📝 This will improve compliance from {compliance_percentage:.1f}% to 100%")
+
+                 # Estimate storage cost impact
+                 avg_log_size_mb = 1 # Conservative estimate: 1MB per bucket per day
+                 monthly_cost_estimate = needs_logging_count * avg_log_size_mb * 30 * 0.023 # $0.023/GB/month
+                 logger.info(f"💰 Estimated additional monthly cost: ${monthly_cost_estimate:.2f}")
+             else:
+                 logger.info("🎉 All eligible buckets already have access logging enabled!")
+         else:
+             logger.info("✅ ACCESS LOGGING ENABLEMENT COMPLETE")
+             logger.info(f"📝 S3 security monitoring enhanced for {successful_enablements} buckets")
+
+     except ClientError as e:
+         error_code = e.response.get("Error", {}).get("Code", "Unknown")
+         error_message = e.response.get("Error", {}).get("Message", str(e))
+
+         logger.error(f"❌ AWS API error during S3 analysis: {error_code} - {error_message}")
+
+         # Handle specific AWS errors gracefully
+         if error_code in ["AccessDenied", "UnauthorizedOperation"]:
+             logger.error("🔒 Insufficient IAM permissions for S3 operations")
+             logger.error(" Required permissions: s3:ListAllMyBuckets, s3:GetBucketLogging, s3:PutBucketLogging")
+         elif error_code == "InvalidRegion":
+             logger.error(f"🌍 Invalid AWS region specified: {region}")
+         else:
+             raise
+
+     except BotoCoreError as e:
+         logger.error(f"❌ AWS service error during S3 analysis: {e}")
+         raise
+
+     except Exception as e:
+         logger.error(f"❌ Unexpected error during S3 analysis: {e}")
+         raise
+
+
+ if __name__ == "__main__":
+     enable_s3_access_logging()
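
The module enables logging but leaves the docstring's retention note to the operator. A hedged sketch of a lifecycle rule that expires delivered access logs after a fixed window (bucket name, prefix, and the 90-day period are placeholder choices, not part of the package):

```python
import boto3

s3 = boto3.client("s3")

# Placeholder target - point this at the bucket/prefix that receives the access logs.
s3.put_bucket_lifecycle_configuration(
    Bucket="example-audit-logs",
    LifecycleConfiguration={
        "Rules": [
            {
                "ID": "expire-s3-access-logs",
                "Filter": {"Prefix": "access-logs/"},
                "Status": "Enabled",
                "Expiration": {"Days": 90},  # retain logs for roughly 90 days
            }
        ]
    },
)
```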