runbooks 0.7.0-py3-none-any.whl → 0.7.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. runbooks/__init__.py +87 -37
  2. runbooks/cfat/README.md +300 -49
  3. runbooks/cfat/__init__.py +2 -2
  4. runbooks/finops/__init__.py +1 -1
  5. runbooks/finops/cli.py +1 -1
  6. runbooks/inventory/collectors/__init__.py +8 -0
  7. runbooks/inventory/collectors/aws_management.py +791 -0
  8. runbooks/inventory/collectors/aws_networking.py +3 -3
  9. runbooks/main.py +3389 -782
  10. runbooks/operate/__init__.py +207 -0
  11. runbooks/operate/base.py +311 -0
  12. runbooks/operate/cloudformation_operations.py +619 -0
  13. runbooks/operate/cloudwatch_operations.py +496 -0
  14. runbooks/operate/dynamodb_operations.py +812 -0
  15. runbooks/operate/ec2_operations.py +926 -0
  16. runbooks/operate/iam_operations.py +569 -0
  17. runbooks/operate/s3_operations.py +1211 -0
  18. runbooks/operate/tagging_operations.py +655 -0
  19. runbooks/remediation/CLAUDE.md +100 -0
  20. runbooks/remediation/DOME9.md +218 -0
  21. runbooks/remediation/README.md +26 -0
  22. runbooks/remediation/Tests/__init__.py +0 -0
  23. runbooks/remediation/Tests/update_policy.py +74 -0
  24. runbooks/remediation/__init__.py +95 -0
  25. runbooks/remediation/acm_cert_expired_unused.py +98 -0
  26. runbooks/remediation/acm_remediation.py +875 -0
  27. runbooks/remediation/api_gateway_list.py +167 -0
  28. runbooks/remediation/base.py +643 -0
  29. runbooks/remediation/cloudtrail_remediation.py +908 -0
  30. runbooks/remediation/cloudtrail_s3_modifications.py +296 -0
  31. runbooks/remediation/cognito_active_users.py +78 -0
  32. runbooks/remediation/cognito_remediation.py +856 -0
  33. runbooks/remediation/cognito_user_password_reset.py +163 -0
  34. runbooks/remediation/commons.py +455 -0
  35. runbooks/remediation/dynamodb_optimize.py +155 -0
  36. runbooks/remediation/dynamodb_remediation.py +744 -0
  37. runbooks/remediation/dynamodb_server_side_encryption.py +108 -0
  38. runbooks/remediation/ec2_public_ips.py +134 -0
  39. runbooks/remediation/ec2_remediation.py +892 -0
  40. runbooks/remediation/ec2_subnet_disable_auto_ip_assignment.py +72 -0
  41. runbooks/remediation/ec2_unattached_ebs_volumes.py +448 -0
  42. runbooks/remediation/ec2_unused_security_groups.py +202 -0
  43. runbooks/remediation/kms_enable_key_rotation.py +651 -0
  44. runbooks/remediation/kms_remediation.py +717 -0
  45. runbooks/remediation/lambda_list.py +243 -0
  46. runbooks/remediation/lambda_remediation.py +971 -0
  47. runbooks/remediation/multi_account.py +569 -0
  48. runbooks/remediation/rds_instance_list.py +199 -0
  49. runbooks/remediation/rds_remediation.py +873 -0
  50. runbooks/remediation/rds_snapshot_list.py +192 -0
  51. runbooks/remediation/requirements.txt +118 -0
  52. runbooks/remediation/s3_block_public_access.py +159 -0
  53. runbooks/remediation/s3_bucket_public_access.py +143 -0
  54. runbooks/remediation/s3_disable_static_website_hosting.py +74 -0
  55. runbooks/remediation/s3_downloader.py +215 -0
  56. runbooks/remediation/s3_enable_access_logging.py +562 -0
  57. runbooks/remediation/s3_encryption.py +526 -0
  58. runbooks/remediation/s3_force_ssl_secure_policy.py +143 -0
  59. runbooks/remediation/s3_list.py +141 -0
  60. runbooks/remediation/s3_object_search.py +201 -0
  61. runbooks/remediation/s3_remediation.py +816 -0
  62. runbooks/remediation/scan_for_phrase.py +425 -0
  63. runbooks/remediation/workspaces_list.py +220 -0
  64. runbooks/security/__init__.py +9 -10
  65. runbooks/security/security_baseline_tester.py +4 -2
  66. runbooks-0.7.6.dist-info/METADATA +608 -0
  67. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/RECORD +84 -76
  68. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/entry_points.txt +0 -1
  69. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/top_level.txt +0 -1
  70. jupyter-agent/.env +0 -2
  71. jupyter-agent/.env.template +0 -2
  72. jupyter-agent/.gitattributes +0 -35
  73. jupyter-agent/.gradio/certificate.pem +0 -31
  74. jupyter-agent/README.md +0 -16
  75. jupyter-agent/__main__.log +0 -8
  76. jupyter-agent/app.py +0 -256
  77. jupyter-agent/cloudops-agent.png +0 -0
  78. jupyter-agent/ds-system-prompt.txt +0 -154
  79. jupyter-agent/jupyter-agent.png +0 -0
  80. jupyter-agent/llama3_template.jinja +0 -123
  81. jupyter-agent/requirements.txt +0 -9
  82. jupyter-agent/tmp/4ojbs8a02ir/jupyter-agent.ipynb +0 -68
  83. jupyter-agent/tmp/cm5iasgpm3p/jupyter-agent.ipynb +0 -91
  84. jupyter-agent/tmp/crqbsseag5/jupyter-agent.ipynb +0 -91
  85. jupyter-agent/tmp/hohanq1u097/jupyter-agent.ipynb +0 -57
  86. jupyter-agent/tmp/jns1sam29wm/jupyter-agent.ipynb +0 -53
  87. jupyter-agent/tmp/jupyter-agent.ipynb +0 -27
  88. jupyter-agent/utils.py +0 -409
  89. runbooks/aws/__init__.py +0 -58
  90. runbooks/aws/dynamodb_operations.py +0 -231
  91. runbooks/aws/ec2_copy_image_cross-region.py +0 -195
  92. runbooks/aws/ec2_describe_instances.py +0 -202
  93. runbooks/aws/ec2_ebs_snapshots_delete.py +0 -186
  94. runbooks/aws/ec2_run_instances.py +0 -213
  95. runbooks/aws/ec2_start_stop_instances.py +0 -212
  96. runbooks/aws/ec2_terminate_instances.py +0 -143
  97. runbooks/aws/ec2_unused_eips.py +0 -196
  98. runbooks/aws/ec2_unused_volumes.py +0 -188
  99. runbooks/aws/s3_create_bucket.py +0 -142
  100. runbooks/aws/s3_list_buckets.py +0 -152
  101. runbooks/aws/s3_list_objects.py +0 -156
  102. runbooks/aws/s3_object_operations.py +0 -183
  103. runbooks/aws/tagging_lambda_handler.py +0 -183
  104. runbooks/inventory/FAILED_SCRIPTS_TROUBLESHOOTING.md +0 -619
  105. runbooks/inventory/PASSED_SCRIPTS_GUIDE.md +0 -738
  106. runbooks/inventory/aws_organization.png +0 -0
  107. runbooks/inventory/cfn_move_stack_instances.py +0 -1526
  108. runbooks/inventory/delete_s3_buckets_objects.py +0 -169
  109. runbooks/inventory/lockdown_cfn_stackset_role.py +0 -224
  110. runbooks/inventory/update_aws_actions.py +0 -173
  111. runbooks/inventory/update_cfn_stacksets.py +0 -1215
  112. runbooks/inventory/update_cloudwatch_logs_retention_policy.py +0 -294
  113. runbooks/inventory/update_iam_roles_cross_accounts.py +0 -478
  114. runbooks/inventory/update_s3_public_access_block.py +0 -539
  115. runbooks/organizations/__init__.py +0 -12
  116. runbooks/organizations/manager.py +0 -374
  117. runbooks-0.7.0.dist-info/METADATA +0 -375
  118. /runbooks/inventory/{tests → Tests}/common_test_data.py +0 -0
  119. /runbooks/inventory/{tests → Tests}/common_test_functions.py +0 -0
  120. /runbooks/inventory/{tests → Tests}/script_test_data.py +0 -0
  121. /runbooks/inventory/{tests → Tests}/setup.py +0 -0
  122. /runbooks/inventory/{tests → Tests}/src.py +0 -0
  123. /runbooks/inventory/{tests/test_inventory_modules.py → Tests/test_Inventory_Modules.py} +0 -0
  124. /runbooks/inventory/{tests → Tests}/test_cfn_describe_stacks.py +0 -0
  125. /runbooks/inventory/{tests → Tests}/test_ec2_describe_instances.py +0 -0
  126. /runbooks/inventory/{tests → Tests}/test_lambda_list_functions.py +0 -0
  127. /runbooks/inventory/{tests → Tests}/test_moto_integration_example.py +0 -0
  128. /runbooks/inventory/{tests → Tests}/test_org_list_accounts.py +0 -0
  129. /runbooks/inventory/{Inventory_Modules.py → inventory_modules.py} +0 -0
  130. /runbooks/{aws → operate}/tags.json +0 -0
  131. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/WHEEL +0 -0
  132. {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/licenses/LICENSE +0 -0
runbooks/remediation/rds_snapshot_list.py
@@ -0,0 +1,192 @@
+ """
+ RDS Snapshot Analysis - Analyze RDS snapshots for lifecycle management and cost optimization.
+ """
+
+ import logging
+ from datetime import datetime, timedelta, timezone
+
+ import click
+ from botocore.exceptions import ClientError
+
+ from .commons import display_aws_account_info, get_client, write_to_csv
+
+ logger = logging.getLogger(__name__)
+
+
+ def calculate_snapshot_age(create_time):
+     """Calculate snapshot age in days."""
+     if isinstance(create_time, str):
+         create_time = datetime.fromisoformat(create_time.replace("Z", "+00:00"))
+
+     now = datetime.now(tz=timezone.utc)
+     age = (now - create_time).days
+     return age
+
+
+ def estimate_snapshot_cost(allocated_storage, storage_type="gp2", days_old=1):
+     """Estimate monthly snapshot storage cost (simplified)."""
+     # Simplified cost estimation per GB per month
+     cost_per_gb_month = {
+         "gp2": 0.095,  # General Purpose SSD
+         "gp3": 0.08,  # General Purpose SSD (gp3)
+         "io1": 0.125,  # Provisioned IOPS SSD
+         "io2": 0.125,  # Provisioned IOPS SSD
+         "standard": 0.05,  # Magnetic
+     }
+
+     rate = cost_per_gb_month.get(storage_type.lower(), 0.095)  # Default to gp2
+     monthly_cost = allocated_storage * rate
+
+     # Pro-rate for actual age if less than a month
+     if days_old < 30:
+         return round((monthly_cost / 30) * days_old, 2)
+     else:
+         return round(monthly_cost, 2)
+
+
+ @click.command()
+ @click.option("--output-file", default="/tmp/rds_snapshots.csv", help="Output CSV file path")
+ @click.option("--old-days", default=30, help="Days threshold for considering snapshots old")
+ @click.option("--include-cost", is_flag=True, help="Include estimated cost analysis")
+ @click.option("--snapshot-type", help="Filter by snapshot type (automated, manual)")
+ def get_rds_snapshot_details(output_file, old_days, include_cost, snapshot_type):
+     """Analyze RDS snapshots for lifecycle management and cost optimization."""
+     logger.info(f"Analyzing RDS snapshots in {display_aws_account_info()}")
+
+     try:
+         rds = get_client("rds")
+
+         # Get all snapshots
+         logger.info("Collecting RDS snapshot data...")
+         response = rds.describe_db_snapshots()
+         snapshots = response.get("DBSnapshots", [])
+
+         if not snapshots:
+             logger.info("No RDS snapshots found")
+             return
+
+         logger.info(f"Found {len(snapshots)} RDS snapshots to analyze")
+
+         # Filter by snapshot type if specified
+         if snapshot_type:
+             original_count = len(snapshots)
+             snapshots = [s for s in snapshots if s.get("SnapshotType", "").lower() == snapshot_type.lower()]
+             logger.info(f"Filtered to {len(snapshots)} snapshots of type '{snapshot_type}'")
+
+         data = []
+         old_snapshots = []
+         manual_snapshots = []
+         automated_snapshots = []
+         total_storage = 0
+         total_estimated_cost = 0
+
+         for i, snapshot in enumerate(snapshots, 1):
+             snapshot_id = snapshot["DBSnapshotIdentifier"]
+             logger.info(f"Analyzing snapshot {i}/{len(snapshots)}: {snapshot_id}")
+
+             create_time = snapshot.get("SnapshotCreateTime")
+             age_days = calculate_snapshot_age(create_time) if create_time else 0
+             allocated_storage = snapshot.get("AllocatedStorage", 0)
+             storage_type = snapshot.get("StorageType", "gp2")
+             snap_type = snapshot.get("SnapshotType", "unknown")
+
+             snapshot_data = {
+                 "DBSnapshotIdentifier": snapshot_id,
+                 "DBInstanceIdentifier": snapshot.get("DBInstanceIdentifier", "Unknown"),
+                 "SnapshotCreateTime": create_time.strftime("%Y-%m-%d %H:%M:%S") if create_time else "Unknown",
+                 "AgeDays": age_days,
+                 "SnapshotType": snap_type,
+                 "Status": snapshot.get("Status", "Unknown"),
+                 "Engine": snapshot.get("Engine", "Unknown"),
+                 "EngineVersion": snapshot.get("EngineVersion", "Unknown"),
+                 "StorageType": storage_type,
+                 "AllocatedStorage": allocated_storage,
+                 "Encrypted": snapshot.get("Encrypted", False),
+                 "AvailabilityZone": snapshot.get("AvailabilityZone", "Unknown"),
+             }
+
+             # Cost analysis
+             if include_cost and allocated_storage > 0:
+                 estimated_cost = estimate_snapshot_cost(allocated_storage, storage_type, age_days)
+                 snapshot_data["EstimatedMonthlyCost"] = estimated_cost
+                 total_estimated_cost += estimated_cost
+             else:
+                 snapshot_data["EstimatedMonthlyCost"] = 0
+
+             # Categorization for analysis
+             if age_days >= old_days:
+                 old_snapshots.append(snapshot_id)
+                 snapshot_data["IsOld"] = True
+             else:
+                 snapshot_data["IsOld"] = False
+
+             if snap_type.lower() == "manual":
+                 manual_snapshots.append(snapshot_id)
+             elif snap_type.lower() == "automated":
+                 automated_snapshots.append(snapshot_id)
+
+             total_storage += allocated_storage
+
+             # Cleanup recommendations
+             recommendations = []
+             if age_days >= old_days and snap_type.lower() == "manual":
+                 recommendations.append(f"Consider deletion (>{old_days} days old)")
+             if snap_type.lower() == "automated" and age_days > 35:  # AWS default retention
+                 recommendations.append("Check retention policy")
+             if not snapshot.get("Encrypted", False):
+                 recommendations.append("Not encrypted")
+
+             snapshot_data["Recommendations"] = "; ".join(recommendations) if recommendations else "None"
+
+             data.append(snapshot_data)
+
+             # Log summary for this snapshot
+             status = "OLD" if age_days >= old_days else "RECENT"
+             logger.info(f" → {snap_type}, {age_days}d old, {allocated_storage}GB, {status}")
+
+         # Export results
+         write_to_csv(data, output_file)
+         logger.info(f"RDS snapshot analysis exported to: {output_file}")
+
+         # Summary report
+         logger.info("\n=== ANALYSIS SUMMARY ===")
+         logger.info(f"Total snapshots: {len(snapshots)}")
+         logger.info(f"Manual snapshots: {len(manual_snapshots)}")
+         logger.info(f"Automated snapshots: {len(automated_snapshots)}")
+         logger.info(f"Old snapshots (>{old_days} days): {len(old_snapshots)}")
+         logger.info(f"Total storage: {total_storage} GB")
+
+         if include_cost:
+             logger.info(f"Estimated total monthly cost: ${total_estimated_cost:.2f}")
+
+         # Cleanup recommendations
+         cleanup_candidates = [s for s in data if s["IsOld"] and s["SnapshotType"].lower() == "manual"]
+         if cleanup_candidates:
+             logger.warning(f"⚠ {len(cleanup_candidates)} old manual snapshots for review:")
+             for snap in cleanup_candidates:
+                 logger.warning(
+                     f" - {snap['DBSnapshotIdentifier']}: {snap['AgeDays']} days old, {snap['AllocatedStorage']}GB"
+                 )
+         else:
+             logger.info("✓ No old manual snapshots found")
+
+         # Encryption status
+         encrypted_count = sum(1 for s in data if s["Encrypted"])
+         unencrypted_count = len(data) - encrypted_count
+         logger.info(f"Encrypted snapshots: {encrypted_count}")
+         if unencrypted_count > 0:
+             logger.warning(f"⚠ Unencrypted snapshots: {unencrypted_count}")
+
+         # Engine distribution
+         engines = {}
+         for snapshot in data:
+             engine = snapshot["Engine"]
+             engines[engine] = engines.get(engine, 0) + 1
+
+         logger.info("Engine distribution:")
+         for engine, count in sorted(engines.items()):
+             logger.info(f" {engine}: {count} snapshots")
+
+     except Exception as e:
+         logger.error(f"Failed to analyze RDS snapshots: {e}")
+         raise
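
A quick way to exercise the new analyzer without installing the console scripts is Click's test runner. The sketch below is illustrative only: it assumes the import path shown in the file list above (`runbooks.remediation.rds_snapshot_list`) and AWS credentials resolvable from the environment; note the module writes through its logger, so output lands in whatever logging handlers are configured, not in `result.output`.

```python
# Hedged smoke test for the new RDS snapshot analyzer (not part of the package).
import logging

from click.testing import CliRunner

from runbooks.remediation.rds_snapshot_list import get_rds_snapshot_details

logging.basicConfig(level=logging.INFO)  # surface the module's logger.info output

runner = CliRunner()
# Audit manual snapshots older than 60 days and include the cost estimate.
result = runner.invoke(
    get_rds_snapshot_details,
    ["--old-days", "60", "--snapshot-type", "manual", "--include-cost",
     "--output-file", "/tmp/rds_snapshots.csv"],
)
assert result.exit_code == 0, result.exception
```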
runbooks/remediation/requirements.txt
@@ -0,0 +1,118 @@
+ aioquic==0.9.25
+ anyio==4.3.0
+ appnope==0.1.4
+ argon2-cffi==23.1.0
+ argon2-cffi-bindings==21.2.0
+ arrow==1.3.0
+ asgiref==3.7.2
+ asttokens==2.4.1
+ async-lru==2.0.4
+ attrs==23.2.0
+ bcrypt==3.2.2
+ beautifulsoup4==4.12.3
+ bleach==6.1.0
+ blinker==1.7.0
+ bokeh==3.3.4
+ boto3==1.34.2
+ botocore==1.34.2
+ certifi==2024.2.2
+ cffi==1.16.0
+ charset-normalizer==3.3.2
+ click==8.1.7
+ comm==0.2.1
+ contourpy==1.2.0
+ cryptography==42.0.4
+ debugpy==1.8.1
+ decorator==5.1.1
+ defusedxml==0.7.6
+ executing==2.0.1
+ fastjsonschema==2.19.1
+ fqdn==1.5.1
+ h11==0.14.0
+ h2==4.1.0
+ hpack==4.0.0
+ httpcore==1.0.3
+ httpx==0.26.0
+ hyperframe==6.0.1
+ idna==3.6
+ ipykernel==6.29.2
+ ipython==8.21.0
+ isoduration==20.11.0
+ itsdangerous==2.1.2
+ jedi==0.19.1
+ jmespath==1.0.1
+ json5==0.9.16
+ jsonpointer==2.4
+ jsonschema==4.21.1
+ jsonschema-specifications==2023.12.1
+ jupyter-events==0.9.0
+ jupyter-lsp==2.2.2
+ jupyter_client==8.6.0
+ jupyter_core==5.7.1
+ jupyterlab==4.1.2
+ kaitaistruct==0.10
+ ldap3==2.9.1
+ matplotlib-inline==0.1.6
+ mistune==3.0.2
+ mitmproxy-macos==0.5.1
+ msgpack==1.0.7
+ nbclient==0.9.0
+ nbconvert==7.16.1
+ nbformat==5.9.2
+ nest-asyncio==1.6.0
+ numpy==1.26.4
+ overrides==7.7.0
+ packaging==23.2
+ pandas==2.2.1
+ pandocfilters==1.5.1
+ paramiko==3.4.0
+ parso==0.8.3
+ passlib==1.7.4
+ pexpect==4.9.0
+ pillow==10.2.0
+ platformdirs==4.2.0
+ prompt-toolkit==3.0.43
+ protobuf==4.25.3
+ psutil==5.9.8
+ ptyprocess==0.7.6
+ publicsuffix2==2.20191221
+ pure-eval==0.2.2
+ pyasn1==0.5.1
+ pyasn1-modules==0.3.0
+ pycparser==2.21
+ pylsqpack==0.3.18
+ pyparsing==3.1.1
+ pyperclip==1.8.2
+ python-dateutil==2.8.2
+ python-json-logger==2.0.7
+ pytz==2024.1
+ pyzmq==25.1.2
+ referencing==0.33.0
+ requests==2.31.0
+ rfc3339-validator==0.1.4
+ rfc3986-validator==0.1.1
+ rpds-py==0.18.0
+ s3transfer==0.9.0
+ service-identity==24.1.0
+ six==1.16.0
+ sniffio==1.3.0
+ sortedcontainers==2.4.0
+ soupsieve==2.5
+ stack-data==0.6.3
+ terminado==0.18.0
+ tinycss2==1.2.1
+ tornado==6.4
+ tqdm==4.66.2
+ traitlets==5.14.1
+ types-python-dateutil==2.8.19.20240106
+ tzdata==2024.1
+ uri-template==1.3.0
+ urllib3==2.0.7
+ urwid-mitmproxy==2.1.2.1
+ wcwidth==0.2.13
+ webcolors==1.13
+ webencodings==0.5.1
+ websocket-client==1.7.0
+ wsproto==1.2.0
+ xyzservices==2023.10.1
+ zstandard==0.22.0
runbooks/remediation/s3_block_public_access.py
@@ -0,0 +1,159 @@
+ """
+ Enterprise S3 Public Access Security - Automated Bucket Hardening
+
+ ## Overview
+
+ This module provides comprehensive S3 public access blocking capabilities to prevent
+ accidental data exposure and enhance security posture. Public S3 buckets are a leading
+ cause of data breaches and security incidents in cloud environments.
+
+ ## Key Features
+
+ - **Comprehensive Detection**: Identifies buckets without public access blocks
+ - **Safe Configuration**: Enables all four public access block settings
+ - **Bulk Operations**: Efficiently processes all buckets in an account
+ - **Compliance Integration**: Supports CIS, NIST, and SOC2 requirements
+ - **Audit Trail**: Comprehensive logging of all security operations
+ - **Cost Optimization**: Prevents unexpected charges from public data transfer
+
+ ## Security Benefits
+
+ - **Data Protection**: Prevents accidental public exposure of sensitive data
+ - **Compliance Adherence**: Meets regulatory requirements for data privacy
+ - **Defense in Depth**: Adds bucket-level security controls
+ - **Risk Mitigation**: Reduces attack surface for data exfiltration
+
+ ## Public Access Block Settings
+
+ This tool enables all four critical settings:
+ 1. **BlockPublicAcls**: Blocks new public ACLs
+ 2. **IgnorePublicAcls**: Ignores existing public ACLs
+ 3. **BlockPublicPolicy**: Blocks new public bucket policies
+ 4. **RestrictPublicBuckets**: Restricts public bucket access
+
+ ## Usage Examples
+
+ ```bash
+ # Audit mode - detect buckets without blocks (safe)
+ python s3_block_public_access.py
+
+ # Enable public access blocks on all buckets
+ python s3_block_public_access.py --block
+ ```
+
+ ## Important Security Notes
+
+ ⚠️ **APPLICATION IMPACT**: May break applications relying on public S3 access
+ ⚠️ **WEBSITE HOSTING**: Will disable S3 static website hosting features
+ ⚠️ **CDN INTEGRATION**: May affect CloudFront and other CDN configurations
+
+ Version: 0.7.6 - Enterprise Production Ready
+ Compliance: CIS AWS Foundations 2.1.5, NIST SP 800-53
+ """
+
+ import logging
+ from typing import Any, Dict, List, Optional, Tuple
+
+ import click
+ from botocore.exceptions import BotoCoreError, ClientError
+
+ from .commons import display_aws_account_info, get_bucket_policy, get_client
+
+ # Configure enterprise logging
+ logger = logging.getLogger(__name__)
+ logger.setLevel(logging.INFO)
+
+
+ def check_flags(
+     public_access_block: Dict[str, Any], public_access_block_configuration: Dict[str, bool]
+ ) -> Optional[bool]:
+     """
+     Compare current public access block settings with target configuration.
+
+     This utility function validates whether a bucket's current public access block
+     configuration matches the desired security settings. It handles various edge
+     cases and data types that may be returned from the S3 API.
+
+     ## Implementation Details
+
+     - Performs deep comparison of configuration dictionaries
+     - Handles None and string values gracefully
+     - Returns None for invalid or incomparable configurations
+     - Provides type safety for configuration validation
+
+     Args:
+         public_access_block (Dict[str, Any]): Current bucket's public access block configuration
+             May contain boolean values or be None/string
+         public_access_block_configuration (Dict[str, bool]): Target configuration with boolean values
+             Should contain all four PAB settings
+
+     Returns:
+         Optional[bool]: True if configurations match exactly, False if different,
+         None if comparison is not possible
+
+     Example:
+         >>> current_config = {'BlockPublicAcls': True, 'IgnorePublicAcls': True, 'BlockPublicPolicy': True, 'RestrictPublicBuckets': True}
+         >>> target_config = {'BlockPublicAcls': True, 'IgnorePublicAcls': True, 'BlockPublicPolicy': True, 'RestrictPublicBuckets': True}
+         >>> check_flags(current_config, target_config)
+         True
+     """
+
+     # Input validation
+     if not isinstance(public_access_block_configuration, dict):
+         logger.debug("Target configuration is not a dictionary")
+         return None
+
+     # Handle case where current configuration is not a dictionary
+     if not isinstance(public_access_block, dict):
+         logger.debug(f"Current public access block is not a dictionary: {type(public_access_block)}")
+         return None
+
+     try:
+         # Perform deep comparison of configuration dictionaries
+         return public_access_block == public_access_block_configuration
+
+     except Exception as e:
+         logger.debug(f"Error comparing public access block configurations: {e}")
+         return None
+
+
+ @click.command()
+ @click.option("--block", default=False, is_flag=True, help="Enable public access block on all buckets")
+ def enable_public_access_block_on_all_buckets(block: bool = False):
+     s3 = get_client("s3")
+
+     logger.info(f"Using {display_aws_account_info()}")
+
+     if block:
+         logger.info("Enabling 'Block Public Access' on all buckets...")
+
+     response = s3.list_buckets()
+
+     # Define the public access block configuration
+     public_access_block_configuration = {
+         "BlockPublicAcls": True,
+         "IgnorePublicAcls": True,
+         "BlockPublicPolicy": True,
+         "RestrictPublicBuckets": True,
+     }
+
+     # Apply the configuration to each bucket
+     for bucket in response["Buckets"]:
+         bucket_name = bucket["Name"]
+         policy, public_access_block = get_bucket_policy(bucket_name)
+         if block and (
+             (public_access_block == "No public access block configuration")
+             or (not check_flags(public_access_block, public_access_block_configuration))
+         ):
+             logger.info(f"Enabling 'Block Public Access' on bucket: {bucket_name} as it does not have it enabled...")
+             if public_access_block == "Access Denied":
+                 logger.warning(f"Access Denied to enable 'Block Public Access' on Bucket: {bucket_name}")
+                 continue
+             s3.put_public_access_block(
+                 Bucket=bucket_name, PublicAccessBlockConfiguration=public_access_block_configuration
+             )
+             policy, public_access_block = get_bucket_policy(bucket_name)
+             logger.info(
+                 f"After enabling 'Block Public Access' on Bucket: {bucket_name},"
+                 f" Public Access Block: {public_access_block}"
+             )
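
Stripped of the helper plumbing, the hardening loop above is two S3 API calls per bucket: read the current PublicAccessBlock configuration and overwrite it if it differs from the target. A minimal standalone sketch with plain boto3 (the bucket name is a placeholder; the package's `get_client` and `get_bucket_policy` helpers are bypassed, and default credentials are assumed):

```python
# Minimal per-bucket sketch of the hardening step, not the package's own code path.
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")
bucket_name = "example-bucket"  # placeholder

pab_config = {
    "BlockPublicAcls": True,
    "IgnorePublicAcls": True,
    "BlockPublicPolicy": True,
    "RestrictPublicBuckets": True,
}

try:
    current = s3.get_public_access_block(Bucket=bucket_name)["PublicAccessBlockConfiguration"]
except ClientError as e:
    if e.response["Error"]["Code"] != "NoSuchPublicAccessBlockConfiguration":
        raise
    current = None  # bucket has no public access block configured at all

if current != pab_config:
    s3.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=pab_config)
```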
runbooks/remediation/s3_bucket_public_access.py
@@ -0,0 +1,143 @@
+ """
+ S3 Public Access Analyzer - External HTTP testing for bucket accessibility.
+ """
+
+ import logging
+
+ import click
+ import requests
+ from botocore.exceptions import ClientError
+ from requests.packages.urllib3.exceptions import InsecureRequestWarning
+
+ from .commons import display_aws_account_info, get_client, write_to_csv
+
+ requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
+
+ logger = logging.getLogger(__name__)
+
+
+ def check_bucket_public_http_access(bucket_name: str, timeout: int = 10):
+     """Check if S3 bucket is accessible via HTTP without credentials."""
+     try:
+         url = f"https://{bucket_name}.s3.amazonaws.com"
+         logger.debug(f"Testing HTTP access to: {url}")
+
+         # Test GET request
+         response = requests.get(url, verify=False, timeout=timeout)
+
+         # Check if bucket responds (200=accessible, 403=exists but protected)
+         if response.status_code in [200, 403]:
+             logger.info(f" → Bucket responds: {response.status_code}")
+
+             # Test OPTIONS for additional info
+             try:
+                 options_response = requests.options(url, verify=False, timeout=5)
+                 cors_info = options_response.headers.get("Access-Control-Allow-Origin", "Not configured")
+             except Exception:
+                 cors_info = "Failed to check"
+
+             return {
+                 "accessible": True,
+                 "status_code": response.status_code,
+                 "content_length": len(response.text),
+                 "server": response.headers.get("Server", "Unknown"),
+                 "cors": cors_info,
+                 "last_modified": response.headers.get("Last-Modified", "Unknown"),
+             }
+         else:
+             logger.debug(f" → Bucket not accessible: {response.status_code}")
+             return {"accessible": False, "status_code": response.status_code}
+
+     except requests.exceptions.RequestException as e:
+         logger.debug(f" → HTTP test failed for {bucket_name}: {e}")
+         return {"accessible": False, "error": str(e)}
+
+
+ def get_bucket_list_from_aws():
+     """Get list of S3 buckets from AWS API."""
+     try:
+         s3 = get_client("s3")
+         response = s3.list_buckets()
+         return [bucket["Name"] for bucket in response.get("Buckets", [])]
+     except ClientError as e:
+         logger.error(f"Failed to list S3 buckets: {e}")
+         return []
+
+
+ @click.command()
+ @click.option("--bucket-names", help="Comma-separated list of bucket names to test (uses AWS list if not provided)")
+ @click.option("--output-file", default="s3_public_access_test.csv", help="Output CSV file path")
+ @click.option("--timeout", default=10, help="HTTP request timeout in seconds")
+ def analyze_s3_public_access(bucket_names, output_file, timeout):
+     """Analyze S3 buckets for public HTTP accessibility."""
+     logger.info(f"S3 public access analysis in {display_aws_account_info()}")
+
+     try:
+         # Get bucket list
+         if bucket_names:
+             bucket_list = [name.strip() for name in bucket_names.split(",")]
+             logger.info(f"Testing provided buckets: {bucket_list}")
+         else:
+             logger.info("Getting bucket list from AWS...")
+             bucket_list = get_bucket_list_from_aws()
+
+         if not bucket_list:
+             logger.warning("No buckets to test")
+             return
+
+         logger.info(f"Testing {len(bucket_list)} buckets for public HTTP access")
+
+         # Test each bucket
+         results = []
+         public_buckets = []
+
+         for i, bucket_name in enumerate(bucket_list, 1):
+             logger.info(f"Testing bucket {i}/{len(bucket_list)}: {bucket_name}")
+
+             access_info = check_bucket_public_http_access(bucket_name, timeout)
+
+             result = {
+                 "Bucket Name": bucket_name,
+                 "HTTP Accessible": access_info.get("accessible", False),
+                 "Status Code": access_info.get("status_code", "N/A"),
+                 "Content Length": access_info.get("content_length", 0),
+                 "Server": access_info.get("server", "Unknown"),
+                 "CORS Configuration": access_info.get("cors", "Not checked"),
+                 "Last Modified": access_info.get("last_modified", "Unknown"),
+                 "Error": access_info.get("error", "None"),
+             }
+
+             results.append(result)
+
+             if access_info.get("accessible"):
+                 public_buckets.append(bucket_name)
+                 status = access_info.get("status_code")
+                 if status == 200:
+                     logger.warning(f" ⚠ PUBLICLY READABLE: {bucket_name}")
+                 elif status == 403:
+                     logger.info(f" ℹ Exists but protected: {bucket_name}")
+
+         # Export results
+         write_to_csv(results, output_file)
+         logger.info(f"Results exported to: {output_file}")
+
+         # Summary
+         logger.info("\n=== SUMMARY ===")
+         logger.info(f"Total buckets tested: {len(bucket_list)}")
+         logger.info(f"Buckets responding to HTTP: {len(public_buckets)}")
+
+         if public_buckets:
+             logger.warning(f"⚠ {len(public_buckets)} buckets are accessible via HTTP:")
+             for bucket in public_buckets:
+                 logger.warning(f" - {bucket}")
+             logger.warning("Review these buckets for unintended public access")
+         else:
+             logger.info("✓ No buckets found with public HTTP access")
+
+     except Exception as e:
+         logger.error(f"Failed to analyze S3 public access: {e}")
+         raise
+
+
+ if __name__ == "__main__":
+     analyze_s3_public_access()
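
The probe itself needs nothing from the package: an anonymous GET against the bucket's virtual-hosted-style URL distinguishes readable, protected, and nonexistent buckets. A self-contained sketch with a placeholder bucket name; the status-code interpretation mirrors the module above, and a 404 generally indicates no bucket by that name:

```python
# Ad-hoc single-bucket probe in the spirit of check_bucket_public_http_access.
import requests

bucket_name = "example-bucket"  # placeholder
url = f"https://{bucket_name}.s3.amazonaws.com"

resp = requests.get(url, timeout=10)
if resp.status_code == 200:
    print(f"{bucket_name}: anonymously readable (listing or content returned)")
elif resp.status_code == 403:
    print(f"{bucket_name}: exists but anonymous access is denied")
elif resp.status_code == 404:
    print(f"{bucket_name}: no bucket by this name")
else:
    print(f"{bucket_name}: unexpected status {resp.status_code}")
```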
runbooks/remediation/s3_disable_static_website_hosting.py
@@ -0,0 +1,74 @@
+ """
+ S3 Static Website Hosting Disable - Remove website configuration from S3 buckets.
+ """
+
+ import logging
+
+ import click
+ from botocore.exceptions import ClientError
+
+ from .commons import display_aws_account_info, get_client
+
+ logger = logging.getLogger(__name__)
+
+
+ @click.command()
+ @click.option("--dry-run/--no-dry-run", default=True, help="Preview mode - show actions without making changes")
+ def disable_static_web_hosting_on_all_buckets(dry_run: bool = True):
+     """Disable static website hosting on all S3 buckets."""
+     logger.info(f"Checking S3 static website hosting in {display_aws_account_info()}")
+
+     try:
+         s3 = get_client("s3")
+         response = s3.list_buckets()
+         buckets = response.get("Buckets", [])
+
+         if not buckets:
+             logger.info("No S3 buckets found")
+             return
+
+         logger.info(f"Found {len(buckets)} buckets to check")
+
+         # Track results
+         buckets_with_hosting = []
+         buckets_disabled = []
+
+         # Check each bucket for static website hosting
+         for bucket in buckets:
+             bucket_name = bucket["Name"]
+             logger.info(f"Checking bucket: {bucket_name}")
+
+             try:
+                 # Check if website configuration exists
+                 s3.get_bucket_website(Bucket=bucket_name)
+
+                 # If we get here, website hosting is enabled
+                 buckets_with_hosting.append(bucket_name)
+                 logger.info(" ✗ Static website hosting is enabled")
+
+                 # Disable hosting if not in dry-run mode
+                 if not dry_run:
+                     logger.info(" → Disabling static website hosting...")
+                     s3.delete_bucket_website(Bucket=bucket_name)
+                     buckets_disabled.append(bucket_name)
+                     logger.info(" ✓ Successfully disabled static website hosting")
+
+             except ClientError as e:
+                 error_code = e.response.get("Error", {}).get("Code", "Unknown")
+                 if error_code == "NoSuchWebsiteConfiguration":
+                     logger.info(" ✓ Static website hosting already disabled")
+                 else:
+                     logger.error(f" ✗ Error checking bucket: {e}")
+
+         # Summary
+         logger.info("\n=== SUMMARY ===")
+         logger.info(f"Buckets with static hosting: {len(buckets_with_hosting)}")
+
+         if dry_run and buckets_with_hosting:
+             logger.info(f"To disable hosting on {len(buckets_with_hosting)} buckets, run with --no-dry-run")
+         elif not dry_run:
+             logger.info(f"Successfully disabled hosting on {len(buckets_disabled)} buckets")
+
+     except Exception as e:
+         logger.error(f"Failed to process S3 static website hosting: {e}")
+         raise
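
The control flow above hinges on one boto3 behavior: `get_bucket_website` raises a `ClientError` with code `NoSuchWebsiteConfiguration` when hosting is off, so "no exception" means hosting is on. A single-bucket sketch of the same check-then-delete flow (placeholder bucket name, preview mode by default, default credentials assumed):

```python
# Single-bucket illustration of the dry-run flow above (not part of the package).
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")
bucket_name = "example-bucket"  # placeholder
dry_run = True  # flip to False to actually remove the website configuration

try:
    s3.get_bucket_website(Bucket=bucket_name)  # raises if hosting is disabled
    if dry_run:
        print(f"{bucket_name}: website hosting enabled (dry run, nothing changed)")
    else:
        s3.delete_bucket_website(Bucket=bucket_name)
        print(f"{bucket_name}: website hosting disabled")
except ClientError as e:
    if e.response["Error"]["Code"] == "NoSuchWebsiteConfiguration":
        print(f"{bucket_name}: website hosting already disabled")
    else:
        raise
```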