runbooks 0.7.0__py3-none-any.whl → 0.7.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- runbooks/__init__.py +87 -37
- runbooks/cfat/README.md +300 -49
- runbooks/cfat/__init__.py +2 -2
- runbooks/finops/__init__.py +1 -1
- runbooks/finops/cli.py +1 -1
- runbooks/inventory/collectors/__init__.py +8 -0
- runbooks/inventory/collectors/aws_management.py +791 -0
- runbooks/inventory/collectors/aws_networking.py +3 -3
- runbooks/main.py +3389 -782
- runbooks/operate/__init__.py +207 -0
- runbooks/operate/base.py +311 -0
- runbooks/operate/cloudformation_operations.py +619 -0
- runbooks/operate/cloudwatch_operations.py +496 -0
- runbooks/operate/dynamodb_operations.py +812 -0
- runbooks/operate/ec2_operations.py +926 -0
- runbooks/operate/iam_operations.py +569 -0
- runbooks/operate/s3_operations.py +1211 -0
- runbooks/operate/tagging_operations.py +655 -0
- runbooks/remediation/CLAUDE.md +100 -0
- runbooks/remediation/DOME9.md +218 -0
- runbooks/remediation/README.md +26 -0
- runbooks/remediation/Tests/__init__.py +0 -0
- runbooks/remediation/Tests/update_policy.py +74 -0
- runbooks/remediation/__init__.py +95 -0
- runbooks/remediation/acm_cert_expired_unused.py +98 -0
- runbooks/remediation/acm_remediation.py +875 -0
- runbooks/remediation/api_gateway_list.py +167 -0
- runbooks/remediation/base.py +643 -0
- runbooks/remediation/cloudtrail_remediation.py +908 -0
- runbooks/remediation/cloudtrail_s3_modifications.py +296 -0
- runbooks/remediation/cognito_active_users.py +78 -0
- runbooks/remediation/cognito_remediation.py +856 -0
- runbooks/remediation/cognito_user_password_reset.py +163 -0
- runbooks/remediation/commons.py +455 -0
- runbooks/remediation/dynamodb_optimize.py +155 -0
- runbooks/remediation/dynamodb_remediation.py +744 -0
- runbooks/remediation/dynamodb_server_side_encryption.py +108 -0
- runbooks/remediation/ec2_public_ips.py +134 -0
- runbooks/remediation/ec2_remediation.py +892 -0
- runbooks/remediation/ec2_subnet_disable_auto_ip_assignment.py +72 -0
- runbooks/remediation/ec2_unattached_ebs_volumes.py +448 -0
- runbooks/remediation/ec2_unused_security_groups.py +202 -0
- runbooks/remediation/kms_enable_key_rotation.py +651 -0
- runbooks/remediation/kms_remediation.py +717 -0
- runbooks/remediation/lambda_list.py +243 -0
- runbooks/remediation/lambda_remediation.py +971 -0
- runbooks/remediation/multi_account.py +569 -0
- runbooks/remediation/rds_instance_list.py +199 -0
- runbooks/remediation/rds_remediation.py +873 -0
- runbooks/remediation/rds_snapshot_list.py +192 -0
- runbooks/remediation/requirements.txt +118 -0
- runbooks/remediation/s3_block_public_access.py +159 -0
- runbooks/remediation/s3_bucket_public_access.py +143 -0
- runbooks/remediation/s3_disable_static_website_hosting.py +74 -0
- runbooks/remediation/s3_downloader.py +215 -0
- runbooks/remediation/s3_enable_access_logging.py +562 -0
- runbooks/remediation/s3_encryption.py +526 -0
- runbooks/remediation/s3_force_ssl_secure_policy.py +143 -0
- runbooks/remediation/s3_list.py +141 -0
- runbooks/remediation/s3_object_search.py +201 -0
- runbooks/remediation/s3_remediation.py +816 -0
- runbooks/remediation/scan_for_phrase.py +425 -0
- runbooks/remediation/workspaces_list.py +220 -0
- runbooks/security/__init__.py +9 -10
- runbooks/security/security_baseline_tester.py +4 -2
- runbooks-0.7.6.dist-info/METADATA +608 -0
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/RECORD +84 -76
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/entry_points.txt +0 -1
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/top_level.txt +0 -1
- jupyter-agent/.env +0 -2
- jupyter-agent/.env.template +0 -2
- jupyter-agent/.gitattributes +0 -35
- jupyter-agent/.gradio/certificate.pem +0 -31
- jupyter-agent/README.md +0 -16
- jupyter-agent/__main__.log +0 -8
- jupyter-agent/app.py +0 -256
- jupyter-agent/cloudops-agent.png +0 -0
- jupyter-agent/ds-system-prompt.txt +0 -154
- jupyter-agent/jupyter-agent.png +0 -0
- jupyter-agent/llama3_template.jinja +0 -123
- jupyter-agent/requirements.txt +0 -9
- jupyter-agent/tmp/4ojbs8a02ir/jupyter-agent.ipynb +0 -68
- jupyter-agent/tmp/cm5iasgpm3p/jupyter-agent.ipynb +0 -91
- jupyter-agent/tmp/crqbsseag5/jupyter-agent.ipynb +0 -91
- jupyter-agent/tmp/hohanq1u097/jupyter-agent.ipynb +0 -57
- jupyter-agent/tmp/jns1sam29wm/jupyter-agent.ipynb +0 -53
- jupyter-agent/tmp/jupyter-agent.ipynb +0 -27
- jupyter-agent/utils.py +0 -409
- runbooks/aws/__init__.py +0 -58
- runbooks/aws/dynamodb_operations.py +0 -231
- runbooks/aws/ec2_copy_image_cross-region.py +0 -195
- runbooks/aws/ec2_describe_instances.py +0 -202
- runbooks/aws/ec2_ebs_snapshots_delete.py +0 -186
- runbooks/aws/ec2_run_instances.py +0 -213
- runbooks/aws/ec2_start_stop_instances.py +0 -212
- runbooks/aws/ec2_terminate_instances.py +0 -143
- runbooks/aws/ec2_unused_eips.py +0 -196
- runbooks/aws/ec2_unused_volumes.py +0 -188
- runbooks/aws/s3_create_bucket.py +0 -142
- runbooks/aws/s3_list_buckets.py +0 -152
- runbooks/aws/s3_list_objects.py +0 -156
- runbooks/aws/s3_object_operations.py +0 -183
- runbooks/aws/tagging_lambda_handler.py +0 -183
- runbooks/inventory/FAILED_SCRIPTS_TROUBLESHOOTING.md +0 -619
- runbooks/inventory/PASSED_SCRIPTS_GUIDE.md +0 -738
- runbooks/inventory/aws_organization.png +0 -0
- runbooks/inventory/cfn_move_stack_instances.py +0 -1526
- runbooks/inventory/delete_s3_buckets_objects.py +0 -169
- runbooks/inventory/lockdown_cfn_stackset_role.py +0 -224
- runbooks/inventory/update_aws_actions.py +0 -173
- runbooks/inventory/update_cfn_stacksets.py +0 -1215
- runbooks/inventory/update_cloudwatch_logs_retention_policy.py +0 -294
- runbooks/inventory/update_iam_roles_cross_accounts.py +0 -478
- runbooks/inventory/update_s3_public_access_block.py +0 -539
- runbooks/organizations/__init__.py +0 -12
- runbooks/organizations/manager.py +0 -374
- runbooks-0.7.0.dist-info/METADATA +0 -375
- /runbooks/inventory/{tests → Tests}/common_test_data.py +0 -0
- /runbooks/inventory/{tests → Tests}/common_test_functions.py +0 -0
- /runbooks/inventory/{tests → Tests}/script_test_data.py +0 -0
- /runbooks/inventory/{tests → Tests}/setup.py +0 -0
- /runbooks/inventory/{tests → Tests}/src.py +0 -0
- /runbooks/inventory/{tests/test_inventory_modules.py → Tests/test_Inventory_Modules.py} +0 -0
- /runbooks/inventory/{tests → Tests}/test_cfn_describe_stacks.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_ec2_describe_instances.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_lambda_list_functions.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_moto_integration_example.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_org_list_accounts.py +0 -0
- /runbooks/inventory/{Inventory_Modules.py → inventory_modules.py} +0 -0
- /runbooks/{aws → operate}/tags.json +0 -0
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/WHEEL +0 -0
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,215 @@
|
|
1
|
+
"""
|
2
|
+
S3 Bulk Downloader - Download files from S3 with concurrent processing and progress tracking.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import concurrent.futures
|
6
|
+
import logging
|
7
|
+
import os
|
8
|
+
from pathlib import Path
|
9
|
+
|
10
|
+
import click
|
11
|
+
from botocore.exceptions import ClientError
|
12
|
+
|
13
|
+
from .commons import display_aws_account_info, get_client
|
14
|
+
|
15
|
+
logger = logging.getLogger(__name__)
|
16
|
+
|
17
|
+
|
18
|
+
def download_file(s3, bucket, key, destination, preserve_structure=False):
    """Download a single S3 object to *destination* with error handling.

    Args:
        s3: boto3 S3 client (must support ``download_file``).
        bucket: Source bucket name.
        key: Object key to download.
        destination: Local directory to write into.
        preserve_structure: If True, recreate the key's prefix path under
            *destination*; otherwise flatten to just the final filename.

    Returns:
        dict with ``status`` ("downloaded" | "skipped" | "error") and
        ``key``; on success also ``local_path`` and ``size`` (bytes), on
        failure an ``error`` message. Never raises for a single object so
        one bad key cannot abort a bulk download.

    Note:
        When flattening, distinct keys that share a filename collide: the
        first one written wins and later ones are reported as "skipped".
    """
    try:
        if preserve_structure:
            # Mirror the S3 key's directory layout under the destination.
            local_path = os.path.join(destination, key)
            parent = os.path.dirname(local_path)
            # Guard: dirname can be "" (empty destination with a bare key),
            # and os.makedirs("") raises FileNotFoundError.
            if parent:
                os.makedirs(parent, exist_ok=True)
        else:
            # Flatten the structure: keep only the final path component.
            filename = key.split("/")[-1]
            local_path = os.path.join(destination, filename)

        # Idempotency: never re-download a file that is already present.
        if os.path.exists(local_path):
            file_size = os.path.getsize(local_path)
            logger.debug(f"File {local_path} already exists ({file_size} bytes). Skipping.")
            return {"status": "skipped", "key": key, "local_path": local_path, "size": file_size}

        # Download the object, then report the on-disk size.
        s3.download_file(bucket, key, local_path)
        file_size = os.path.getsize(local_path)
        logger.info(f"✓ Downloaded: {key} ({file_size} bytes)")

        return {"status": "downloaded", "key": key, "local_path": local_path, "size": file_size}

    except ClientError as e:
        # Map the common S3 error codes to distinct log levels.
        error_code = e.response.get("Error", {}).get("Code", "Unknown")
        if error_code == "NoSuchKey":
            logger.warning(f"⚠ Key not found: {key}")
        elif error_code == "AccessDenied":
            logger.error(f"✗ Access denied: {key}")
        else:
            logger.error(f"✗ Failed to download {key}: {e}")
        return {"status": "error", "key": key, "error": str(e)}

    except Exception as e:
        # Catch-all boundary so callers always get a structured result.
        logger.error(f"✗ Unexpected error downloading {key}: {e}")
        return {"status": "error", "key": key, "error": str(e)}
58
|
+
|
59
|
+
|
60
|
+
def get_object_list(s3, bucket, prefix):
    """Return all non-directory objects under *prefix* in *bucket*.

    Each entry is a dict with "key", "size" and "modified". Keys ending in
    "/" (zero-byte directory placeholders) are excluded. On a listing
    failure the underlying ClientError is re-raised after a targeted log
    message.
    """
    try:
        # Paginate so buckets with more than 1000 objects are fully listed.
        pages = s3.get_paginator("list_objects_v2").paginate(Bucket=bucket, Prefix=prefix)
        return [
            {"key": entry["Key"], "size": entry["Size"], "modified": entry["LastModified"]}
            for page in pages
            for entry in page.get("Contents", [])
            if not entry["Key"].endswith("/")
        ]

    except ClientError as e:
        code = e.response.get("Error", {}).get("Code", "Unknown")
        if code == "NoSuchBucket":
            logger.error(f"Bucket '{bucket}' does not exist")
        elif code == "AccessDenied":
            logger.error(f"Access denied to bucket '{bucket}'")
        else:
            logger.error(f"Failed to list objects in bucket '{bucket}': {e}")
        raise
|
87
|
+
|
88
|
+
|
89
|
+
@click.command()
@click.option("--bucket", required=True, help="S3 bucket name")
@click.option("--prefix", default="", help="S3 prefix/directory path (e.g., 'folder1/subfolder2/')")
@click.option("--destination", default="./downloads", help="Local destination directory")
@click.option("--threads", default=10, help="Number of concurrent download threads")
@click.option("--preserve-structure", is_flag=True, help="Preserve S3 directory structure locally")
@click.option("--dry-run", is_flag=True, help="Show what would be downloaded without actually downloading")
@click.option("--file-pattern", help="Only download files matching this pattern (e.g., '*.pdf')")
def download_files(bucket, prefix, destination, threads, preserve_structure, dry_run, file_pattern):
    """Download files from S3 with concurrent processing and progress tracking."""
    logger.info(f"S3 bulk download from {display_aws_account_info()}")

    try:
        s3 = get_client("s3")

        # Validate bucket access up front so we fail fast with a clear message.
        logger.info(f"Checking access to bucket: {bucket}")
        try:
            s3.head_bucket(Bucket=bucket)
            logger.info("✓ Bucket access confirmed")
        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "Unknown")
            # head_bucket surfaces bare HTTP status codes ("404"/"403") in
            # Error.Code rather than symbolic names, so match both forms —
            # matching only "NotFound"/"Forbidden" sent every failure to the
            # generic else branch.
            if error_code in ("404", "NotFound"):
                logger.error(f"✗ Bucket '{bucket}' not found")
            elif error_code in ("403", "Forbidden"):
                logger.error(f"✗ Access forbidden to bucket '{bucket}'")
            else:
                logger.error(f"✗ Cannot access bucket '{bucket}': {e}")
            return

        # Create destination directory (including parents) if missing.
        Path(destination).mkdir(parents=True, exist_ok=True)
        logger.info(f"Destination directory: {os.path.abspath(destination)}")

        # Enumerate candidate objects under the prefix.
        logger.info(f"Listing objects with prefix: '{prefix}'")
        objects = get_object_list(s3, bucket, prefix)

        if not objects:
            logger.warning(f"No objects found with prefix '{prefix}'")
            return

        # Optional glob-style filtering on the full object key.
        if file_pattern:
            import fnmatch

            original_count = len(objects)
            objects = [obj for obj in objects if fnmatch.fnmatch(obj["key"], file_pattern)]
            logger.info(f"Filtered to {len(objects)} objects matching pattern '{file_pattern}' (from {original_count})")

            if not objects:
                logger.warning("No objects to download after filtering")
                return

        # Report the aggregate transfer size before starting.
        total_size = sum(obj["size"] for obj in objects)
        logger.info(f"Found {len(objects)} files to download ({total_size:,} bytes total)")

        # Dry-run: preview up to 10 keys and exit without downloading.
        if dry_run:
            logger.info("DRY-RUN MODE: Files that would be downloaded:")
            for obj in objects[:10]:  # Show first 10
                logger.info(f"  {obj['key']} ({obj['size']:,} bytes)")
            if len(objects) > 10:
                logger.info(f"  ... and {len(objects) - 10} more files")
            logger.info(f"Total: {len(objects)} files, {total_size:,} bytes")
            return

        # Fan the per-object downloads out over a thread pool (I/O-bound).
        logger.info(f"Starting download with {threads} concurrent threads...")

        downloaded_count = 0
        skipped_count = 0
        error_count = 0
        downloaded_size = 0

        with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
            # Submit all download tasks, remembering which key each belongs to.
            future_to_key = {
                executor.submit(download_file, s3, bucket, obj["key"], destination, preserve_structure): obj["key"]
                for obj in objects
            }

            # Tally results as downloads complete (in completion order).
            for future in concurrent.futures.as_completed(future_to_key):
                key = future_to_key[future]
                try:
                    result = future.result()

                    if result["status"] == "downloaded":
                        downloaded_count += 1
                        downloaded_size += result["size"]
                    elif result["status"] == "skipped":
                        skipped_count += 1
                    elif result["status"] == "error":
                        error_count += 1

                    # Progress update every 10 files and on the final file.
                    total_processed = downloaded_count + skipped_count + error_count
                    if total_processed % 10 == 0 or total_processed == len(objects):
                        logger.info(f"Progress: {total_processed}/{len(objects)} files processed")

                except Exception as exc:
                    # download_file returns structured errors, so this only
                    # fires on unexpected executor/future failures.
                    logger.error(f"✗ Error processing {key}: {exc}")
                    error_count += 1

        # Final summary with an overall verdict.
        logger.info("\n=== DOWNLOAD SUMMARY ===")
        logger.info(f"Total files: {len(objects)}")
        logger.info(f"Downloaded: {downloaded_count} files ({downloaded_size:,} bytes)")
        logger.info(f"Skipped (existing): {skipped_count}")
        logger.info(f"Errors: {error_count}")

        if error_count == 0:
            logger.info("✅ All downloads completed successfully")
        elif downloaded_count > 0:
            logger.warning(f"⚠ Download completed with {error_count} errors")
        else:
            logger.error("❌ Download failed")

    except Exception as e:
        logger.error(f"Failed to download files: {e}")
        raise
|
212
|
+
|
213
|
+
|
214
|
+
# CLI entry point: invoke the click command when run directly as a script.
if __name__ == "__main__":
    download_files()
|