runbooks 0.7.0__py3-none-any.whl → 0.7.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- runbooks/__init__.py +87 -37
- runbooks/cfat/README.md +300 -49
- runbooks/cfat/__init__.py +2 -2
- runbooks/finops/__init__.py +1 -1
- runbooks/finops/cli.py +1 -1
- runbooks/inventory/collectors/__init__.py +8 -0
- runbooks/inventory/collectors/aws_management.py +791 -0
- runbooks/inventory/collectors/aws_networking.py +3 -3
- runbooks/main.py +3389 -782
- runbooks/operate/__init__.py +207 -0
- runbooks/operate/base.py +311 -0
- runbooks/operate/cloudformation_operations.py +619 -0
- runbooks/operate/cloudwatch_operations.py +496 -0
- runbooks/operate/dynamodb_operations.py +812 -0
- runbooks/operate/ec2_operations.py +926 -0
- runbooks/operate/iam_operations.py +569 -0
- runbooks/operate/s3_operations.py +1211 -0
- runbooks/operate/tagging_operations.py +655 -0
- runbooks/remediation/CLAUDE.md +100 -0
- runbooks/remediation/DOME9.md +218 -0
- runbooks/remediation/README.md +26 -0
- runbooks/remediation/Tests/__init__.py +0 -0
- runbooks/remediation/Tests/update_policy.py +74 -0
- runbooks/remediation/__init__.py +95 -0
- runbooks/remediation/acm_cert_expired_unused.py +98 -0
- runbooks/remediation/acm_remediation.py +875 -0
- runbooks/remediation/api_gateway_list.py +167 -0
- runbooks/remediation/base.py +643 -0
- runbooks/remediation/cloudtrail_remediation.py +908 -0
- runbooks/remediation/cloudtrail_s3_modifications.py +296 -0
- runbooks/remediation/cognito_active_users.py +78 -0
- runbooks/remediation/cognito_remediation.py +856 -0
- runbooks/remediation/cognito_user_password_reset.py +163 -0
- runbooks/remediation/commons.py +455 -0
- runbooks/remediation/dynamodb_optimize.py +155 -0
- runbooks/remediation/dynamodb_remediation.py +744 -0
- runbooks/remediation/dynamodb_server_side_encryption.py +108 -0
- runbooks/remediation/ec2_public_ips.py +134 -0
- runbooks/remediation/ec2_remediation.py +892 -0
- runbooks/remediation/ec2_subnet_disable_auto_ip_assignment.py +72 -0
- runbooks/remediation/ec2_unattached_ebs_volumes.py +448 -0
- runbooks/remediation/ec2_unused_security_groups.py +202 -0
- runbooks/remediation/kms_enable_key_rotation.py +651 -0
- runbooks/remediation/kms_remediation.py +717 -0
- runbooks/remediation/lambda_list.py +243 -0
- runbooks/remediation/lambda_remediation.py +971 -0
- runbooks/remediation/multi_account.py +569 -0
- runbooks/remediation/rds_instance_list.py +199 -0
- runbooks/remediation/rds_remediation.py +873 -0
- runbooks/remediation/rds_snapshot_list.py +192 -0
- runbooks/remediation/requirements.txt +118 -0
- runbooks/remediation/s3_block_public_access.py +159 -0
- runbooks/remediation/s3_bucket_public_access.py +143 -0
- runbooks/remediation/s3_disable_static_website_hosting.py +74 -0
- runbooks/remediation/s3_downloader.py +215 -0
- runbooks/remediation/s3_enable_access_logging.py +562 -0
- runbooks/remediation/s3_encryption.py +526 -0
- runbooks/remediation/s3_force_ssl_secure_policy.py +143 -0
- runbooks/remediation/s3_list.py +141 -0
- runbooks/remediation/s3_object_search.py +201 -0
- runbooks/remediation/s3_remediation.py +816 -0
- runbooks/remediation/scan_for_phrase.py +425 -0
- runbooks/remediation/workspaces_list.py +220 -0
- runbooks/security/__init__.py +9 -10
- runbooks/security/security_baseline_tester.py +4 -2
- runbooks-0.7.6.dist-info/METADATA +608 -0
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/RECORD +84 -76
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/entry_points.txt +0 -1
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/top_level.txt +0 -1
- jupyter-agent/.env +0 -2
- jupyter-agent/.env.template +0 -2
- jupyter-agent/.gitattributes +0 -35
- jupyter-agent/.gradio/certificate.pem +0 -31
- jupyter-agent/README.md +0 -16
- jupyter-agent/__main__.log +0 -8
- jupyter-agent/app.py +0 -256
- jupyter-agent/cloudops-agent.png +0 -0
- jupyter-agent/ds-system-prompt.txt +0 -154
- jupyter-agent/jupyter-agent.png +0 -0
- jupyter-agent/llama3_template.jinja +0 -123
- jupyter-agent/requirements.txt +0 -9
- jupyter-agent/tmp/4ojbs8a02ir/jupyter-agent.ipynb +0 -68
- jupyter-agent/tmp/cm5iasgpm3p/jupyter-agent.ipynb +0 -91
- jupyter-agent/tmp/crqbsseag5/jupyter-agent.ipynb +0 -91
- jupyter-agent/tmp/hohanq1u097/jupyter-agent.ipynb +0 -57
- jupyter-agent/tmp/jns1sam29wm/jupyter-agent.ipynb +0 -53
- jupyter-agent/tmp/jupyter-agent.ipynb +0 -27
- jupyter-agent/utils.py +0 -409
- runbooks/aws/__init__.py +0 -58
- runbooks/aws/dynamodb_operations.py +0 -231
- runbooks/aws/ec2_copy_image_cross-region.py +0 -195
- runbooks/aws/ec2_describe_instances.py +0 -202
- runbooks/aws/ec2_ebs_snapshots_delete.py +0 -186
- runbooks/aws/ec2_run_instances.py +0 -213
- runbooks/aws/ec2_start_stop_instances.py +0 -212
- runbooks/aws/ec2_terminate_instances.py +0 -143
- runbooks/aws/ec2_unused_eips.py +0 -196
- runbooks/aws/ec2_unused_volumes.py +0 -188
- runbooks/aws/s3_create_bucket.py +0 -142
- runbooks/aws/s3_list_buckets.py +0 -152
- runbooks/aws/s3_list_objects.py +0 -156
- runbooks/aws/s3_object_operations.py +0 -183
- runbooks/aws/tagging_lambda_handler.py +0 -183
- runbooks/inventory/FAILED_SCRIPTS_TROUBLESHOOTING.md +0 -619
- runbooks/inventory/PASSED_SCRIPTS_GUIDE.md +0 -738
- runbooks/inventory/aws_organization.png +0 -0
- runbooks/inventory/cfn_move_stack_instances.py +0 -1526
- runbooks/inventory/delete_s3_buckets_objects.py +0 -169
- runbooks/inventory/lockdown_cfn_stackset_role.py +0 -224
- runbooks/inventory/update_aws_actions.py +0 -173
- runbooks/inventory/update_cfn_stacksets.py +0 -1215
- runbooks/inventory/update_cloudwatch_logs_retention_policy.py +0 -294
- runbooks/inventory/update_iam_roles_cross_accounts.py +0 -478
- runbooks/inventory/update_s3_public_access_block.py +0 -539
- runbooks/organizations/__init__.py +0 -12
- runbooks/organizations/manager.py +0 -374
- runbooks-0.7.0.dist-info/METADATA +0 -375
- /runbooks/inventory/{tests → Tests}/common_test_data.py +0 -0
- /runbooks/inventory/{tests → Tests}/common_test_functions.py +0 -0
- /runbooks/inventory/{tests → Tests}/script_test_data.py +0 -0
- /runbooks/inventory/{tests → Tests}/setup.py +0 -0
- /runbooks/inventory/{tests → Tests}/src.py +0 -0
- /runbooks/inventory/{tests/test_inventory_modules.py → Tests/test_Inventory_Modules.py} +0 -0
- /runbooks/inventory/{tests → Tests}/test_cfn_describe_stacks.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_ec2_describe_instances.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_lambda_list_functions.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_moto_integration_example.py +0 -0
- /runbooks/inventory/{tests → Tests}/test_org_list_accounts.py +0 -0
- /runbooks/inventory/{Inventory_Modules.py → inventory_modules.py} +0 -0
- /runbooks/{aws → operate}/tags.json +0 -0
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/WHEEL +0 -0
- {runbooks-0.7.0.dist-info → runbooks-0.7.6.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,141 @@
|
|
1
|
+
"""
|
2
|
+
S3 Bucket Inventory - List and analyze S3 bucket configurations.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import logging
|
6
|
+
|
7
|
+
import click
|
8
|
+
from botocore.exceptions import ClientError
|
9
|
+
|
10
|
+
from .commons import display_aws_account_info, get_bucket_policy, get_client, write_to_csv
|
11
|
+
|
12
|
+
logger = logging.getLogger(__name__)
|
13
|
+
|
14
|
+
|
15
|
+
@click.command()
@click.option("--output-file", default="s3_buckets.csv", help="Output CSV file path")
@click.option("--include-versioning", is_flag=True, help="Include versioning status in analysis")
@click.option("--include-encryption", is_flag=True, help="Include encryption status in analysis")
def list_buckets(output_file, include_versioning, include_encryption):
    """List all S3 buckets with policy and configuration analysis.

    For every bucket this records the name, region, creation date, whether a
    bucket policy exists, and the public-access-block status; versioning and
    encryption details are added when the corresponding flags are set.
    Results are written to ``output_file`` as CSV and a summary is logged.

    Raises:
        Exception: re-raised after logging when the bucket listing itself fails.
    """
    logger.info(f"Listing S3 buckets in {display_aws_account_info()}")

    try:
        s3 = get_client("s3")
        response = s3.list_buckets()
        buckets = response.get("Buckets", [])

        if not buckets:
            logger.info("No S3 buckets found")
            return

        logger.info(f"Found {len(buckets)} S3 buckets to analyze")

        data = []
        for i, bucket in enumerate(buckets, 1):
            bucket_name = bucket["Name"]
            creation_date = bucket.get("CreationDate", "Unknown")

            logger.info(f"Analyzing bucket {i}/{len(buckets)}: {bucket_name}")

            try:
                # Get bucket region.  GetBucketLocation returns a null
                # LocationConstraint for us-east-1 and the legacy value "EU"
                # for eu-west-1; normalize both to real region names.
                location_response = s3.get_bucket_location(Bucket=bucket_name)
                bucket_region = location_response.get("LocationConstraint") or "us-east-1"
                if bucket_region == "EU":
                    bucket_region = "eu-west-1"

                # Get bucket policy and public access block
                policy, public_access_block = get_bucket_policy(bucket_name)

                bucket_data = {
                    "BucketName": bucket_name,
                    "Region": bucket_region,
                    "CreationDate": creation_date.strftime("%Y-%m-%d")
                    if hasattr(creation_date, "strftime")
                    else str(creation_date),
                    "HasPolicy": "Yes" if policy else "No",
                    "PublicAccessBlock": public_access_block,
                }

                # Add versioning status if requested
                if include_versioning:
                    try:
                        versioning_response = s3.get_bucket_versioning(Bucket=bucket_name)
                        # "Status" is absent (not "Disabled") until versioning
                        # has ever been enabled on the bucket.
                        versioning_status = versioning_response.get("Status", "Disabled")
                        bucket_data["VersioningStatus"] = versioning_status
                        logger.debug(f" Versioning: {versioning_status}")
                    except ClientError as e:
                        bucket_data["VersioningStatus"] = "Error"
                        logger.debug(f" Could not get versioning status: {e}")

                # Add encryption status if requested
                if include_encryption:
                    try:
                        encryption_response = s3.get_bucket_encryption(Bucket=bucket_name)
                        encryption_rules = encryption_response.get("ServerSideEncryptionConfiguration", {}).get(
                            "Rules", []
                        )
                        if encryption_rules:
                            # Get the first encryption rule
                            sse_algorithm = (
                                encryption_rules[0]
                                .get("ApplyServerSideEncryptionByDefault", {})
                                .get("SSEAlgorithm", "Unknown")
                            )
                            bucket_data["EncryptionStatus"] = f"Enabled ({sse_algorithm})"
                        else:
                            bucket_data["EncryptionStatus"] = "Enabled (Unknown algorithm)"
                        logger.debug(f" Encryption: {bucket_data['EncryptionStatus']}")
                    except ClientError as e:
                        error_code = e.response.get("Error", {}).get("Code", "Unknown")
                        if error_code == "ServerSideEncryptionConfigurationNotFoundError":
                            bucket_data["EncryptionStatus"] = "Disabled"
                            # Plain string: the original used an f-string with
                            # no placeholders here.
                            logger.debug(" Encryption: Disabled")
                        else:
                            bucket_data["EncryptionStatus"] = "Error"
                            logger.debug(f" Could not get encryption status: {e}")

                # Log summary for this bucket
                policy_status = "with policy" if policy else "no policy"
                pab_status = public_access_block if public_access_block else "no PAB"
                logger.info(f" → {bucket_region}, {policy_status}, {pab_status}")

                data.append(bucket_data)

            except ClientError as e:
                logger.error(f" ✗ Failed to analyze bucket {bucket_name}: {e}")
                # Add minimal data for failed analysis.
                # NOTE(review): this row's keys differ from successful rows —
                # confirm write_to_csv unions headers across rows.
                data.append({"BucketName": bucket_name, "Region": "Error", "Error": str(e)})

        # Export results
        write_to_csv(data, output_file)
        logger.info(f"S3 bucket inventory exported to: {output_file}")

        # Summary
        logger.info("\n=== SUMMARY ===")
        logger.info(f"Total buckets: {len(buckets)}")

        if data:
            buckets_with_policy = sum(1 for b in data if b.get("HasPolicy") == "Yes")
            logger.info(f"Buckets with policies: {buckets_with_policy}")

            # Region distribution
            regions = {}
            for bucket in data:
                region = bucket.get("Region", "Unknown")
                regions[region] = regions.get(region, 0) + 1

            logger.info("Region distribution:")
            for region, count in sorted(regions.items()):
                logger.info(f" {region}: {count} buckets")

            if include_encryption:
                encrypted_buckets = sum(1 for b in data if b.get("EncryptionStatus", "").startswith("Enabled"))
                logger.info(f"Encrypted buckets: {encrypted_buckets}")

            if include_versioning:
                versioned_buckets = sum(1 for b in data if b.get("VersioningStatus") == "Enabled")
                logger.info(f"Versioned buckets: {versioned_buckets}")

    except Exception as e:
        logger.error(f"Failed to list S3 buckets: {e}")
        raise
@@ -0,0 +1,201 @@
|
|
1
|
+
"""
|
2
|
+
S3 Object Search - Find objects across all S3 buckets with advanced search capabilities.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import logging
|
6
|
+
|
7
|
+
import click
|
8
|
+
from botocore.exceptions import ClientError
|
9
|
+
|
10
|
+
from .commons import display_aws_account_info, get_client, write_to_csv
|
11
|
+
|
12
|
+
logger = logging.getLogger(__name__)
|
13
|
+
|
14
|
+
|
15
|
+
def search_objects_in_bucket(s3_client, bucket_name, search_term, case_sensitive=False, exact_match=False):
    """Search a single bucket for objects whose key matches ``search_term``.

    Args:
        s3_client: boto3 S3 client used to paginate ``list_objects_v2``.
        bucket_name: Bucket to scan.
        search_term: Text to look for in object keys.
        case_sensitive: Compare without lower-casing when True.
        exact_match: Match only the final path component exactly, instead of
            a substring search over the full key.

    Returns:
        A list of match dicts (``bucket``/``key``/``size``/``last_modified``/
        ``storage_class``); empty when nothing matched.  Returns ``None`` when
        the bucket could not be searched (access denied, missing bucket, or an
        unexpected error) so callers can distinguish "no hits" from "failed".
    """
    try:
        found_objects = []
        paginator = s3_client.get_paginator("list_objects_v2")

        logger.debug(f"Searching in bucket: {bucket_name}")

        for page in paginator.paginate(Bucket=bucket_name):
            objects = page.get("Contents", [])

            for obj in objects:
                key = obj["Key"]

                # Apply search logic based on options
                if exact_match:
                    # Exact filename match (not the full key path)
                    filename = key.split("/")[-1]
                    match = (search_term == filename) if case_sensitive else (search_term.lower() == filename.lower())
                else:
                    # Substring search
                    match = (search_term in key) if case_sensitive else (search_term.lower() in key.lower())

                if match:
                    found_objects.append(
                        {
                            "bucket": bucket_name,
                            "key": key,
                            "size": obj.get("Size", 0),
                            "last_modified": obj.get("LastModified"),
                            "storage_class": obj.get("StorageClass", "STANDARD"),
                        }
                    )

        if found_objects:
            logger.info(f" → Found {len(found_objects)} matching objects")
        else:
            # Plain string: no placeholders needed here.
            logger.debug(" → No matching objects found")

        return found_objects

    except ClientError as e:
        error_code = e.response.get("Error", {}).get("Code", "Unknown")
        if error_code == "AccessDenied":
            logger.warning(f" ⚠ Access denied to bucket: {bucket_name}")
        elif error_code == "NoSuchBucket":
            logger.warning(f" ⚠ Bucket does not exist: {bucket_name}")
        else:
            logger.error(f" ✗ Error accessing bucket {bucket_name}: {e}")
        # BUG FIX: return None (not []) — the caller in s3_object_search
        # explicitly treats None as "search failed" when counting
        # failed_searches; returning [] made that counter dead code.
        return None

    except Exception as e:
        logger.error(f" ✗ Unexpected error with bucket {bucket_name}: {e}")
        # See above: None signals an unsearchable bucket.
        return None
|
69
|
+
|
70
|
+
|
71
|
+
def get_bucket_list(s3_client, bucket_filter=None):
    """Return the names of all S3 buckets, optionally filtered.

    Args:
        s3_client: boto3 S3 client used to call ``list_buckets``.
        bucket_filter: When given, keep only buckets whose name contains this
            string (case-insensitive).

    Returns:
        A list of bucket name strings.

    Raises:
        ClientError: propagated (after logging) when the listing call fails.
    """
    try:
        all_buckets = s3_client.list_buckets().get("Buckets", [])

        if bucket_filter:
            needle = bucket_filter.lower()
            before = len(all_buckets)
            all_buckets = [entry for entry in all_buckets if needle in entry["Name"].lower()]
            logger.info(f"Filtered to {len(all_buckets)} buckets matching '{bucket_filter}' (from {before})")

        return [entry["Name"] for entry in all_buckets]

    except ClientError as e:
        logger.error(f"Failed to list buckets: {e}")
        raise
|
87
|
+
|
88
|
+
|
89
|
+
@click.command()
@click.option("--search-term", required=True, help="Object name or prefix to search for")
@click.option("--bucket-filter", help="Only search buckets containing this string in their name")
@click.option("--case-sensitive", is_flag=True, help="Perform case-sensitive search")
@click.option("--exact-match", is_flag=True, help="Match exact filename (not substring)")
@click.option("--output-file", default="s3_search_results.csv", help="Output CSV file path")
@click.option("--max-buckets", type=int, help="Limit search to first N buckets (for testing)")
def s3_object_search(search_term, bucket_filter, case_sensitive, exact_match, output_file, max_buckets):
    """Search for objects across all S3 buckets with advanced filtering options.

    Collects the account's bucket names (optionally filtered / truncated),
    scans each one for keys matching ``search_term``, logs per-bucket and
    aggregate statistics, and exports all matches to ``output_file`` as CSV.

    Raises:
        Exception: re-raised after logging when the search cannot complete.
    """
    logger.info(f"S3 object search in {display_aws_account_info()}")

    try:
        client = get_client("s3")

        # Determine which buckets to scan.
        logger.info("Getting list of S3 buckets...")
        targets = get_bucket_list(client, bucket_filter)

        if not targets:
            logger.warning("No buckets found to search")
            return

        # Optional cap, useful when smoke-testing large accounts.
        if max_buckets and max_buckets < len(targets):
            targets = targets[:max_buckets]
            logger.info(f"Limited search to first {max_buckets} buckets")

        logger.info(f"Searching {len(targets)} buckets for: '{search_term}'")
        if case_sensitive:
            logger.info("Search mode: Case-sensitive")
        if exact_match:
            logger.info("Search mode: Exact filename match")

        # Scan every target bucket, tallying successes and failures.
        matches = []
        ok_count = 0
        err_count = 0

        for idx, name in enumerate(targets, 1):
            logger.info(f"Searching bucket {idx}/{len(targets)}: {name}")

            hits = search_objects_in_bucket(client, name, search_term, case_sensitive, exact_match)

            if hits is None:  # None signals an unsearchable bucket; [] is a valid empty result
                err_count += 1
            else:
                matches.extend(hits)
                ok_count += 1

        if matches:
            logger.info(f"\n🎯 SEARCH RESULTS: Found {len(matches)} matching objects")

            # Aggregate per-bucket counts and total payload size.
            per_bucket = {}
            total_bytes = 0
            for hit in matches:
                per_bucket[hit["bucket"]] = per_bucket.get(hit["bucket"], 0) + 1
                total_bytes += hit.get("size", 0)

            logger.info("Objects found per bucket:")
            for name, count in sorted(per_bucket.items()):
                logger.info(f"  {name}: {count} objects")

            logger.info(f"Total size: {total_bytes:,} bytes")

            # Flatten matches into CSV rows.
            rows = []
            for hit in matches:
                key = hit["key"]
                stamp = hit["last_modified"]
                rows.append(
                    {
                        "Bucket": hit["bucket"],
                        "ObjectKey": key,
                        "Filename": key.split("/")[-1],
                        "Size": hit["size"],
                        "LastModified": stamp.strftime("%Y-%m-%d %H:%M:%S") if stamp else "Unknown",
                        "StorageClass": hit["storage_class"],
                        "Directory": "/".join(key.split("/")[:-1]) if "/" in key else "",
                    }
                )

            write_to_csv(rows, output_file)
            logger.info(f"Search results exported to: {output_file}")

            # Preview a handful of hits for quick inspection.
            logger.info("\nSample results (first 5):")
            for pos, hit in enumerate(matches[:5]):
                logger.info(f"  {pos + 1}. s3://{hit['bucket']}/{hit['key']} ({hit['size']:,} bytes)")

        else:
            logger.info(f"❌ No objects found matching '{search_term}'")

        # Final aggregate summary.
        logger.info("\n=== SEARCH SUMMARY ===")
        logger.info(f"Buckets searched: {ok_count}")
        logger.info(f"Search failures: {err_count}")
        logger.info(f"Total objects found: {len(matches)}")

        if err_count > 0:
            logger.warning(f"⚠ {err_count} buckets could not be searched (access denied or errors)")

    except Exception as e:
        logger.error(f"Failed to search S3 objects: {e}")
        raise
|
198
|
+
|
199
|
+
|
200
|
+
# Allow running this module directly as a standalone script; normally the
# command is invoked through the package's click entry points.
if __name__ == "__main__":
    s3_object_search()
|