runbooks 0.7.9-py3-none-any.whl → 0.9.1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (122)
  1. runbooks/__init__.py +1 -1
  2. runbooks/cfat/README.md +12 -1
  3. runbooks/cfat/__init__.py +1 -1
  4. runbooks/cfat/assessment/compliance.py +4 -1
  5. runbooks/cfat/assessment/runner.py +42 -34
  6. runbooks/cfat/models.py +1 -1
  7. runbooks/cloudops/__init__.py +123 -0
  8. runbooks/cloudops/base.py +385 -0
  9. runbooks/cloudops/cost_optimizer.py +811 -0
  10. runbooks/cloudops/infrastructure_optimizer.py +29 -0
  11. runbooks/cloudops/interfaces.py +828 -0
  12. runbooks/cloudops/lifecycle_manager.py +29 -0
  13. runbooks/cloudops/mcp_cost_validation.py +678 -0
  14. runbooks/cloudops/models.py +251 -0
  15. runbooks/cloudops/monitoring_automation.py +29 -0
  16. runbooks/cloudops/notebook_framework.py +676 -0
  17. runbooks/cloudops/security_enforcer.py +449 -0
  18. runbooks/common/__init__.py +152 -0
  19. runbooks/common/accuracy_validator.py +1039 -0
  20. runbooks/common/context_logger.py +440 -0
  21. runbooks/common/cross_module_integration.py +594 -0
  22. runbooks/common/enhanced_exception_handler.py +1108 -0
  23. runbooks/common/enterprise_audit_integration.py +634 -0
  24. runbooks/common/mcp_cost_explorer_integration.py +900 -0
  25. runbooks/common/mcp_integration.py +548 -0
  26. runbooks/common/performance_monitor.py +387 -0
  27. runbooks/common/profile_utils.py +216 -0
  28. runbooks/common/rich_utils.py +172 -1
  29. runbooks/feedback/user_feedback_collector.py +440 -0
  30. runbooks/finops/README.md +377 -458
  31. runbooks/finops/__init__.py +4 -21
  32. runbooks/finops/account_resolver.py +279 -0
  33. runbooks/finops/accuracy_cross_validator.py +638 -0
  34. runbooks/finops/aws_client.py +721 -36
  35. runbooks/finops/budget_integration.py +313 -0
  36. runbooks/finops/cli.py +59 -5
  37. runbooks/finops/cost_optimizer.py +1340 -0
  38. runbooks/finops/cost_processor.py +211 -37
  39. runbooks/finops/dashboard_router.py +900 -0
  40. runbooks/finops/dashboard_runner.py +990 -232
  41. runbooks/finops/embedded_mcp_validator.py +288 -0
  42. runbooks/finops/enhanced_dashboard_runner.py +8 -7
  43. runbooks/finops/enhanced_progress.py +327 -0
  44. runbooks/finops/enhanced_trend_visualization.py +423 -0
  45. runbooks/finops/finops_dashboard.py +184 -1829
  46. runbooks/finops/helpers.py +509 -196
  47. runbooks/finops/iam_guidance.py +400 -0
  48. runbooks/finops/markdown_exporter.py +466 -0
  49. runbooks/finops/multi_dashboard.py +1502 -0
  50. runbooks/finops/optimizer.py +15 -15
  51. runbooks/finops/profile_processor.py +2 -2
  52. runbooks/finops/runbooks.inventory.organizations_discovery.log +0 -0
  53. runbooks/finops/runbooks.security.report_generator.log +0 -0
  54. runbooks/finops/runbooks.security.run_script.log +0 -0
  55. runbooks/finops/runbooks.security.security_export.log +0 -0
  56. runbooks/finops/schemas.py +589 -0
  57. runbooks/finops/service_mapping.py +195 -0
  58. runbooks/finops/single_dashboard.py +710 -0
  59. runbooks/finops/tests/test_reference_images_validation.py +1 -1
  60. runbooks/inventory/README.md +12 -1
  61. runbooks/inventory/core/collector.py +157 -29
  62. runbooks/inventory/list_ec2_instances.py +9 -6
  63. runbooks/inventory/list_ssm_parameters.py +10 -10
  64. runbooks/inventory/organizations_discovery.py +210 -164
  65. runbooks/inventory/rich_inventory_display.py +74 -107
  66. runbooks/inventory/run_on_multi_accounts.py +13 -13
  67. runbooks/inventory/runbooks.inventory.organizations_discovery.log +0 -0
  68. runbooks/inventory/runbooks.security.security_export.log +0 -0
  69. runbooks/main.py +1371 -240
  70. runbooks/metrics/dora_metrics_engine.py +711 -17
  71. runbooks/monitoring/performance_monitor.py +433 -0
  72. runbooks/operate/README.md +394 -0
  73. runbooks/operate/base.py +215 -47
  74. runbooks/operate/ec2_operations.py +435 -5
  75. runbooks/operate/iam_operations.py +598 -3
  76. runbooks/operate/privatelink_operations.py +1 -1
  77. runbooks/operate/rds_operations.py +508 -0
  78. runbooks/operate/s3_operations.py +508 -0
  79. runbooks/operate/vpc_endpoints.py +1 -1
  80. runbooks/remediation/README.md +489 -13
  81. runbooks/remediation/base.py +5 -3
  82. runbooks/remediation/commons.py +8 -4
  83. runbooks/security/ENTERPRISE_SECURITY_FRAMEWORK.md +506 -0
  84. runbooks/security/README.md +12 -1
  85. runbooks/security/__init__.py +265 -33
  86. runbooks/security/cloudops_automation_security_validator.py +1164 -0
  87. runbooks/security/compliance_automation.py +12 -10
  88. runbooks/security/compliance_automation_engine.py +1021 -0
  89. runbooks/security/enterprise_security_framework.py +930 -0
  90. runbooks/security/enterprise_security_policies.json +293 -0
  91. runbooks/security/executive_security_dashboard.py +1247 -0
  92. runbooks/security/integration_test_enterprise_security.py +879 -0
  93. runbooks/security/module_security_integrator.py +641 -0
  94. runbooks/security/multi_account_security_controls.py +2254 -0
  95. runbooks/security/real_time_security_monitor.py +1196 -0
  96. runbooks/security/report_generator.py +1 -1
  97. runbooks/security/run_script.py +4 -8
  98. runbooks/security/security_baseline_tester.py +39 -52
  99. runbooks/security/security_export.py +99 -120
  100. runbooks/sre/README.md +472 -0
  101. runbooks/sre/__init__.py +33 -0
  102. runbooks/sre/mcp_reliability_engine.py +1049 -0
  103. runbooks/sre/performance_optimization_engine.py +1032 -0
  104. runbooks/sre/production_monitoring_framework.py +584 -0
  105. runbooks/sre/reliability_monitoring_framework.py +1011 -0
  106. runbooks/validation/__init__.py +2 -2
  107. runbooks/validation/benchmark.py +154 -149
  108. runbooks/validation/cli.py +159 -147
  109. runbooks/validation/mcp_validator.py +291 -248
  110. runbooks/vpc/README.md +478 -0
  111. runbooks/vpc/__init__.py +2 -2
  112. runbooks/vpc/manager_interface.py +366 -351
  113. runbooks/vpc/networking_wrapper.py +68 -36
  114. runbooks/vpc/rich_formatters.py +22 -8
  115. runbooks-0.9.1.dist-info/METADATA +308 -0
  116. {runbooks-0.7.9.dist-info → runbooks-0.9.1.dist-info}/RECORD +120 -59
  117. {runbooks-0.7.9.dist-info → runbooks-0.9.1.dist-info}/entry_points.txt +1 -1
  118. runbooks/finops/cross_validation.py +0 -375
  119. runbooks-0.7.9.dist-info/METADATA +0 -636
  120. {runbooks-0.7.9.dist-info → runbooks-0.9.1.dist-info}/WHEEL +0 -0
  121. {runbooks-0.7.9.dist-info → runbooks-0.9.1.dist-info}/licenses/LICENSE +0 -0
  122. {runbooks-0.7.9.dist-info → runbooks-0.9.1.dist-info}/top_level.txt +0 -0
@@ -59,6 +59,10 @@ class S3Operations(BaseOperation):
         "set_public_access_block",
         "get_public_access_block",
         "sync_objects",
+        "find_buckets_without_lifecycle",
+        "add_lifecycle_policy_bulk",
+        "get_bucket_lifecycle",
+        "analyze_lifecycle_compliance",
     }
     requires_confirmation = True

@@ -174,6 +178,14 @@ class S3Operations(BaseOperation):
             return self.get_public_access_block(context, kwargs.get("account_id"))
         elif operation_type == "sync_objects":
             return self.sync_objects(context, **kwargs)
+        elif operation_type == "find_buckets_without_lifecycle":
+            return self.find_buckets_without_lifecycle(context, **kwargs)
+        elif operation_type == "add_lifecycle_policy_bulk":
+            return self.add_lifecycle_policy_bulk(context, **kwargs)
+        elif operation_type == "get_bucket_lifecycle":
+            return self.get_bucket_lifecycle(context, **kwargs)
+        elif operation_type == "analyze_lifecycle_compliance":
+            return self.analyze_lifecycle_compliance(context, **kwargs)
         else:
             raise ValueError(f"Unsupported operation: {operation_type}")

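The four routes added above dispatch to new lifecycle methods whose implementations are added in the next hunk. As a rough standalone illustration of the detection step they wrap (plain boto3 only, not the package's own interface; the client setup and output handling here are assumptions, not code from the wheel):

# Hedged sketch: plain-boto3 equivalent of the lifecycle audit that the new
# "find_buckets_without_lifecycle" operation performs; names are illustrative.
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")
missing = []

for bucket in s3.list_buckets().get("Buckets", []):
    name = bucket["Name"]
    try:
        rules = s3.get_bucket_lifecycle_configuration(Bucket=name).get("Rules", [])
        if not rules:
            missing.append(name)  # configuration exists but holds no rules
    except ClientError as err:
        if err.response["Error"]["Code"] == "NoSuchLifecycleConfiguration":
            missing.append(name)  # bucket has no lifecycle configuration at all
        else:
            raise

print(f"{len(missing)} bucket(s) without lifecycle rules")

The packaged version additionally resolves each bucket's region via get_bucket_location before auditing it, since lifecycle reads are served from the bucket's own region.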
@@ -723,6 +735,502 @@ class S3Operations(BaseOperation):

         return [result]

+    def find_buckets_without_lifecycle(
+        self,
+        context: OperationContext,
+        region: Optional[str] = None,
+        bucket_names: Optional[List[str]] = None
+    ) -> List[OperationResult]:
+        """
+        Find S3 buckets without lifecycle policies.
+
+        Enhanced from unSkript notebook: AWS_Add_Lifecycle_Policy_To_S3_Buckets.ipynb
+        Identifies buckets that do not have any configured lifecycle rules for
+        managing object lifecycle, valuable for optimizing storage costs.
+
+        Args:
+            context: Operation context
+            region: AWS region to search (if None, searches all regions)
+            bucket_names: Specific bucket names to check (if None, checks all buckets)
+
+        Returns:
+            List of operation results with buckets without lifecycle policies
+        """
+        result = self.create_operation_result(
+            context, "find_buckets_without_lifecycle", "s3:bucket", "lifecycle-audit"
+        )
+
+        try:
+            console.print(f"[bold blue]🔍 Scanning for S3 buckets without lifecycle policies...[/bold blue]")
+
+            buckets_without_policy = []
+
+            # Get list of regions to search
+            search_regions = []
+            if region:
+                search_regions = [region]
+            elif bucket_names and region:
+                search_regions = [region]
+            else:
+                # Get all regions if not specified - use boto3 to get all regions
+                try:
+                    ec2_client = self.get_client("ec2", "us-east-1")  # Use us-east-1 to get all regions
+                    regions_response = self.execute_aws_call(ec2_client, "describe_regions")
+                    search_regions = [r["RegionName"] for r in regions_response.get("Regions", [])]
+                except Exception:
+                    # Fallback to common regions
+                    search_regions = ["us-east-1", "us-west-2", "eu-west-1", "ap-southeast-1"]
+
+            # Process each region
+            for reg in search_regions:
+                try:
+                    s3_client = self.get_client("s3", reg)
+
+                    # Get buckets to check
+                    if bucket_names:
+                        # Check specific buckets
+                        buckets_to_check = bucket_names
+                    else:
+                        # Get all buckets in region
+                        response = self.execute_aws_call(s3_client, "list_buckets")
+                        buckets_to_check = [bucket["Name"] for bucket in response.get("Buckets", [])]
+
+                    # Check each bucket for lifecycle policies
+                    for bucket_name in buckets_to_check:
+                        try:
+                            # Get bucket location to ensure it's in the current region
+                            try:
+                                bucket_location = self.execute_aws_call(s3_client, "get_bucket_location", Bucket=bucket_name)
+                                bucket_region = bucket_location.get("LocationConstraint")
+
+                                # us-east-1 returns None for LocationConstraint
+                                if bucket_region is None:
+                                    bucket_region = "us-east-1"
+
+                                # Skip if bucket is not in current region (when checking all regions)
+                                if not bucket_names and bucket_region != reg:
+                                    continue
+
+                            except ClientError as e:
+                                if e.response["Error"]["Code"] in ["NoSuchBucket", "AccessDenied"]:
+                                    continue
+                                raise
+
+                            # Check for lifecycle configuration
+                            try:
+                                lifecycle_response = self.execute_aws_call(
+                                    s3_client, "get_bucket_lifecycle_configuration", Bucket=bucket_name
+                                )
+
+                                # If we get here, bucket has lifecycle rules
+                                rules = lifecycle_response.get("Rules", [])
+                                if not rules:
+                                    # Empty rules list means no active lifecycle
+                                    buckets_without_policy.append({
+                                        "bucket_name": bucket_name,
+                                        "region": bucket_region,
+                                        "issue": "Empty lifecycle configuration"
+                                    })
+
+                            except ClientError as e:
+                                if e.response["Error"]["Code"] == "NoSuchLifecycleConfiguration":
+                                    # No lifecycle configuration found
+                                    buckets_without_policy.append({
+                                        "bucket_name": bucket_name,
+                                        "region": bucket_region,
+                                        "issue": "No lifecycle configuration"
+                                    })
+                                else:
+                                    logger.warning(f"Could not check lifecycle for bucket {bucket_name}: {e}")
+
+                        except Exception as bucket_error:
+                            logger.warning(f"Error checking bucket {bucket_name}: {bucket_error}")
+                            continue
+
+                except Exception as region_error:
+                    logger.warning(f"Error processing region {reg}: {region_error}")
+                    continue
+
+            # Format results with Rich console output
+            if buckets_without_policy:
+                console.print(f"[bold yellow]⚠️ Found {len(buckets_without_policy)} bucket(s) without lifecycle policies:[/bold yellow]")
+
+                from rich.table import Table
+                table = Table(show_header=True, header_style="bold magenta")
+                table.add_column("Bucket Name", style="cyan")
+                table.add_column("Region", style="green")
+                table.add_column("Issue", style="yellow")
+
+                for bucket_info in buckets_without_policy:
+                    table.add_row(
+                        bucket_info["bucket_name"],
+                        bucket_info["region"],
+                        bucket_info["issue"]
+                    )
+
+                console.print(table)
+
+                result.response_data = {
+                    "buckets_without_lifecycle": buckets_without_policy,
+                    "total_count": len(buckets_without_policy),
+                    "regions_scanned": search_regions
+                }
+                result.mark_completed(OperationStatus.SUCCESS)
+            else:
+                console.print("[bold green]✅ All buckets have lifecycle policies configured![/bold green]")
+                result.response_data = {
+                    "buckets_without_lifecycle": [],
+                    "total_count": 0,
+                    "regions_scanned": search_regions,
+                    "message": "All buckets have lifecycle policies"
+                }
+                result.mark_completed(OperationStatus.SUCCESS)
+
+        except Exception as e:
+            error_msg = f"Failed to scan for buckets without lifecycle policies: {e}"
+            logger.error(error_msg)
+            result.mark_completed(OperationStatus.FAILED, error_msg)
+
+        return [result]
+
+    def add_lifecycle_policy_bulk(
+        self,
+        context: OperationContext,
+        bucket_list: List[Dict[str, str]],
+        expiration_days: int = 30,
+        prefix: str = "",
+        noncurrent_days: int = 30,
+        transition_ia_days: Optional[int] = None,
+        transition_glacier_days: Optional[int] = None,
+    ) -> List[OperationResult]:
+        """
+        Add lifecycle policies to multiple S3 buckets in bulk.
+
+        Enhanced from unSkript notebook: AWS_Add_Lifecycle_Policy_To_S3_Buckets.ipynb
+        Applies optimized lifecycle configuration for cost management.
+
+        Args:
+            context: Operation context
+            bucket_list: List of dictionaries with 'bucket_name' and 'region' keys
+            expiration_days: Days after which objects expire
+            prefix: Object prefix filter for lifecycle rule
+            noncurrent_days: Days before noncurrent versions are deleted
+            transition_ia_days: Days before transition to IA storage class
+            transition_glacier_days: Days before transition to Glacier
+
+        Returns:
+            List of operation results for each bucket processed
+        """
+        results = []
+
+        console.print(f"[bold blue]📋 Adding lifecycle policies to {len(bucket_list)} bucket(s)...[/bold blue]")
+
+        if context.dry_run:
+            console.print("[yellow]🧪 DRY-RUN MODE: No actual changes will be made[/yellow]")
+
+        for i, bucket_info in enumerate(bucket_list, 1):
+            bucket_name = bucket_info.get("bucket_name")
+            bucket_region = bucket_info.get("region")
+
+            if not bucket_name or not bucket_region:
+                logger.error(f"Invalid bucket info: {bucket_info}")
+                continue
+
+            console.print(f"[cyan]({i}/{len(bucket_list)}) Processing bucket: {bucket_name}[/cyan]")
+
+            result = self.create_operation_result(
+                context, "add_lifecycle_policy", "s3:bucket", bucket_name
+            )
+
+            try:
+                s3_client = self.get_client("s3", bucket_region)
+
+                # Build lifecycle configuration
+                lifecycle_rules = []
+
+                # Main lifecycle rule
+                rule = {
+                    "ID": f"lifecycle-rule-{int(datetime.now().timestamp())}",
+                    "Status": "Enabled",
+                    "Filter": {"Prefix": prefix} if prefix else {},
+                    "Expiration": {"Days": expiration_days},
+                }
+
+                # Add noncurrent version expiration
+                if noncurrent_days:
+                    rule["NoncurrentVersionExpiration"] = {"NoncurrentDays": noncurrent_days}
+
+                # Add storage class transitions
+                transitions = []
+                if transition_ia_days and transition_ia_days < expiration_days:
+                    transitions.append({
+                        "Days": transition_ia_days,
+                        "StorageClass": "STANDARD_IA"
+                    })
+
+                if transition_glacier_days and transition_glacier_days < expiration_days:
+                    transitions.append({
+                        "Days": transition_glacier_days,
+                        "StorageClass": "GLACIER"
+                    })
+
+                if transitions:
+                    rule["Transitions"] = transitions
+
+                lifecycle_rules.append(rule)
+
+                lifecycle_config = {"Rules": lifecycle_rules}
+
+                if context.dry_run:
+                    console.print(f"[yellow] [DRY-RUN] Would apply lifecycle policy to {bucket_name}[/yellow]")
+                    result.response_data = {
+                        "bucket_name": bucket_name,
+                        "region": bucket_region,
+                        "lifecycle_config": lifecycle_config,
+                        "dry_run": True
+                    }
+                    result.mark_completed(OperationStatus.DRY_RUN)
+                else:
+                    # Apply lifecycle configuration
+                    response = self.execute_aws_call(
+                        s3_client,
+                        "put_bucket_lifecycle_configuration",
+                        Bucket=bucket_name,
+                        LifecycleConfiguration=lifecycle_config
+                    )
+
+                    console.print(f"[green] ✅ Successfully applied lifecycle policy to {bucket_name}[/green]")
+
+                    result.response_data = {
+                        "bucket_name": bucket_name,
+                        "region": bucket_region,
+                        "lifecycle_config": lifecycle_config,
+                        "aws_response": response
+                    }
+                    result.mark_completed(OperationStatus.SUCCESS)
+
+            except ClientError as e:
+                error_msg = f"Failed to set lifecycle policy on {bucket_name}: {e}"
+                console.print(f"[red] ❌ {error_msg}[/red]")
+                logger.error(error_msg)
+                result.mark_completed(OperationStatus.FAILED, error_msg)
+            except Exception as e:
+                error_msg = f"Unexpected error for bucket {bucket_name}: {e}"
+                console.print(f"[red] ❌ {error_msg}[/red]")
+                logger.error(error_msg)
+                result.mark_completed(OperationStatus.FAILED, error_msg)
+
+            results.append(result)
+
+        # Summary report
+        successful = len([r for r in results if r.success])
+        failed = len(results) - successful
+
+        if context.dry_run:
+            console.print(f"[yellow]🧪 DRY-RUN SUMMARY: Would process {len(results)} bucket(s)[/yellow]")
+        else:
+            console.print(f"[bold blue]📊 BULK OPERATION SUMMARY:[/bold blue]")
+            console.print(f"[green] ✅ Successful: {successful}[/green]")
+            if failed > 0:
+                console.print(f"[red] ❌ Failed: {failed}[/red]")
+
+        return results
+
+    def get_bucket_lifecycle(
+        self, context: OperationContext, bucket_name: str
+    ) -> List[OperationResult]:
+        """
+        Get current lifecycle configuration for an S3 bucket.
+
+        Args:
+            context: Operation context
+            bucket_name: Name of bucket to check
+
+        Returns:
+            List of operation results with current lifecycle configuration
+        """
+        s3_client = self.get_client("s3")
+
+        result = self.create_operation_result(
+            context, "get_bucket_lifecycle", "s3:bucket", bucket_name
+        )
+
+        try:
+            console.print(f"[blue]🔍 Checking lifecycle configuration for bucket: {bucket_name}[/blue]")
+
+            try:
+                response = self.execute_aws_call(
+                    s3_client, "get_bucket_lifecycle_configuration", Bucket=bucket_name
+                )
+
+                rules = response.get("Rules", [])
+                console.print(f"[green]✅ Found {len(rules)} lifecycle rule(s) for bucket {bucket_name}[/green]")
+
+                # Display rules in a formatted table
+                if rules:
+                    from rich.table import Table
+                    table = Table(show_header=True, header_style="bold magenta")
+                    table.add_column("Rule ID", style="cyan")
+                    table.add_column("Status", style="green")
+                    table.add_column("Prefix", style="yellow")
+                    table.add_column("Expiration", style="red")
+
+                    for rule in rules:
+                        rule_id = rule.get("ID", "N/A")
+                        status = rule.get("Status", "N/A")
+
+                        # Handle different filter formats
+                        filter_info = rule.get("Filter", {})
+                        if isinstance(filter_info, dict):
+                            prefix = filter_info.get("Prefix", "")
+                        else:
+                            prefix = ""
+
+                        expiration = rule.get("Expiration", {})
+                        exp_days = expiration.get("Days", "N/A")
+
+                        table.add_row(rule_id, status, prefix or "All objects", str(exp_days))
+
+                    console.print(table)
+
+                result.response_data = {
+                    "bucket_name": bucket_name,
+                    "lifecycle_rules": rules,
+                    "rules_count": len(rules)
+                }
+                result.mark_completed(OperationStatus.SUCCESS)
+
+            except ClientError as e:
+                if e.response["Error"]["Code"] == "NoSuchLifecycleConfiguration":
+                    console.print(f"[yellow]⚠️ No lifecycle configuration found for bucket {bucket_name}[/yellow]")
+                    result.response_data = {
+                        "bucket_name": bucket_name,
+                        "lifecycle_rules": [],
+                        "rules_count": 0,
+                        "message": "No lifecycle configuration"
+                    }
+                    result.mark_completed(OperationStatus.SUCCESS)
+                else:
+                    raise e
+
+        except ClientError as e:
+            error_msg = f"Failed to get lifecycle configuration for {bucket_name}: {e}"
+            logger.error(error_msg)
+            result.mark_completed(OperationStatus.FAILED, error_msg)
+
+        return [result]
+
+    def analyze_lifecycle_compliance(
+        self, context: OperationContext, region: Optional[str] = None
+    ) -> List[OperationResult]:
+        """
+        Analyze lifecycle compliance across S3 buckets and provide cost optimization recommendations.
+
+        Args:
+            context: Operation context
+            region: AWS region to analyze (if None, analyzes all regions)
+
+        Returns:
+            List of operation results with compliance analysis and recommendations
+        """
+        result = self.create_operation_result(
+            context, "analyze_lifecycle_compliance", "s3:account", "compliance-analysis"
+        )
+
+        try:
+            console.print("[bold blue]📊 Analyzing S3 lifecycle compliance and cost optimization opportunities...[/bold blue]")
+
+            # Find buckets without lifecycle policies
+            find_results = self.find_buckets_without_lifecycle(context, region=region)
+
+            if not find_results or not find_results[0].success:
+                result.mark_completed(OperationStatus.FAILED, "Failed to analyze buckets")
+                return [result]
+
+            buckets_data = find_results[0].response_data
+            non_compliant_buckets = buckets_data.get("buckets_without_lifecycle", [])
+            total_buckets_scanned = buckets_data.get("total_count", 0)
+
+            # Calculate compliance metrics
+            s3_client = self.get_client("s3")
+
+            # Get total bucket count for compliance percentage
+            list_response = self.execute_aws_call(s3_client, "list_buckets")
+            total_buckets = len(list_response.get("Buckets", []))
+
+            compliance_percentage = ((total_buckets - len(non_compliant_buckets)) / total_buckets * 100) if total_buckets > 0 else 100
+
+            # Generate recommendations
+            recommendations = []
+            potential_savings = 0
+
+            if non_compliant_buckets:
+                recommendations.extend([
+                    "🎯 Implement lifecycle policies to automatically transition objects to cheaper storage classes",
+                    "💰 Configure automatic deletion of old object versions to reduce storage costs",
+                    "📈 Set up transitions: Standard → IA (30 days) → Glacier (90 days) → Deep Archive (365 days)",
+                    "🔧 Use prefixes to apply different policies to different object types",
+                    "📊 Monitor lifecycle rule effectiveness with CloudWatch metrics"
+                ])
+
+                # Estimate potential savings (rough calculation)
+                estimated_savings_per_bucket = 25  # Estimated 25% savings per bucket
+                potential_savings = len(non_compliant_buckets) * estimated_savings_per_bucket
+
+            # Create summary report
+            from rich.panel import Panel
+            from rich.table import Table
+
+            # Compliance summary table
+            summary_table = Table(show_header=True, header_style="bold magenta")
+            summary_table.add_column("Metric", style="cyan")
+            summary_table.add_column("Value", style="green")
+
+            summary_table.add_row("Total Buckets", str(total_buckets))
+            summary_table.add_row("Compliant Buckets", str(total_buckets - len(non_compliant_buckets)))
+            summary_table.add_row("Non-Compliant Buckets", str(len(non_compliant_buckets)))
+            summary_table.add_row("Compliance Percentage", f"{compliance_percentage:.1f}%")
+            summary_table.add_row("Potential Cost Savings", f"~{potential_savings}%")
+
+            console.print(Panel(summary_table, title="S3 Lifecycle Compliance Report", border_style="blue"))
+
+            # Display recommendations if any
+            if recommendations:
+                console.print("\n[bold yellow]💡 Cost Optimization Recommendations:[/bold yellow]")
+                for i, rec in enumerate(recommendations, 1):
+                    console.print(f" {i}. {rec}")
+
+            # Compliance status color coding
+            if compliance_percentage >= 90:
+                compliance_status = "[bold green]EXCELLENT[/bold green]"
+            elif compliance_percentage >= 75:
+                compliance_status = "[bold yellow]GOOD[/bold yellow]"
+            elif compliance_percentage >= 50:
+                compliance_status = "[bold orange]NEEDS IMPROVEMENT[/bold orange]"
+            else:
+                compliance_status = "[bold red]POOR[/bold red]"
+
+            console.print(f"\n[bold blue]Overall Compliance Status: {compliance_status}[/bold blue]")
+
+            result.response_data = {
+                "compliance_percentage": compliance_percentage,
+                "total_buckets": total_buckets,
+                "compliant_buckets": total_buckets - len(non_compliant_buckets),
+                "non_compliant_buckets": len(non_compliant_buckets),
+                "non_compliant_details": non_compliant_buckets,
+                "recommendations": recommendations,
+                "potential_savings_percentage": potential_savings,
+                "compliance_status": compliance_status.replace("[bold ", "").replace("[/bold ", "").replace("]", "")
+            }
+            result.mark_completed(OperationStatus.SUCCESS)

+        except Exception as e:
+            error_msg = f"Failed to analyze lifecycle compliance: {e}"
+            logger.error(error_msg)
+            result.mark_completed(OperationStatus.FAILED, error_msg)
+
+        return [result]
+
     def delete_bucket_and_objects(self, context: OperationContext, bucket_name: str) -> List[OperationResult]:
         """
         Delete S3 bucket and all its objects/versions (complete cleanup).
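The bulk operation above assembles one lifecycle rule per bucket (expiration, noncurrent-version expiration, and optional IA/Glacier transitions) and applies it with put_bucket_lifecycle_configuration. A minimal standalone sketch of that payload shape with plain boto3; the bucket name, prefix, and day thresholds below are illustrative assumptions, not values taken from the package:

# Hedged sketch: the lifecycle document shape built by add_lifecycle_policy_bulk,
# applied directly with boto3. Bucket name and thresholds are placeholders.
import boto3

lifecycle_config = {
    "Rules": [
        {
            "ID": "lifecycle-rule-example",
            "Status": "Enabled",
            "Filter": {"Prefix": "logs/"},
            "Transitions": [
                {"Days": 30, "StorageClass": "STANDARD_IA"},
                {"Days": 90, "StorageClass": "GLACIER"},
            ],
            "Expiration": {"Days": 365},
            "NoncurrentVersionExpiration": {"NoncurrentDays": 30},
        }
    ]
}

boto3.client("s3").put_bucket_lifecycle_configuration(
    Bucket="example-bucket",  # placeholder bucket name
    LifecycleConfiguration=lifecycle_config,
)

The compliance figure reported by analyze_lifecycle_compliance is simply compliant buckets over total buckets: for example, 34 of 40 buckets with lifecycle rules yields 85.0%.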
@@ -29,7 +29,6 @@ from typing import Any, Dict, List, Optional, Tuple
 import boto3
 from botocore.exceptions import BotoCoreError, ClientError

-from runbooks.operate.base import BaseOperation, OperationResult
 from runbooks.common.rich_utils import (
     console,
     create_panel,
@@ -39,6 +38,7 @@ from runbooks.common.rich_utils import (
     print_status,
     print_success,
 )
+from runbooks.operate.base import BaseOperation, OperationResult

 logger = logging.getLogger(__name__)