runbooks 1.1.1__py3-none-any.whl → 1.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. runbooks/__init__.py +1 -1
  2. runbooks/cfat/assessment/collectors.py +3 -2
  3. runbooks/cloudops/cost_optimizer.py +77 -61
  4. runbooks/cloudops/models.py +8 -2
  5. runbooks/common/aws_pricing.py +12 -0
  6. runbooks/common/profile_utils.py +213 -310
  7. runbooks/common/rich_utils.py +10 -16
  8. runbooks/finops/__init__.py +13 -5
  9. runbooks/finops/business_case_config.py +5 -5
  10. runbooks/finops/cli.py +24 -15
  11. runbooks/finops/cost_optimizer.py +2 -1
  12. runbooks/finops/cost_processor.py +69 -22
  13. runbooks/finops/dashboard_router.py +3 -3
  14. runbooks/finops/dashboard_runner.py +3 -4
  15. runbooks/finops/enhanced_progress.py +213 -0
  16. runbooks/finops/markdown_exporter.py +4 -2
  17. runbooks/finops/multi_dashboard.py +1 -1
  18. runbooks/finops/nat_gateway_optimizer.py +85 -57
  19. runbooks/finops/scenario_cli_integration.py +212 -22
  20. runbooks/finops/scenarios.py +41 -25
  21. runbooks/finops/single_dashboard.py +68 -9
  22. runbooks/finops/tests/run_tests.py +5 -3
  23. runbooks/finops/workspaces_analyzer.py +10 -4
  24. runbooks/main.py +86 -25
  25. runbooks/operate/executive_dashboard.py +4 -3
  26. runbooks/remediation/rds_snapshot_list.py +13 -0
  27. {runbooks-1.1.1.dist-info → runbooks-1.1.2.dist-info}/METADATA +234 -40
  28. {runbooks-1.1.1.dist-info → runbooks-1.1.2.dist-info}/RECORD +32 -32
  29. {runbooks-1.1.1.dist-info → runbooks-1.1.2.dist-info}/WHEEL +0 -0
  30. {runbooks-1.1.1.dist-info → runbooks-1.1.2.dist-info}/entry_points.txt +0 -0
  31. {runbooks-1.1.1.dist-info → runbooks-1.1.2.dist-info}/licenses/LICENSE +0 -0
  32. {runbooks-1.1.1.dist-info → runbooks-1.1.2.dist-info}/top_level.txt +0 -0
runbooks/__init__.py CHANGED
@@ -61,7 +61,7 @@ s3_ops = S3Operations()
  
  # Centralized Version Management - Single Source of Truth
  # All modules MUST import __version__ from this location
- __version__ = "1.1.1"
+ __version__ = "1.1.2"
  
  # Fallback for legacy importlib.metadata usage during transition
  try:
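
The version bump above is the single place the release number changes; other modules are expected to import __version__ from runbooks rather than hardcode it. A minimal sketch of that single-source pattern with the importlib.metadata fallback mentioned in the surrounding comment (illustrative only, not the package's exact code):

    # Illustrative sketch: one authoritative version constant, with an
    # importlib.metadata cross-check for installed distributions.
    from importlib.metadata import PackageNotFoundError, version

    __version__ = "1.1.2"  # single source of truth, bumped once per release

    try:
        # When the package is installed, the distribution metadata should agree.
        _installed_version = version("runbooks")
    except PackageNotFoundError:
        # Source checkout or vendored copy: fall back to the constant.
        _installed_version = __version__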
runbooks/cfat/assessment/collectors.py CHANGED
@@ -20,6 +20,7 @@ from typing import Any, Dict, List, Optional
  
  from loguru import logger
  
+ from runbooks import __version__
  from runbooks.base import CloudFoundationsBase
  
  
@@ -147,7 +148,7 @@ class VPCCollector(BaseCollector):
  "internet_gateways": internet_gateways,
  "route_tables": route_tables,
  "assessment_metadata": {
- "collector_version": "v0.7.8-vpc-enhanced",
+ "collector_version": f"v{__version__}-vpc-enhanced",
  "github_issue": "#96",
  "cost_optimization_enabled": True,
  },
@@ -165,7 +166,7 @@ class VPCCollector(BaseCollector):
  "flow_logs": [],
  "internet_gateways": [],
  "route_tables": [],
- "assessment_metadata": {"collector_version": "v0.7.8-vpc-enhanced", "error": str(e)},
+ "assessment_metadata": {"collector_version": f"v{__version__}-vpc-enhanced", "error": str(e)},
  }
  
  def _analyze_nat_optimization(self, nat_gateways: List[Dict], subnets: List[Dict]) -> int:
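
Both hunks swap the stale hardcoded tag "v0.7.8-vpc-enhanced" for a string built from the package version, so the collector metadata can no longer drift from the actual release. A small standalone sketch of the pattern (the fallback import and helper name are illustrative):

    # Derive the collector version tag from the package version instead of hardcoding it.
    try:
        from runbooks import __version__
    except ImportError:  # illustrative fallback for running outside the package
        __version__ = "0.0.0"

    def collector_version_tag(suffix: str = "vpc-enhanced") -> str:
        """Return a metadata tag such as 'v1.1.2-vpc-enhanced'."""
        return f"v{__version__}-{suffix}"

    print(collector_version_tag())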
runbooks/cloudops/cost_optimizer.py CHANGED
@@ -1201,19 +1201,21 @@ class CostOptimizer(CloudOpsBase):
  )
  
  async def optimize_workspaces(
- self,
+ self,
  usage_threshold_days: int = 180,
+ analysis_days: int = 30,
  dry_run: bool = True
  ) -> CostOptimizationResult:
  """
  Business Scenario: Cleanup unused WorkSpaces with zero usage in last 6 months
  JIRA Reference: FinOps-24
  Expected Savings: USD $12,518 annually
- 
+ 
  Args:
- usage_threshold_days: Days of zero usage to consider for deletion
+ usage_threshold_days: Days of zero usage to consider for deletion (default: 180)
+ analysis_days: Period for usage analysis in days, configurable 30/60 (default: 30)
  dry_run: If True, only analyze without deletion
- 
+ 
  Returns:
  CostOptimizationResult with WorkSpaces cleanup analysis
  """
@@ -1222,20 +1224,21 @@ class CostOptimizer(CloudOpsBase):
  
  # Import existing workspaces analyzer
  try:
- from runbooks.finops.workspaces_analyzer import WorkSpacesAnalyzer
- except ImportError:
- print_error("WorkSpaces analyzer not available - implementing basic analysis")
+ from runbooks.finops.workspaces_analyzer import WorkSpacesCostAnalyzer, analyze_workspaces
+ except ImportError as e:
+ print_error(f"WorkSpaces analyzer not available: {e}")
+ print_warning("This is likely due to missing dependencies or import issues")
  return CostOptimizationResult(
  scenario=BusinessScenario.COST_OPTIMIZATION,
  scenario_name=operation_name,
  execution_timestamp=datetime.now(),
  execution_mode=self.execution_mode,
  success=False,
- error_message="WorkSpaces analyzer module not found",
+ error_message=f"WorkSpaces analyzer import failed: {e}",
  # Add required fields to prevent Pydantic validation errors
  execution_time=0.0,
  resources_analyzed=0,
- resources_impacted=[], # Must be a list, not an integer
+ resources_impacted=[],
  business_metrics={
  "total_monthly_savings": 0.0,
  "overall_risk_level": "low"
@@ -1244,42 +1247,38 @@ class CostOptimizer(CloudOpsBase):
  aws_profile_used=self.profile or "default",
  current_monthly_spend=0.0,
  optimized_monthly_spend=0.0,
- savings_percentage=0.0
- )
- 
- with create_progress_bar() as progress:
- task = progress.add_task("Analyzing WorkSpaces usage...", total=100)
- 
- # Step 1: Initialize WorkSpaces analyzer
- workspaces_analyzer = WorkSpacesAnalyzer(
- session=self.session,
- region=self.region
- )
- progress.update(task, advance=25)
- 
- # Step 2: Analyze unused WorkSpaces
- unused_workspaces = await workspaces_analyzer.find_unused_workspaces(
- usage_threshold_days=usage_threshold_days
+ savings_percentage=0.0,
+ annual_savings=0.0
  )
- progress.update(task, advance=50)
- 
- # Step 3: Calculate cost savings
- estimated_savings = len(unused_workspaces) * 45 # ~$45/month per WorkSpace
- progress.update(task, advance=75)
- 
- # Step 4: Execute cleanup if not dry_run
- if not dry_run and unused_workspaces:
- await self._execute_workspaces_cleanup(unused_workspaces)
- progress.update(task, advance=100)
  
- # Display results
- results_table = create_table("WorkSpaces Optimization Results")
- results_table.add_row("Unused WorkSpaces Found", str(len(unused_workspaces)))
- results_table.add_row("Monthly Savings", format_cost(estimated_savings))
- results_table.add_row("Annual Savings", format_cost(estimated_savings * 12))
- results_table.add_row("Execution Mode", "Analysis Only" if dry_run else "Cleanup Executed")
- console.print(results_table)
+ # Execute WorkSpaces analysis using proven finops function
+ analysis_results = analyze_workspaces(
+ profile=self.profile,
+ unused_days=usage_threshold_days,
+ analysis_days=analysis_days,
+ output_format="json",
+ dry_run=dry_run
+ )
+ 
+ # Extract analysis results
+ if analysis_results.get("status") == "success":
+ summary = analysis_results.get("summary", {})
+ estimated_monthly_savings = summary.get("unused_monthly_cost", 0.0)
+ estimated_annual_savings = summary.get("potential_annual_savings", 0.0)
+ unused_workspaces_count = summary.get("unused_workspaces", 0)
+ total_workspaces = summary.get("total_workspaces", 0)
+ else:
+ print_error(f"WorkSpaces analysis failed: {analysis_results.get('error', 'Unknown error')}")
+ estimated_monthly_savings = 0.0
+ estimated_annual_savings = 0.0
+ unused_workspaces_count = 0
+ total_workspaces = 0
  
+ # Calculate savings percentage if we have baseline cost data
+ savings_percentage = 0.0
+ if summary.get("total_monthly_cost", 0) > 0:
+ savings_percentage = (estimated_monthly_savings / summary.get("total_monthly_cost", 1)) * 100
+ 
  return CostOptimizationResult(
  scenario=BusinessScenario.COST_OPTIMIZATION,
  scenario_name=operation_name,
@@ -1287,30 +1286,44 @@ class CostOptimizer(CloudOpsBase):
  execution_mode=self.execution_mode,
  execution_time=15.0,
  success=True,
- total_monthly_savings=estimated_savings,
- annual_savings=estimated_savings * 12,
- savings_percentage=0.0, # Would need baseline cost to calculate
- affected_resources=len(unused_workspaces),
+ # Core cost metrics using correct variable names
+ current_monthly_spend=summary.get("total_monthly_cost", 0.0),
+ optimized_monthly_spend=summary.get("total_monthly_cost", 0.0) - estimated_monthly_savings,
+ total_monthly_savings=estimated_monthly_savings,
+ annual_savings=estimated_annual_savings,
+ savings_percentage=savings_percentage,
+ # Resource metrics
+ affected_resources=unused_workspaces_count,
+ resources_analyzed=total_workspaces,
+ resources_impacted=[], # Must be a list
  resource_impacts=[
  ResourceImpact(
- resource_id=f"workspaces-cleanup-{len(unused_workspaces)}",
+ resource_id=f"workspaces-optimization-{unused_workspaces_count}",
  resource_type="AWS::WorkSpaces::Workspace",
- action="terminate",
- monthly_savings=estimated_savings,
- risk_level=RiskLevel.LOW
+ resource_name=f"{unused_workspaces_count} unused WorkSpaces",
+ region=self.session.region_name or "us-east-1",
+ account_id=self.account_id,
+ estimated_monthly_cost=summary.get("unused_monthly_cost", 0.0),
+ projected_savings=estimated_monthly_savings,
+ risk_level=RiskLevel.LOW,
+ business_criticality="low",
+ modification_required=not dry_run
  )
  ],
- # Add missing required fields
- resources_analyzed=len(unused_workspaces),
- resources_impacted=[], # Must be a list
+ # Business metrics for executive reporting
  business_metrics={
- "total_monthly_savings": estimated_savings,
- "overall_risk_level": "low"
+ "total_monthly_savings": estimated_monthly_savings,
+ "overall_risk_level": "low",
+ "unused_workspaces_count": unused_workspaces_count,
+ "total_workspaces_analyzed": total_workspaces
  },
- recommendations=[],
- aws_profile_used=self.profile or "default",
- current_monthly_spend=0.0,
- optimized_monthly_spend=0.0
+ recommendations=[
+ f"Terminate {unused_workspaces_count} unused WorkSpaces to save ${estimated_monthly_savings:.2f}/month",
+ f"Estimated annual savings: ${estimated_annual_savings:.2f}",
+ "Verify WorkSpaces are truly unused before termination",
+ "Consider implementing usage monitoring for remaining WorkSpaces"
+ ],
+ aws_profile_used=self.profile or "default"
  )
  
  async def optimize_rds_snapshots(
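
Instead of multiplying a flat $45 per WorkSpace, the rewritten method now reads the summary produced by analyze_workspaces and derives its figures from the reported baseline, guarding the percentage against a zero total. A standalone sketch of that calculation; the summary keys mirror the ones referenced above, and the figures are made-up sample values:

    # Sketch: turn an analyzer summary into monthly/annual savings and a guarded percentage.
    summary = {  # assumed shape with sample figures, mirroring the keys used in the diff
        "total_monthly_cost": 2150.0,
        "unused_monthly_cost": 1043.0,
        "potential_annual_savings": 12518.0,
        "unused_workspaces": 23,
        "total_workspaces": 48,
    }

    monthly_savings = summary.get("unused_monthly_cost", 0.0)
    annual_savings = summary.get("potential_annual_savings", 0.0)

    # Avoid dividing by zero when no baseline cost data is available.
    savings_percentage = 0.0
    if summary.get("total_monthly_cost", 0) > 0:
        savings_percentage = monthly_savings / summary["total_monthly_cost"] * 100

    print(f"${monthly_savings:,.2f}/month (~{savings_percentage:.1f}%), ${annual_savings:,.2f}/year")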
@@ -1399,8 +1412,11 @@ class CostOptimizer(CloudOpsBase):
  ResourceImpact(
  resource_id=f"rds-snapshots-cleanup-{len(old_snapshots)}",
  resource_type="AWS::RDS::DBSnapshot",
- action="delete",
- monthly_savings=estimated_monthly_savings,
+ resource_name=f"RDS Manual Snapshots Cleanup ({len(old_snapshots)} snapshots)",
+ region=self.region,
+ account_id=self.account_id,
+ estimated_monthly_cost=estimated_monthly_savings,
+ projected_savings=estimated_monthly_savings,
  risk_level=RiskLevel.MEDIUM
  )
  ],
runbooks/cloudops/models.py CHANGED
@@ -164,16 +164,22 @@ class CloudOpsExecutionResult(BaseModel):
  
  class CostOptimizationResult(CloudOpsExecutionResult):
  """Specialized result for cost optimization scenarios."""
- 
+ 
  # Cost-Specific Metrics
  current_monthly_spend: float = Field(description="Current monthly spend for analyzed resources")
  optimized_monthly_spend: float = Field(description="Projected monthly spend after optimization")
  savings_percentage: float = Field(ge=0, le=100, description="Savings percentage")
- 
+ annual_savings: float = Field(description="Annual savings projection for business scenarios", default=0.0)
+ total_monthly_savings: float = Field(description="Total projected monthly savings", default=0.0)
+ 
  # Resource Categories
  idle_resources: List[ResourceImpact] = Field(description="Identified idle resources", default=[])
  oversized_resources: List[ResourceImpact] = Field(description="Identified oversized resources", default=[])
  unattached_resources: List[ResourceImpact] = Field(description="Identified unattached resources", default=[])
+ 
+ # Additional fields used by cost_optimizer.py
+ affected_resources: int = Field(description="Number of resources affected by optimization", default=0)
+ resource_impacts: List[ResourceImpact] = Field(description="Detailed resource impact analysis", default=[])
  
  @field_validator('optimized_monthly_spend')
  @classmethod
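
The added fields all carry defaults, so existing callers that never set annual_savings, affected_resources, or resource_impacts keep passing validation. A minimal pydantic sketch of that defaulted-field pattern (a stand-in model, not the real CostOptimizationResult):

    # Sketch of the defaulted-field pattern: new fields validate even when omitted.
    from typing import List
    from pydantic import BaseModel, Field

    class SavingsSummary(BaseModel):  # illustrative stand-in for CostOptimizationResult
        savings_percentage: float = Field(ge=0, le=100, description="Savings percentage")
        annual_savings: float = Field(default=0.0, description="Annual savings projection")
        total_monthly_savings: float = Field(default=0.0, description="Projected monthly savings")
        affected_resources: int = Field(default=0, description="Resources affected by optimization")
        resource_impacts: List[str] = Field(default=[], description="Per-resource impact details")

    # An older call site that only supplies the original field still constructs cleanly.
    print(SavingsSummary(savings_percentage=12.5).annual_savings)  # 0.0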
runbooks/common/aws_pricing.py CHANGED
@@ -771,6 +771,18 @@ class DynamicAWSPricing:
  ]
  }
  
+ # Handle data_transfer service with graceful fallback
+ if service_key == "data_transfer":
+ print_warning("data_transfer service not supported by AWS Pricing API - using standard rates")
+ # Return standard AWS data transfer pricing structure
+ return AWSPricingResult(
+ service_key="data_transfer",
+ region=region,
+ monthly_cost=0.045, # $0.045/GB for NAT Gateway data processing
+ pricing_source="aws_standard_rates",
+ last_updated=datetime.now()
+ )
+ 
  if service_key not in service_mapping:
  raise ValueError(f"Service {service_key} not supported by AWS Pricing API integration")
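
The new branch short-circuits before the service_mapping lookup, so a data_transfer request returns a flat standard rate instead of hitting the ValueError below it. A standalone sketch of that early-return fallback; the result dataclass and supported-service set here are illustrative, not the module's real AWSPricingResult:

    # Sketch: graceful fallback for a service key the Pricing API integration does not cover.
    from dataclasses import dataclass
    from datetime import datetime

    @dataclass
    class PricingResult:  # illustrative stand-in for AWSPricingResult
        service_key: str
        region: str
        monthly_cost: float
        pricing_source: str
        last_updated: datetime

    SUPPORTED_SERVICES = {"nat_gateway", "ebs_volumes", "workspaces"}  # assumed mapping keys

    def get_service_pricing(service_key: str, region: str) -> PricingResult:
        if service_key == "data_transfer":
            # Not exposed by the Pricing API integration; fall back to a standard per-GB rate.
            return PricingResult("data_transfer", region, 0.045, "aws_standard_rates", datetime.now())
        if service_key not in SUPPORTED_SERVICES:
            raise ValueError(f"Service {service_key} not supported by AWS Pricing API integration")
        return PricingResult(service_key, region, 0.0, "aws_pricing_api", datetime.now())

    print(get_service_pricing("data_transfer", "us-east-1").monthly_cost)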