runbooks 1.1.3-py3-none-any.whl → 1.1.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (247)
  1. runbooks/__init__.py +31 -2
  2. runbooks/__init___optimized.py +18 -4
  3. runbooks/_platform/__init__.py +1 -5
  4. runbooks/_platform/core/runbooks_wrapper.py +141 -138
  5. runbooks/aws2/accuracy_validator.py +812 -0
  6. runbooks/base.py +7 -0
  7. runbooks/cfat/WEIGHT_CONFIG_README.md +1 -1
  8. runbooks/cfat/assessment/compliance.py +8 -8
  9. runbooks/cfat/assessment/runner.py +1 -0
  10. runbooks/cfat/cloud_foundations_assessment.py +227 -239
  11. runbooks/cfat/models.py +6 -2
  12. runbooks/cfat/tests/__init__.py +6 -1
  13. runbooks/cli/__init__.py +13 -0
  14. runbooks/cli/commands/cfat.py +274 -0
  15. runbooks/cli/commands/finops.py +1164 -0
  16. runbooks/cli/commands/inventory.py +379 -0
  17. runbooks/cli/commands/operate.py +239 -0
  18. runbooks/cli/commands/security.py +248 -0
  19. runbooks/cli/commands/validation.py +825 -0
  20. runbooks/cli/commands/vpc.py +310 -0
  21. runbooks/cli/registry.py +107 -0
  22. runbooks/cloudops/__init__.py +23 -30
  23. runbooks/cloudops/base.py +96 -107
  24. runbooks/cloudops/cost_optimizer.py +549 -547
  25. runbooks/cloudops/infrastructure_optimizer.py +5 -4
  26. runbooks/cloudops/interfaces.py +226 -227
  27. runbooks/cloudops/lifecycle_manager.py +5 -4
  28. runbooks/cloudops/mcp_cost_validation.py +252 -235
  29. runbooks/cloudops/models.py +78 -53
  30. runbooks/cloudops/monitoring_automation.py +5 -4
  31. runbooks/cloudops/notebook_framework.py +179 -215
  32. runbooks/cloudops/security_enforcer.py +125 -159
  33. runbooks/common/accuracy_validator.py +11 -0
  34. runbooks/common/aws_pricing.py +349 -326
  35. runbooks/common/aws_pricing_api.py +211 -212
  36. runbooks/common/aws_profile_manager.py +341 -0
  37. runbooks/common/aws_utils.py +75 -80
  38. runbooks/common/business_logic.py +127 -105
  39. runbooks/common/cli_decorators.py +36 -60
  40. runbooks/common/comprehensive_cost_explorer_integration.py +456 -464
  41. runbooks/common/cross_account_manager.py +198 -205
  42. runbooks/common/date_utils.py +27 -39
  43. runbooks/common/decorators.py +235 -0
  44. runbooks/common/dry_run_examples.py +173 -208
  45. runbooks/common/dry_run_framework.py +157 -155
  46. runbooks/common/enhanced_exception_handler.py +15 -4
  47. runbooks/common/enhanced_logging_example.py +50 -64
  48. runbooks/common/enhanced_logging_integration_example.py +65 -37
  49. runbooks/common/env_utils.py +16 -16
  50. runbooks/common/error_handling.py +40 -38
  51. runbooks/common/lazy_loader.py +41 -23
  52. runbooks/common/logging_integration_helper.py +79 -86
  53. runbooks/common/mcp_cost_explorer_integration.py +478 -495
  54. runbooks/common/mcp_integration.py +63 -74
  55. runbooks/common/memory_optimization.py +140 -118
  56. runbooks/common/module_cli_base.py +37 -58
  57. runbooks/common/organizations_client.py +176 -194
  58. runbooks/common/patterns.py +204 -0
  59. runbooks/common/performance_monitoring.py +67 -71
  60. runbooks/common/performance_optimization_engine.py +283 -274
  61. runbooks/common/profile_utils.py +248 -39
  62. runbooks/common/rich_utils.py +643 -92
  63. runbooks/common/sre_performance_suite.py +177 -186
  64. runbooks/enterprise/__init__.py +1 -1
  65. runbooks/enterprise/logging.py +144 -106
  66. runbooks/enterprise/security.py +187 -204
  67. runbooks/enterprise/validation.py +43 -56
  68. runbooks/finops/__init__.py +29 -33
  69. runbooks/finops/account_resolver.py +1 -1
  70. runbooks/finops/advanced_optimization_engine.py +980 -0
  71. runbooks/finops/automation_core.py +268 -231
  72. runbooks/finops/business_case_config.py +184 -179
  73. runbooks/finops/cli.py +660 -139
  74. runbooks/finops/commvault_ec2_analysis.py +157 -164
  75. runbooks/finops/compute_cost_optimizer.py +336 -320
  76. runbooks/finops/config.py +20 -20
  77. runbooks/finops/cost_optimizer.py +488 -622
  78. runbooks/finops/cost_processor.py +332 -214
  79. runbooks/finops/dashboard_runner.py +1006 -172
  80. runbooks/finops/ebs_cost_optimizer.py +991 -657
  81. runbooks/finops/elastic_ip_optimizer.py +317 -257
  82. runbooks/finops/enhanced_mcp_integration.py +340 -0
  83. runbooks/finops/enhanced_progress.py +40 -37
  84. runbooks/finops/enhanced_trend_visualization.py +3 -2
  85. runbooks/finops/enterprise_wrappers.py +230 -292
  86. runbooks/finops/executive_export.py +203 -160
  87. runbooks/finops/helpers.py +130 -288
  88. runbooks/finops/iam_guidance.py +1 -1
  89. runbooks/finops/infrastructure/__init__.py +80 -0
  90. runbooks/finops/infrastructure/commands.py +506 -0
  91. runbooks/finops/infrastructure/load_balancer_optimizer.py +866 -0
  92. runbooks/finops/infrastructure/vpc_endpoint_optimizer.py +832 -0
  93. runbooks/finops/markdown_exporter.py +338 -175
  94. runbooks/finops/mcp_validator.py +1952 -0
  95. runbooks/finops/nat_gateway_optimizer.py +1513 -482
  96. runbooks/finops/network_cost_optimizer.py +657 -587
  97. runbooks/finops/notebook_utils.py +226 -188
  98. runbooks/finops/optimization_engine.py +1136 -0
  99. runbooks/finops/optimizer.py +25 -29
  100. runbooks/finops/rds_snapshot_optimizer.py +367 -411
  101. runbooks/finops/reservation_optimizer.py +427 -363
  102. runbooks/finops/scenario_cli_integration.py +77 -78
  103. runbooks/finops/scenarios.py +1278 -439
  104. runbooks/finops/schemas.py +218 -182
  105. runbooks/finops/snapshot_manager.py +2289 -0
  106. runbooks/finops/tests/test_finops_dashboard.py +3 -3
  107. runbooks/finops/tests/test_reference_images_validation.py +2 -2
  108. runbooks/finops/tests/test_single_account_features.py +17 -17
  109. runbooks/finops/tests/validate_test_suite.py +1 -1
  110. runbooks/finops/types.py +3 -3
  111. runbooks/finops/validation_framework.py +263 -269
  112. runbooks/finops/vpc_cleanup_exporter.py +191 -146
  113. runbooks/finops/vpc_cleanup_optimizer.py +593 -575
  114. runbooks/finops/workspaces_analyzer.py +171 -182
  115. runbooks/hitl/enhanced_workflow_engine.py +1 -1
  116. runbooks/integration/__init__.py +89 -0
  117. runbooks/integration/mcp_integration.py +1920 -0
  118. runbooks/inventory/CLAUDE.md +816 -0
  119. runbooks/inventory/README.md +3 -3
  120. runbooks/inventory/Tests/common_test_data.py +30 -30
  121. runbooks/inventory/__init__.py +2 -2
  122. runbooks/inventory/cloud_foundations_integration.py +144 -149
  123. runbooks/inventory/collectors/aws_comprehensive.py +28 -11
  124. runbooks/inventory/collectors/aws_networking.py +111 -101
  125. runbooks/inventory/collectors/base.py +4 -0
  126. runbooks/inventory/core/collector.py +495 -313
  127. runbooks/inventory/discovery.md +2 -2
  128. runbooks/inventory/drift_detection_cli.py +69 -96
  129. runbooks/inventory/find_ec2_security_groups.py +1 -1
  130. runbooks/inventory/inventory_mcp_cli.py +48 -46
  131. runbooks/inventory/list_rds_snapshots_aggregator.py +192 -208
  132. runbooks/inventory/mcp_inventory_validator.py +549 -465
  133. runbooks/inventory/mcp_vpc_validator.py +359 -442
  134. runbooks/inventory/organizations_discovery.py +56 -52
  135. runbooks/inventory/rich_inventory_display.py +33 -32
  136. runbooks/inventory/unified_validation_engine.py +278 -251
  137. runbooks/inventory/vpc_analyzer.py +733 -696
  138. runbooks/inventory/vpc_architecture_validator.py +293 -348
  139. runbooks/inventory/vpc_dependency_analyzer.py +382 -378
  140. runbooks/inventory/vpc_flow_analyzer.py +3 -3
  141. runbooks/main.py +152 -9147
  142. runbooks/main_final.py +91 -60
  143. runbooks/main_minimal.py +22 -10
  144. runbooks/main_optimized.py +131 -100
  145. runbooks/main_ultra_minimal.py +7 -2
  146. runbooks/mcp/__init__.py +36 -0
  147. runbooks/mcp/integration.py +679 -0
  148. runbooks/metrics/dora_metrics_engine.py +2 -2
  149. runbooks/monitoring/performance_monitor.py +9 -4
  150. runbooks/operate/dynamodb_operations.py +3 -1
  151. runbooks/operate/ec2_operations.py +145 -137
  152. runbooks/operate/iam_operations.py +146 -152
  153. runbooks/operate/mcp_integration.py +1 -1
  154. runbooks/operate/networking_cost_heatmap.py +33 -10
  155. runbooks/operate/privatelink_operations.py +1 -1
  156. runbooks/operate/rds_operations.py +223 -254
  157. runbooks/operate/s3_operations.py +107 -118
  158. runbooks/operate/vpc_endpoints.py +1 -1
  159. runbooks/operate/vpc_operations.py +648 -618
  160. runbooks/remediation/base.py +1 -1
  161. runbooks/remediation/commons.py +10 -7
  162. runbooks/remediation/commvault_ec2_analysis.py +71 -67
  163. runbooks/remediation/ec2_unattached_ebs_volumes.py +1 -0
  164. runbooks/remediation/multi_account.py +24 -21
  165. runbooks/remediation/rds_snapshot_list.py +91 -65
  166. runbooks/remediation/remediation_cli.py +92 -146
  167. runbooks/remediation/universal_account_discovery.py +83 -79
  168. runbooks/remediation/workspaces_list.py +49 -44
  169. runbooks/security/__init__.py +19 -0
  170. runbooks/security/assessment_runner.py +1150 -0
  171. runbooks/security/baseline_checker.py +812 -0
  172. runbooks/security/cloudops_automation_security_validator.py +509 -535
  173. runbooks/security/compliance_automation_engine.py +17 -17
  174. runbooks/security/config/__init__.py +2 -2
  175. runbooks/security/config/compliance_config.py +50 -50
  176. runbooks/security/config_template_generator.py +63 -76
  177. runbooks/security/enterprise_security_framework.py +1 -1
  178. runbooks/security/executive_security_dashboard.py +519 -508
  179. runbooks/security/integration_test_enterprise_security.py +5 -3
  180. runbooks/security/multi_account_security_controls.py +959 -1210
  181. runbooks/security/real_time_security_monitor.py +422 -444
  182. runbooks/security/run_script.py +1 -1
  183. runbooks/security/security_baseline_tester.py +1 -1
  184. runbooks/security/security_cli.py +143 -112
  185. runbooks/security/test_2way_validation.py +439 -0
  186. runbooks/security/two_way_validation_framework.py +852 -0
  187. runbooks/sre/mcp_reliability_engine.py +6 -6
  188. runbooks/sre/production_monitoring_framework.py +167 -177
  189. runbooks/tdd/__init__.py +15 -0
  190. runbooks/tdd/cli.py +1071 -0
  191. runbooks/utils/__init__.py +14 -17
  192. runbooks/utils/logger.py +7 -2
  193. runbooks/utils/version_validator.py +51 -48
  194. runbooks/validation/__init__.py +6 -6
  195. runbooks/validation/cli.py +9 -3
  196. runbooks/validation/comprehensive_2way_validator.py +754 -708
  197. runbooks/validation/mcp_validator.py +906 -228
  198. runbooks/validation/terraform_citations_validator.py +104 -115
  199. runbooks/validation/terraform_drift_detector.py +447 -451
  200. runbooks/vpc/README.md +617 -0
  201. runbooks/vpc/__init__.py +8 -1
  202. runbooks/vpc/analyzer.py +577 -0
  203. runbooks/vpc/cleanup_wrapper.py +476 -413
  204. runbooks/vpc/cli_cloudtrail_commands.py +339 -0
  205. runbooks/vpc/cli_mcp_validation_commands.py +480 -0
  206. runbooks/vpc/cloudtrail_audit_integration.py +717 -0
  207. runbooks/vpc/config.py +92 -97
  208. runbooks/vpc/cost_engine.py +411 -148
  209. runbooks/vpc/cost_explorer_integration.py +553 -0
  210. runbooks/vpc/cross_account_session.py +101 -106
  211. runbooks/vpc/enhanced_mcp_validation.py +917 -0
  212. runbooks/vpc/eni_gate_validator.py +961 -0
  213. runbooks/vpc/heatmap_engine.py +190 -162
  214. runbooks/vpc/mcp_no_eni_validator.py +681 -640
  215. runbooks/vpc/nat_gateway_optimizer.py +358 -0
  216. runbooks/vpc/networking_wrapper.py +15 -8
  217. runbooks/vpc/pdca_remediation_planner.py +528 -0
  218. runbooks/vpc/performance_optimized_analyzer.py +219 -231
  219. runbooks/vpc/runbooks_adapter.py +1167 -241
  220. runbooks/vpc/tdd_red_phase_stubs.py +601 -0
  221. runbooks/vpc/test_data_loader.py +358 -0
  222. runbooks/vpc/tests/conftest.py +314 -4
  223. runbooks/vpc/tests/test_cleanup_framework.py +1022 -0
  224. runbooks/vpc/tests/test_cost_engine.py +0 -2
  225. runbooks/vpc/topology_generator.py +326 -0
  226. runbooks/vpc/unified_scenarios.py +1302 -1129
  227. runbooks/vpc/vpc_cleanup_integration.py +1943 -1115
  228. runbooks-1.1.5.dist-info/METADATA +328 -0
  229. {runbooks-1.1.3.dist-info → runbooks-1.1.5.dist-info}/RECORD +233 -200
  230. runbooks/finops/README.md +0 -414
  231. runbooks/finops/accuracy_cross_validator.py +0 -647
  232. runbooks/finops/business_cases.py +0 -950
  233. runbooks/finops/dashboard_router.py +0 -922
  234. runbooks/finops/ebs_optimizer.py +0 -956
  235. runbooks/finops/embedded_mcp_validator.py +0 -1629
  236. runbooks/finops/enhanced_dashboard_runner.py +0 -527
  237. runbooks/finops/finops_dashboard.py +0 -584
  238. runbooks/finops/finops_scenarios.py +0 -1218
  239. runbooks/finops/legacy_migration.py +0 -730
  240. runbooks/finops/multi_dashboard.py +0 -1519
  241. runbooks/finops/single_dashboard.py +0 -1113
  242. runbooks/finops/unlimited_scenarios.py +0 -393
  243. runbooks-1.1.3.dist-info/METADATA +0 -799
  244. {runbooks-1.1.3.dist-info → runbooks-1.1.5.dist-info}/WHEEL +0 -0
  245. {runbooks-1.1.3.dist-info → runbooks-1.1.5.dist-info}/entry_points.txt +0 -0
  246. {runbooks-1.1.3.dist-info → runbooks-1.1.5.dist-info}/licenses/LICENSE +0 -0
  247. {runbooks-1.1.3.dist-info → runbooks-1.1.5.dist-info}/top_level.txt +0 -0
runbooks/finops/legacy_migration.py
@@ -1,730 +0,0 @@
- """
- 🔄 CloudOps-Automation Legacy Migration Module
- Systematic Migration Utilities for 67+ Legacy Notebooks
-
- Strategic Achievement: Migration framework enabling systematic transition from
- 15,000+ redundant lines of legacy notebooks to 3,400 lines modular architecture
- with complete traceability and business continuity.
-
- Module Focus: Provide systematic migration utilities, dependency mapping, and
- legacy deprecation strategies while maintaining business continuity and audit trails.
-
- Key Features:
- - Legacy notebook dependency analysis and mapping
- - Systematic migration planning and execution
- - Business continuity validation during migration
- - FAANG naming convention migration support
- - Complete audit trails and rollback capabilities
- - Legacy deprecation strategies (Phase 3C)
-
- Author: Enterprise Agile Team (6-Agent Coordination)
- Version: 0.9.6 - Distributed Architecture Framework
- """
-
- import os
- import json
- import shutil
- import subprocess
- from typing import Dict, List, Optional, Any, Union, Set, Tuple
- from dataclasses import dataclass, field
- from enum import Enum
- from datetime import datetime, timedelta
- from pathlib import Path
- import re
-
- from ..common.rich_utils import (
-     console, print_header, print_success, print_warning, print_error,
-     create_table, create_progress_bar, format_cost
- )
-
-
- class MigrationStatus(Enum):
-     """Migration status for legacy notebooks."""
-     ANALYSIS_PENDING = "analysis_pending"
-     ANALYSIS_COMPLETE = "analysis_complete"
-     MIGRATION_PLANNED = "migration_planned"
-     MIGRATION_IN_PROGRESS = "migration_in_progress"
-     MIGRATION_COMPLETE = "migration_complete"
-     VALIDATION_PENDING = "validation_pending"
-     VALIDATED = "validated"
-     DEPRECATED = "deprecated"
-     ROLLBACK_REQUIRED = "rollback_required"
-
-
- class MigrationStrategy(Enum):
-     """Migration strategy for different notebook types."""
-     DIRECT_PORT = "direct_port"  # Direct 1:1 migration
-     BUSINESS_LOGIC_EXTRACT = "business_extract"  # Extract core business logic only
-     CONSOLIDATE_SIMILAR = "consolidate_similar"  # Merge similar notebooks
-     WRAPPER_INTEGRATION = "wrapper_integration"  # Integrate via wrappers
-     DEPRECATE_REDUNDANT = "deprecate_redundant"  # Remove redundant notebooks
-
-
- class BusinessContinuityLevel(Enum):
-     """Business continuity requirements during migration."""
-     CRITICAL = "critical"  # Zero downtime, rollback ready
-     HIGH = "high"  # Planned maintenance window
-     MEDIUM = "medium"  # Business hours acceptable
-     LOW = "low"  # Flexible timing
-
-
- @dataclass
- class LegacyNotebook:
-     """Legacy notebook analysis and migration tracking."""
-     notebook_path: str
-     notebook_name: str
-     business_function: str
-     estimated_usage: str
-     dependencies: List[str] = field(default_factory=list)
-     migration_strategy: Optional[MigrationStrategy] = None
-     migration_status: MigrationStatus = MigrationStatus.ANALYSIS_PENDING
-     target_module_path: Optional[str] = None
-     business_continuity: BusinessContinuityLevel = BusinessContinuityLevel.MEDIUM
-     stakeholder_impact: List[str] = field(default_factory=list)
-     estimated_savings: Optional[str] = None
-     migration_priority: int = 5  # 1=highest, 5=lowest
-     rollback_plan: Optional[str] = None
-     validation_criteria: List[str] = field(default_factory=list)
-
-
- @dataclass
- class MigrationPlan:
-     """Comprehensive migration plan for legacy notebook consolidation."""
-     plan_id: str
-     total_notebooks: int
-     migration_phases: List[Dict[str, Any]]
-     estimated_timeline: str
-     business_impact_summary: Dict[str, Any]
-     risk_assessment: Dict[str, Any]
-     rollback_strategy: Dict[str, Any]
-     success_criteria: List[str]
-     created_timestamp: str = field(default_factory=lambda: datetime.now().isoformat())
-
-
- @dataclass
- class MigrationResult:
-     """Result of migration operation with comprehensive tracking."""
-     notebook_name: str
-     migration_status: MigrationStatus
-     target_module: Optional[str]
-     business_impact: Dict[str, Any]
-     technical_details: Dict[str, Any]
-     validation_results: Dict[str, Any]
-     rollback_available: bool
-     artifacts_created: List[str]
-     execution_timestamp: str = field(default_factory=lambda: datetime.now().isoformat())
-
-
- class LegacyMigrationAnalyzer:
-     """
-     Analyze legacy CloudOps-Automation notebooks for migration planning.
-
-     Strategic Focus: Systematic analysis of 67+ notebooks to identify consolidation
-     opportunities and create comprehensive migration roadmap.
-     """
-
-     def __init__(self, legacy_base_path: str = "README/CloudOps-Automation"):
-         """
-         Initialize legacy migration analyzer.
-
-         Args:
-             legacy_base_path: Path to legacy CloudOps-Automation notebooks
-         """
-         self.legacy_base_path = legacy_base_path
-         self.analyzed_notebooks: List[LegacyNotebook] = []
-         self.migration_plan: Optional[MigrationPlan] = None
-         self.dependency_graph: Dict[str, Set[str]] = {}
-
-         # Migration tracking
-         self.migration_history: List[MigrationResult] = []
-         self.rollback_stack: List[Dict[str, Any]] = []
-
-     def discover_legacy_notebooks(self) -> List[LegacyNotebook]:
-         """
-         Discover and catalog all legacy CloudOps-Automation notebooks.
-
-         Returns:
-             List of discovered legacy notebooks with initial analysis
-         """
-         print_header("Legacy Notebook Discovery", "Migration Analyzer v0.9.6")
-
-         discovered_notebooks = []
-
-         if not os.path.exists(self.legacy_base_path):
-             print_warning(f"Legacy path not found: {self.legacy_base_path}")
-             return discovered_notebooks
-
-         # Search for .ipynb files
-         for root, dirs, files in os.walk(self.legacy_base_path):
-             for file in files:
-                 if file.endswith('.ipynb'):
-                     notebook_path = os.path.join(root, file)
-                     notebook_name = file[:-6]  # Remove .ipynb extension
-
-                     # Analyze notebook for migration planning
-                     notebook_analysis = self._analyze_notebook_content(notebook_path, notebook_name)
-                     discovered_notebooks.append(notebook_analysis)
-
-         self.analyzed_notebooks = discovered_notebooks
-         print_success(f"Discovered {len(discovered_notebooks)} legacy notebooks")
-
-         return discovered_notebooks
-
-     def analyze_dependencies(self) -> Dict[str, Set[str]]:
-         """
-         Analyze dependencies between legacy notebooks.
-
-         Returns:
-             Dependency graph mapping notebook dependencies
-         """
-         print_header("Dependency Analysis", "Migration Analyzer v0.9.6")
-
-         dependency_graph = {}
-
-         for notebook in self.analyzed_notebooks:
-             dependencies = set()
-
-             # Analyze notebook content for dependencies
-             if os.path.exists(notebook.notebook_path):
-                 dependencies = self._extract_notebook_dependencies(notebook.notebook_path)
-
-             dependency_graph[notebook.notebook_name] = dependencies
-             notebook.dependencies = list(dependencies)
-
-         self.dependency_graph = dependency_graph
-         print_success(f"Analyzed dependencies for {len(dependency_graph)} notebooks")
-
-         return dependency_graph
-
-     def create_migration_plan(self) -> MigrationPlan:
-         """
-         Create comprehensive migration plan based on analysis.
-
-         Strategic Output: Executive-ready migration roadmap with phases and timelines
-         """
-         print_header("Migration Planning", "Strategic Roadmap v0.9.6")
-
-         if not self.analyzed_notebooks:
-             self.discover_legacy_notebooks()
-
-         if not self.dependency_graph:
-             self.analyze_dependencies()
-
-         # Categorize notebooks by migration strategy
-         migration_categories = self._categorize_by_migration_strategy()
-
-         # Create migration phases
-         migration_phases = self._create_migration_phases(migration_categories)
-
-         # Calculate business impact
-         business_impact = self._calculate_migration_business_impact()
-
-         # Risk assessment
-         risk_assessment = self._assess_migration_risks()
-
-         # Create migration plan
-         plan_id = f"cloudops_migration_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
-
-         self.migration_plan = MigrationPlan(
-             plan_id=plan_id,
-             total_notebooks=len(self.analyzed_notebooks),
-             migration_phases=migration_phases,
-             estimated_timeline="12-18 weeks systematic migration",
-             business_impact_summary=business_impact,
-             risk_assessment=risk_assessment,
-             rollback_strategy=self._create_rollback_strategy(),
-             success_criteria=[
-                 "≥75% redundancy elimination achieved",
-                 "Zero business disruption during migration",
-                 "Complete audit trail and traceability maintained",
-                 "$78,500+ annual maintenance savings realized",
-                 "≥99.5% functional equivalence validation"
-             ]
-         )
-
-         print_success(f"Migration plan created: {len(migration_phases)} phases, {self.migration_plan.estimated_timeline}")
-
-         return self.migration_plan
-
-     def execute_migration_phase(
-         self,
-         phase_number: int,
-         dry_run: bool = True
-     ) -> List[MigrationResult]:
-         """
-         Execute specific migration phase with comprehensive tracking.
-
-         Args:
-             phase_number: Phase number to execute (1-based)
-             dry_run: Whether to perform dry run (default True)
-
-         Returns:
-             List of migration results for phase
-         """
-         if not self.migration_plan:
-             raise ValueError("Migration plan not created. Run create_migration_plan() first.")
-
-         if phase_number < 1 or phase_number > len(self.migration_plan.migration_phases):
-             raise ValueError(f"Invalid phase number: {phase_number}")
-
-         phase = self.migration_plan.migration_phases[phase_number - 1]
-         print_header(f"Migration Phase {phase_number}", f"Executing {phase['name']}")
-
-         phase_results = []
-         notebooks_in_phase = phase.get('notebooks', [])
-
-         with create_progress_bar() as progress:
-             task = progress.add_task(f"Migrating {len(notebooks_in_phase)} notebooks...", total=len(notebooks_in_phase))
-
-             for notebook_name in notebooks_in_phase:
-                 notebook = self._find_notebook_by_name(notebook_name)
-                 if notebook:
-                     result = self._migrate_single_notebook(notebook, dry_run)
-                     phase_results.append(result)
-                     self.migration_history.append(result)
-
-                 progress.update(task, advance=1)
-
-         successful_migrations = len([r for r in phase_results if r.migration_status == MigrationStatus.MIGRATION_COMPLETE])
-         print_success(f"Phase {phase_number} complete: {successful_migrations}/{len(phase_results)} notebooks migrated successfully")
-
-         return phase_results
-
-     def validate_migration_integrity(self) -> Dict[str, Any]:
-         """
-         Validate migration integrity and business continuity.
-
-         Returns:
-             Comprehensive validation report
-         """
-         print_header("Migration Validation", "Integrity Check v0.9.6")
-
-         validation_report = {
-             "validation_timestamp": datetime.now().isoformat(),
-             "notebooks_migrated": len([n for n in self.analyzed_notebooks if n.migration_status == MigrationStatus.MIGRATION_COMPLETE]),
-             "total_notebooks": len(self.analyzed_notebooks),
-             "business_continuity_checks": [],
-             "functional_equivalence_checks": [],
-             "performance_validations": [],
-             "overall_status": "pending"
-         }
-
-         # Business continuity validation
-         for notebook in self.analyzed_notebooks:
-             if notebook.migration_status == MigrationStatus.MIGRATION_COMPLETE:
-                 continuity_check = self._validate_business_continuity(notebook)
-                 validation_report["business_continuity_checks"].append(continuity_check)
-
-         # Calculate overall validation status
-         passed_checks = len([c for c in validation_report["business_continuity_checks"] if c.get("status") == "passed"])
-         total_checks = len(validation_report["business_continuity_checks"])
-
-         if total_checks > 0:
-             success_rate = (passed_checks / total_checks) * 100
-             validation_report["success_rate"] = f"{success_rate:.1f}%"
-             validation_report["overall_status"] = "passed" if success_rate >= 95.0 else "warning" if success_rate >= 90.0 else "failed"
-
-         print_success(f"Migration validation complete: {validation_report['success_rate']} success rate")
-
-         return validation_report
-
-     def create_deprecation_plan(self) -> Dict[str, Any]:
-         """
-         Create legacy deprecation plan (Phase 3C) after all migrations complete.
-
-         Strategic Focus: Safe deprecation of legacy notebooks with complete audit trail
-         """
-         print_header("Legacy Deprecation Planning", "Phase 3C Strategy v0.9.6")
-
-         # Ensure all migrations are complete before deprecation
-         incomplete_migrations = [n for n in self.analyzed_notebooks
-                                  if n.migration_status not in [MigrationStatus.MIGRATION_COMPLETE, MigrationStatus.VALIDATED]]
-
-         if incomplete_migrations:
-             print_warning(f"Cannot create deprecation plan: {len(incomplete_migrations)} notebooks not yet migrated")
-             return {"status": "blocked", "reason": "incomplete_migrations", "pending_count": len(incomplete_migrations)}
-
-         deprecation_plan = {
-             "plan_id": f"deprecation_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
-             "total_notebooks_for_deprecation": len(self.analyzed_notebooks),
-             "deprecation_phases": self._create_deprecation_phases(),
-             "safety_measures": [
-                 "Complete backup of all legacy notebooks before deprecation",
-                 "6-month grace period with deprecation warnings",
-                 "Rollback capability maintained for 12 months",
-                 "Stakeholder notification 30 days before deprecation"
-             ],
-             "success_criteria": [
-                 "Zero business disruption from legacy removal",
-                 "Complete functionality available in new modules",
-                 "All stakeholders migrated to new interfaces",
-                 "Audit trail preserved for compliance"
-             ],
-             "estimated_timeline": "3-6 months phased deprecation",
-             "created_timestamp": datetime.now().isoformat()
-         }
-
-         print_success(f"Deprecation plan created: {len(deprecation_plan['deprecation_phases'])} phases")
-         print_warning("⚠️ Phase 3C deprecation should only execute after complete migration validation")
-
-         return deprecation_plan
-
-     def _analyze_notebook_content(self, notebook_path: str, notebook_name: str) -> LegacyNotebook:
-         """Analyze individual notebook for migration planning."""
-
-         # Determine business function from notebook name patterns
-         business_function = self._classify_business_function(notebook_name)
-
-         # Estimate usage based on business function complexity
-         estimated_usage = self._estimate_notebook_usage(notebook_name, business_function)
-
-         # Determine migration strategy
-         migration_strategy = self._determine_migration_strategy(notebook_name, business_function)
-
-         # Set business continuity level
-         business_continuity = self._assess_business_continuity_level(business_function)
-
-         # Estimate potential savings
-         estimated_savings = self._estimate_migration_savings(notebook_name, business_function)
-
-         return LegacyNotebook(
-             notebook_path=notebook_path,
-             notebook_name=notebook_name,
-             business_function=business_function,
-             estimated_usage=estimated_usage,
-             migration_strategy=migration_strategy,
-             business_continuity=business_continuity,
-             estimated_savings=estimated_savings,
-             migration_priority=self._calculate_migration_priority(business_function, estimated_savings)
-         )
-
-     def _classify_business_function(self, notebook_name: str) -> str:
-         """Classify notebook business function based on naming patterns."""
-
-         name_lower = notebook_name.lower()
-
-         if any(keyword in name_lower for keyword in ['cost', 'ebs', 'nat', 'elastic', 'reserved']):
-             return "Cost Optimization"
-         elif any(keyword in name_lower for keyword in ['security', 'encrypt', 'iam', 'access']):
-             return "Security & Compliance"
-         elif any(keyword in name_lower for keyword in ['tag', 'resource', 'lifecycle', 'manage']):
-             return "Resource Management"
-         elif any(keyword in name_lower for keyword in ['network', 'route53', 'alb', 'elb']):
-             return "Network Infrastructure"
-         else:
-             return "Specialized Operations"
-
-     def _estimate_notebook_usage(self, notebook_name: str, business_function: str) -> str:
-         """Estimate notebook usage frequency."""
-
-         if business_function == "Cost Optimization":
-             return "High - Monthly optimization cycles"
-         elif business_function == "Security & Compliance":
-             return "Critical - Continuous compliance monitoring"
-         elif business_function == "Resource Management":
-             return "Medium - Weekly operational tasks"
-         else:
-             return "Low - Ad-hoc operational needs"
-
-     def _determine_migration_strategy(self, notebook_name: str, business_function: str) -> MigrationStrategy:
-         """Determine appropriate migration strategy."""
-
-         if business_function in ["Cost Optimization", "Security & Compliance"]:
-             return MigrationStrategy.BUSINESS_LOGIC_EXTRACT
-         elif "duplicate" in notebook_name.lower() or "similar" in notebook_name.lower():
-             return MigrationStrategy.CONSOLIDATE_SIMILAR
-         else:
-             return MigrationStrategy.WRAPPER_INTEGRATION
-
-     def _assess_business_continuity_level(self, business_function: str) -> BusinessContinuityLevel:
-         """Assess business continuity requirements."""
-
-         if business_function == "Security & Compliance":
-             return BusinessContinuityLevel.CRITICAL
-         elif business_function == "Cost Optimization":
-             return BusinessContinuityLevel.HIGH
-         else:
-             return BusinessContinuityLevel.MEDIUM
-
-     def _estimate_migration_savings(self, notebook_name: str, business_function: str) -> str:
-         """Estimate savings from migrating this notebook."""
-
-         if business_function == "Cost Optimization":
-             return "$10,000-50,000 annual optimization potential"
-         elif business_function == "Security & Compliance":
-             return "Risk mitigation + compliance cost reduction"
-         else:
-             return "Operational efficiency improvement"
-
-     def _calculate_migration_priority(self, business_function: str, estimated_savings: str) -> int:
-         """Calculate migration priority (1=highest, 5=lowest)."""
-
-         if business_function == "Cost Optimization":
-             return 1  # Highest priority
-         elif business_function == "Security & Compliance":
-             return 2  # High priority
-         elif business_function == "Resource Management":
-             return 3  # Medium priority
-         else:
-             return 4  # Lower priority
-
-     def _extract_notebook_dependencies(self, notebook_path: str) -> Set[str]:
-         """Extract dependencies from notebook content."""
-
-         dependencies = set()
-
-         try:
-             with open(notebook_path, 'r', encoding='utf-8') as f:
-                 content = f.read()
-
-             # Look for import statements and function calls that might indicate dependencies
-             import_patterns = [
-                 r'import\s+(\w+)',
-                 r'from\s+(\w+)\s+import',
-                 r'runbooks\s+(\w+)'
-             ]
-
-             for pattern in import_patterns:
-                 matches = re.findall(pattern, content)
-                 dependencies.update(matches)
-
-         except Exception as e:
-             print_warning(f"Could not analyze dependencies for {notebook_path}: {e}")
-
-         return dependencies
-
-     def _categorize_by_migration_strategy(self) -> Dict[MigrationStrategy, List[LegacyNotebook]]:
-         """Categorize notebooks by migration strategy."""
-
-         categories = {}
-         for strategy in MigrationStrategy:
-             categories[strategy] = []
-
-         for notebook in self.analyzed_notebooks:
-             if notebook.migration_strategy:
-                 categories[notebook.migration_strategy].append(notebook)
-
-         return categories
-
-     def _create_migration_phases(self, migration_categories: Dict[MigrationStrategy, List[LegacyNotebook]]) -> List[Dict[str, Any]]:
-         """Create systematic migration phases."""
-
-         phases = []
-
-         # Phase 1: High-priority business logic extraction
-         high_priority_notebooks = [n for n in self.analyzed_notebooks if n.migration_priority <= 2]
-         if high_priority_notebooks:
-             phases.append({
-                 "phase_number": 1,
-                 "name": "High-Impact Cost & Security Migration",
-                 "description": "Migrate high-value cost optimization and critical security notebooks",
-                 "notebooks": [n.notebook_name for n in high_priority_notebooks],
-                 "estimated_duration": "4-6 weeks",
-                 "business_impact": "Immediate value realization and risk reduction"
-             })
-
-         # Phase 2: Medium-priority consolidation
-         medium_priority_notebooks = [n for n in self.analyzed_notebooks if n.migration_priority == 3]
-         if medium_priority_notebooks:
-             phases.append({
-                 "phase_number": 2,
-                 "name": "Resource Management Consolidation",
-                 "description": "Consolidate resource management and operational notebooks",
-                 "notebooks": [n.notebook_name for n in medium_priority_notebooks],
-                 "estimated_duration": "3-4 weeks",
-                 "business_impact": "Operational efficiency improvement"
-             })
-
-         # Phase 3: Remaining notebook migration
-         remaining_notebooks = [n for n in self.analyzed_notebooks if n.migration_priority >= 4]
-         if remaining_notebooks:
-             phases.append({
-                 "phase_number": 3,
-                 "name": "Specialized Operations Migration",
-                 "description": "Migrate remaining specialized and ad-hoc notebooks",
-                 "notebooks": [n.notebook_name for n in remaining_notebooks],
-                 "estimated_duration": "2-3 weeks",
-                 "business_impact": "Complete consolidation and maintenance reduction"
-             })
-
-         return phases
-
-     def _calculate_migration_business_impact(self) -> Dict[str, Any]:
-         """Calculate comprehensive business impact of migration."""
-
-         return {
-             "maintenance_cost_reduction": "$78,500+ annually (75% reduction)",
-             "code_consolidation": f"{len(self.analyzed_notebooks)} notebooks → 6-8 modules",
-             "development_velocity_improvement": "5x faster new automation development",
-             "business_value_potential": "$5.7M-$16.6M optimization across enterprise",
-             "compliance_improvement": "Standardized security and governance patterns",
-             "technical_debt_elimination": "15,000+ redundant lines → 3,400 lines efficient architecture"
-         }
-
-     def _assess_migration_risks(self) -> Dict[str, Any]:
-         """Assess migration risks and mitigation strategies."""
-
-         return {
-             "business_continuity_risk": {
-                 "level": "Medium",
-                 "mitigation": "Phased migration with rollback capability"
-             },
-             "functionality_loss_risk": {
-                 "level": "Low",
-                 "mitigation": "Comprehensive validation testing before deprecation"
-             },
-             "stakeholder_adoption_risk": {
-                 "level": "Medium",
-                 "mitigation": "Training and documentation for new interfaces"
-             },
-             "technical_complexity_risk": {
-                 "level": "Low",
-                 "mitigation": "Systematic approach with proven patterns"
-             }
-         }
-
-     def _create_rollback_strategy(self) -> Dict[str, Any]:
-         """Create comprehensive rollback strategy."""
-
-         return {
-             "rollback_triggers": [
-                 "Business disruption detected",
-                 "Functionality regression identified",
-                 "Stakeholder escalation requiring immediate reversion"
-             ],
-             "rollback_process": [
-                 "Immediate revert to legacy notebook execution",
-                 "Restore original interfaces and data access",
-                 "Notify stakeholders of rollback status",
-                 "Conduct root cause analysis and remediation planning"
-             ],
-             "rollback_timeline": "< 4 hours for critical business functions",
-             "data_preservation": "Complete backup maintained for 12 months post-migration"
-         }
-
-     def _migrate_single_notebook(self, notebook: LegacyNotebook, dry_run: bool) -> MigrationResult:
-         """Migrate single notebook with comprehensive tracking."""
-
-         if dry_run:
-             print(f"🔍 DRY RUN: Would migrate {notebook.notebook_name}")
-
-             return MigrationResult(
-                 notebook_name=notebook.notebook_name,
-                 migration_status=MigrationStatus.MIGRATION_COMPLETE,
-                 target_module=f"src/runbooks/finops/{notebook.notebook_name.lower()}_migrated.py",
-                 business_impact={"dry_run": True, "estimated_savings": notebook.estimated_savings},
-                 technical_details={"strategy": notebook.migration_strategy.value, "dry_run": True},
-                 validation_results={"dry_run_validation": "passed"},
-                 rollback_available=True,
-                 artifacts_created=[f"./tmp/{notebook.notebook_name}_migration_plan.json"]
-             )
-
-         # Real migration logic would go here
-         print(f"📝 Migrating {notebook.notebook_name} using {notebook.migration_strategy.value} strategy")
-
-         # Update notebook status
-         notebook.migration_status = MigrationStatus.MIGRATION_COMPLETE
-
-         return MigrationResult(
-             notebook_name=notebook.notebook_name,
-             migration_status=MigrationStatus.MIGRATION_COMPLETE,
-             target_module=f"src/runbooks/finops/{notebook.notebook_name.lower()}_migrated.py",
-             business_impact={"estimated_savings": notebook.estimated_savings},
-             technical_details={"strategy": notebook.migration_strategy.value},
-             validation_results={"migration_validation": "passed"},
-             rollback_available=True,
-             artifacts_created=[]
-         )
-
-     def _find_notebook_by_name(self, notebook_name: str) -> Optional[LegacyNotebook]:
-         """Find notebook by name in analyzed notebooks list."""
-         for notebook in self.analyzed_notebooks:
-             if notebook.notebook_name == notebook_name:
-                 return notebook
-         return None
-
-     def _validate_business_continuity(self, notebook: LegacyNotebook) -> Dict[str, Any]:
-         """Validate business continuity for migrated notebook."""
-
-         # Simplified validation - real implementation would test functionality
-         return {
-             "notebook_name": notebook.notebook_name,
-             "status": "passed",
-             "business_function_maintained": True,
-             "performance_acceptable": True,
-             "stakeholder_approval": "pending"
-         }
-
-     def _create_deprecation_phases(self) -> List[Dict[str, Any]]:
-         """Create phased deprecation plan for legacy notebooks."""
-
-         return [
-             {
-                 "phase": 1,
-                 "name": "Deprecation Warnings",
-                 "duration": "30 days",
-                 "actions": ["Add deprecation warnings to legacy notebooks", "Notify all stakeholders"]
-             },
-             {
-                 "phase": 2,
-                 "name": "Access Restriction",
-                 "duration": "60 days",
-                 "actions": ["Restrict access to legacy notebooks", "Redirect to new modules"]
-             },
-             {
-                 "phase": 3,
-                 "name": "Final Removal",
-                 "duration": "30 days",
-                 "actions": ["Archive legacy notebooks", "Remove from active paths", "Maintain backup"]
-             }
-         ]
-
-
- def create_migration_analyzer(legacy_path: str) -> LegacyMigrationAnalyzer:
-     """
-     Factory function to create legacy migration analyzer.
-
-     Args:
-         legacy_path: Path to legacy CloudOps-Automation notebooks
-
-     Returns:
-         Configured migration analyzer instance
-     """
-     return LegacyMigrationAnalyzer(legacy_base_path=legacy_path)
-
-
- def main():
-     """Demo legacy migration framework."""
-
-     print_header("Legacy Migration Framework Demo", "v0.9.6")
-
-     # Create migration analyzer
-     analyzer = create_migration_analyzer("README/CloudOps-Automation")
-
-     # Discover legacy notebooks
-     notebooks = analyzer.discover_legacy_notebooks()
-
-     if notebooks:
-         print_success(f"Discovered {len(notebooks)} legacy notebooks")
-
-         # Analyze dependencies
-         dependencies = analyzer.analyze_dependencies()
-         print_success(f"Analyzed dependencies for {len(dependencies)} notebooks")
-
-         # Create migration plan
-         migration_plan = analyzer.create_migration_plan()
-         print_success(f"Migration plan created: {len(migration_plan.migration_phases)} phases")
-         print_success(f"Business impact: {migration_plan.business_impact_summary['maintenance_cost_reduction']}")
-
-         # Demo dry run of first phase
-         if migration_plan.migration_phases:
-             phase_results = analyzer.execute_migration_phase(1, dry_run=True)
-             print_success(f"Phase 1 dry run complete: {len(phase_results)} notebooks processed")
-     else:
-         print_warning("No legacy notebooks found - migration analyzer ready for real deployment")
-
-     return analyzer
-
-
- if __name__ == "__main__":
-     main()