kailash 0.9.15__py3-none-any.whl → 0.9.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. kailash/__init__.py +4 -3
  2. kailash/middleware/database/base_models.py +7 -1
  3. kailash/migration/__init__.py +30 -0
  4. kailash/migration/cli.py +340 -0
  5. kailash/migration/compatibility_checker.py +662 -0
  6. kailash/migration/configuration_validator.py +837 -0
  7. kailash/migration/documentation_generator.py +1828 -0
  8. kailash/migration/examples/__init__.py +5 -0
  9. kailash/migration/examples/complete_migration_example.py +692 -0
  10. kailash/migration/migration_assistant.py +715 -0
  11. kailash/migration/performance_comparator.py +760 -0
  12. kailash/migration/regression_detector.py +1141 -0
  13. kailash/migration/tests/__init__.py +6 -0
  14. kailash/migration/tests/test_compatibility_checker.py +403 -0
  15. kailash/migration/tests/test_integration.py +463 -0
  16. kailash/migration/tests/test_migration_assistant.py +397 -0
  17. kailash/migration/tests/test_performance_comparator.py +433 -0
  18. kailash/monitoring/__init__.py +29 -2
  19. kailash/monitoring/asyncsql_metrics.py +275 -0
  20. kailash/nodes/data/async_sql.py +1828 -33
  21. kailash/runtime/local.py +1255 -8
  22. kailash/runtime/monitoring/__init__.py +1 -0
  23. kailash/runtime/monitoring/runtime_monitor.py +780 -0
  24. kailash/runtime/resource_manager.py +3033 -0
  25. kailash/sdk_exceptions.py +21 -0
  26. kailash/workflow/cyclic_runner.py +18 -2
  27. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/METADATA +1 -1
  28. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/RECORD +33 -14
  29. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/WHEEL +0 -0
  30. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/entry_points.txt +0 -0
  31. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/licenses/LICENSE +0 -0
  32. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/licenses/NOTICE +0 -0
  33. {kailash-0.9.15.dist-info → kailash-0.9.17.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ b/kailash/migration/migration_assistant.py
@@ -0,0 +1,715 @@
+"""Migration assistant for automated LocalRuntime configuration conversion.
+
+This module provides comprehensive automation for migrating existing LocalRuntime
+configurations to the enhanced version, including parameter conversion, optimization
+recommendations, and configuration validation.
+"""
+
+import ast
+import json
+import os
+import shutil
+import tempfile
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Set, Tuple, Union
+
+from .compatibility_checker import (
+    CompatibilityChecker,
+    CompatibilityIssue,
+    IssueSeverity,
+)
+
+
+@dataclass
+class MigrationStep:
+    """Represents a single migration step."""
+
+    step_id: str
+    description: str
+    file_path: str
+    original_code: str
+    migrated_code: str
+    automated: bool = True
+    validation_required: bool = False
+    rollback_available: bool = True
+
+
+@dataclass
+class MigrationPlan:
+    """Complete migration plan with all steps."""
+
+    steps: List[MigrationStep] = field(default_factory=list)
+    estimated_duration_minutes: int = 0
+    risk_level: str = "low"  # low, medium, high
+    prerequisites: List[str] = field(default_factory=list)
+    post_migration_tests: List[str] = field(default_factory=list)
+    backup_required: bool = True
+
+
+@dataclass
+class MigrationResult:
+    """Results of migration execution."""
+
+    success: bool
+    steps_completed: int
+    steps_failed: int
+    errors: List[str] = field(default_factory=list)
+    warnings: List[str] = field(default_factory=list)
+    backup_path: Optional[str] = None
+    rollback_available: bool = True
+
+
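
The three dataclasses above form the assistant's data model: a MigrationPlan aggregates MigrationStep records, and execute_migration() reports back through a MigrationResult. A minimal sketch of a hand-built step follows; the field values are illustrative, not taken from the package, and the assistant normally generates these objects itself.

example_step = MigrationStep(
    step_id="param_1",  # hypothetical identifier
    description="Rename debug_mode to debug",
    file_path="app/runtime_setup.py",  # hypothetical file
    original_code="runtime = LocalRuntime(debug_mode=True)",
    migrated_code="runtime = LocalRuntime(debug=True)",
)
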
+class MigrationAssistant:
+    """Automated assistant for LocalRuntime migration."""
+
+    def __init__(self, dry_run: bool = True, create_backups: bool = True):
+        """Initialize the migration assistant.
+
+        Args:
+            dry_run: If True, only plan migration without executing changes
+            create_backups: If True, create backups before making changes
+        """
+        self.dry_run = dry_run
+        self.create_backups = create_backups
+        self.compatibility_checker = CompatibilityChecker()
+
+        # Configuration mapping for automated conversion
+        self.parameter_mappings = {
+            "debug_mode": "debug",
+            "enable_parallel": "max_concurrency",
+            "thread_pool_size": "max_concurrency",
+            "parallel_execution": "max_concurrency",
+            "enable_security_audit": "enable_audit",
+            "connection_pooling": "enable_connection_sharing",
+            "persistent_resources": "persistent_mode",
+            "memory_limit": "resource_limits",
+            "timeout": "resource_limits",
+            "retry_count": "retry_policy_config",
+        }
+
+        # Value transformations
+        self.value_transformations = {
+            "enable_parallel": self._transform_parallel_to_concurrency,
+            "thread_pool_size": self._transform_thread_pool_size,
+            "memory_limit": self._transform_memory_limit,
+            "timeout": self._transform_timeout,
+            "retry_count": self._transform_retry_count,
+        }
+
+        # Method migrations
+        self.method_migrations = {
+            "execute_sync": self._migrate_execute_sync,
+            "execute_async": self._migrate_execute_async,
+            "get_results": self._migrate_get_results,
+            "set_context": self._migrate_set_context,
+        }
+
+        # Enterprise upgrade suggestions
+        self.enterprise_upgrades = {
+            "basic_monitoring": self._suggest_enterprise_monitoring,
+            "simple_auth": self._suggest_enterprise_auth,
+            "basic_caching": self._suggest_enterprise_caching,
+            "error_handling": self._suggest_enterprise_error_handling,
+        }
+
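
Taken together, parameter_mappings and value_transformations describe how a legacy LocalRuntime call is rewritten: names with no transformation entry get a plain rename, the rest get a structural rewrite. Assuming the compatibility checker flags each legacy parameter, a source line is rewritten roughly as follows (both strings are illustrative):

before = "runtime = LocalRuntime(debug_mode=True, thread_pool_size=8, retry_count=3)"
after = 'runtime = LocalRuntime(debug=True, max_concurrency=8, retry_policy_config={"max_retries": 3, "backoff_factor": 1.0})'
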
+    def create_migration_plan(
+        self,
+        root_path: Union[str, Path],
+        include_patterns: Optional[List[str]] = None,
+        exclude_patterns: Optional[List[str]] = None,
+    ) -> MigrationPlan:
+        """Create a comprehensive migration plan.
+
+        Args:
+            root_path: Root directory to analyze
+            include_patterns: File patterns to include
+            exclude_patterns: File patterns to exclude
+
+        Returns:
+            Complete migration plan with all steps
+        """
+        root_path = Path(root_path)
+
+        # First, analyze compatibility
+        analysis_result = self.compatibility_checker.analyze_codebase(
+            root_path, include_patterns, exclude_patterns
+        )
+
+        plan = MigrationPlan()
+
+        # Group issues by file for efficient processing
+        issues_by_file = {}
+        for issue in analysis_result.issues:
+            file_path = issue.file_path
+            if file_path not in issues_by_file:
+                issues_by_file[file_path] = []
+            issues_by_file[file_path].append(issue)
+
+        # Create migration steps for each file
+        step_id = 1
+        for file_path, issues in issues_by_file.items():
+            file_steps = self._create_file_migration_steps(file_path, issues, step_id)
+            plan.steps.extend(file_steps)
+            step_id += len(file_steps)
+
+        # Calculate plan metadata
+        self._calculate_plan_metadata(plan, analysis_result)
+
+        return plan
+
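
A minimal planning sketch, assuming the module is imported by its path under kailash.migration; the include/exclude pattern semantics belong to CompatibilityChecker.analyze_codebase and are assumed to be glob-style here:

from kailash.migration.migration_assistant import MigrationAssistant

assistant = MigrationAssistant(dry_run=True)  # plan only, no file writes
plan = assistant.create_migration_plan(
    "./my_project",             # hypothetical project root
    include_patterns=["*.py"],  # assumed pattern format
)
print(f"{len(plan.steps)} steps, ~{plan.estimated_duration_minutes} min, risk={plan.risk_level}")
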
+    def execute_migration(self, plan: MigrationPlan) -> MigrationResult:
+        """Execute the migration plan.
+
+        Args:
+            plan: Migration plan to execute
+
+        Returns:
+            Migration execution results
+        """
+        result = MigrationResult(success=True, steps_completed=0, steps_failed=0)
+
+        # Create backup if requested
+        if self.create_backups and not self.dry_run:
+            try:
+                result.backup_path = self._create_backup(plan)
+            except Exception as e:
+                result.errors.append(f"Failed to create backup: {str(e)}")
+                result.success = False
+                return result
+
+        # Execute migration steps
+        for step in plan.steps:
+            try:
+                if self.dry_run:
+                    # Just validate the step
+                    self._validate_migration_step(step)
+                    result.steps_completed += 1
+                else:
+                    # Execute the actual migration
+                    self._execute_migration_step(step)
+                    result.steps_completed += 1
+
+            except Exception as e:
+                result.steps_failed += 1
+                result.errors.append(f"Step {step.step_id} failed: {str(e)}")
+
+                # Decide whether to continue or abort
+                if step.validation_required:
+                    result.success = False
+                    break
+                else:
+                    result.warnings.append(
+                        f"Non-critical step {step.step_id} failed, continuing"
+                    )
+
+        # Final validation
+        if result.success and not self.dry_run:
+            validation_errors = self._validate_migration_result(plan)
+            if validation_errors:
+                result.errors.extend(validation_errors)
+                result.success = False
+
+        return result
+
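
Because dry_run and create_backups are fixed at construction time, a cautious rollout can validate the plan first and only then apply it; continuing the sketch above:

preview = assistant.execute_migration(plan)  # dry_run=True: steps are validated, nothing is written

if preview.success:
    applier = MigrationAssistant(dry_run=False, create_backups=True)
    result = applier.execute_migration(plan)
    if not result.success:
        print("Errors:", result.errors, "backup at", result.backup_path)
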
+    def _create_file_migration_steps(
+        self, file_path: str, issues: List[CompatibilityIssue], start_id: int
+    ) -> List[MigrationStep]:
+        """Create migration steps for a single file."""
+        steps = []
+        current_id = start_id
+
+        # Read the original file
+        try:
+            with open(file_path, "r", encoding="utf-8") as f:
+                original_content = f.read()
+        except Exception as e:
+            # Create error step
+            error_step = MigrationStep(
+                step_id=f"error_{current_id}",
+                description=f"Failed to read file {file_path}: {str(e)}",
+                file_path=file_path,
+                original_code="",
+                migrated_code="",
+                automated=False,
+                validation_required=True,
+            )
+            return [error_step]
+
+        # Group issues by type for efficient processing
+        parameter_issues = [i for i in issues if "parameter" in i.description.lower()]
+        method_issues = [
+            i
+            for i in issues
+            if any(method in i.description for method in self.method_migrations.keys())
+        ]
+        config_issues = [i for i in issues if "configuration" in i.description.lower()]
+
+        # Create steps for different types of issues
+        if parameter_issues:
+            step = self._create_parameter_migration_step(
+                f"param_{current_id}", file_path, original_content, parameter_issues
+            )
+            steps.append(step)
+            current_id += 1
+
+        if method_issues:
+            step = self._create_method_migration_step(
+                f"method_{current_id}", file_path, original_content, method_issues
+            )
+            steps.append(step)
+            current_id += 1
+
+        if config_issues:
+            step = self._create_config_migration_step(
+                f"config_{current_id}", file_path, original_content, config_issues
+            )
+            steps.append(step)
+            current_id += 1
+
+        return steps
+
+    def _create_parameter_migration_step(
+        self,
+        step_id: str,
+        file_path: str,
+        content: str,
+        issues: List[CompatibilityIssue],
+    ) -> MigrationStep:
+        """Create migration step for parameter changes."""
+        migrated_content = content
+
+        for issue in issues:
+            # Find the parameter name from the issue description
+            for old_param, new_param in self.parameter_mappings.items():
+                if old_param in issue.code_snippet:
+                    # Apply parameter transformation
+                    if old_param in self.value_transformations:
+                        migrated_content = self.value_transformations[old_param](
+                            migrated_content, issue
+                        )
+                    else:
+                        # Simple parameter rename
+                        migrated_content = migrated_content.replace(
+                            f"{old_param}=", f"{new_param}="
+                        )
+
+        return MigrationStep(
+            step_id=step_id,
+            description=f"Migrate parameters in {file_path}",
+            file_path=file_path,
+            original_code=content,
+            migrated_code=migrated_content,
+            automated=True,
+            validation_required=False,
+        )
+
+    def _create_method_migration_step(
+        self,
+        step_id: str,
+        file_path: str,
+        content: str,
+        issues: List[CompatibilityIssue],
+    ) -> MigrationStep:
+        """Create migration step for method changes."""
+        migrated_content = content
+
+        for issue in issues:
+            for old_method, migration_func in self.method_migrations.items():
+                if old_method in issue.code_snippet:
+                    migrated_content = migration_func(migrated_content, issue)
+
+        return MigrationStep(
+            step_id=step_id,
+            description=f"Migrate methods in {file_path}",
+            file_path=file_path,
+            original_code=content,
+            migrated_code=migrated_content,
+            automated=True,
+            validation_required=True,  # Method changes need validation
+        )
+
+    def _create_config_migration_step(
+        self,
+        step_id: str,
+        file_path: str,
+        content: str,
+        issues: List[CompatibilityIssue],
+    ) -> MigrationStep:
+        """Create migration step for configuration changes."""
+        migrated_content = content
+
+        # Handle dictionary-style configuration conversion
+        if "dictionary-style configuration" in " ".join(i.description for i in issues):
+            migrated_content = self._convert_dict_config_to_parameters(migrated_content)
+
+        return MigrationStep(
+            step_id=step_id,
+            description=f"Migrate configuration in {file_path}",
+            file_path=file_path,
+            original_code=content,
+            migrated_code=migrated_content,
+            automated=True,
+            validation_required=True,
+        )
+
+    def _transform_parallel_to_concurrency(
+        self, content: str, issue: CompatibilityIssue
+    ) -> str:
+        """Transform enable_parallel to max_concurrency."""
+        import re
+
+        # Find the enable_parallel parameter and convert to max_concurrency
+        pattern = r"enable_parallel\s*=\s*(True|False)"
+
+        def replacement(match):
+            value = match.group(1)
+            if value == "True":
+                return "max_concurrency=10"  # Default reasonable value
+            else:
+                return "max_concurrency=1"  # Sequential execution
+
+        return re.sub(pattern, replacement, content)
+
+    def _transform_thread_pool_size(
+        self, content: str, issue: CompatibilityIssue
+    ) -> str:
+        """Transform thread_pool_size to max_concurrency."""
+        import re
+
+        pattern = r"thread_pool_size\s*=\s*(\d+)"
+        return re.sub(pattern, r"max_concurrency=\1", content)
+
+    def _transform_memory_limit(self, content: str, issue: CompatibilityIssue) -> str:
+        """Transform memory_limit to resource_limits."""
+        import re
+
+        pattern = r"memory_limit\s*=\s*(\d+)"
+
+        def replacement(match):
+            value = match.group(1)
+            return f'resource_limits={{"memory_mb": {value}}}'
+
+        return re.sub(pattern, replacement, content)
+
+    def _transform_timeout(self, content: str, issue: CompatibilityIssue) -> str:
+        """Transform timeout to resource_limits."""
+        import re
+
+        pattern = r"timeout\s*=\s*(\d+)"
+
+        def replacement(match):
+            value = match.group(1)
+            return f'resource_limits={{"timeout_seconds": {value}}}'
+
+        return re.sub(pattern, replacement, content)
+
+    def _transform_retry_count(self, content: str, issue: CompatibilityIssue) -> str:
+        """Transform retry_count to retry_policy_config."""
+        import re
+
+        pattern = r"retry_count\s*=\s*(\d+)"
+
+        def replacement(match):
+            value = match.group(1)
+            return (
+                f'retry_policy_config={{"max_retries": {value}, "backoff_factor": 1.0}}'
+            )
+
+        return re.sub(pattern, replacement, content)
+
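
Each of these value transformations is a plain textual regex rewrite over the file contents; a standalone check of the memory_limit rule (the sample string is hypothetical):

import re

sample = "runtime = LocalRuntime(memory_limit=512)"
rewritten = re.sub(
    r"memory_limit\s*=\s*(\d+)",
    lambda m: f'resource_limits={{"memory_mb": {m.group(1)}}}',
    sample,
)
print(rewritten)  # runtime = LocalRuntime(resource_limits={"memory_mb": 512})
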
+    def _migrate_execute_sync(self, content: str, issue: CompatibilityIssue) -> str:
+        """Migrate execute_sync to execute."""
+        import re
+
+        # Replace execute_sync calls with execute
+        pattern = r"\.execute_sync\s*\("
+        return re.sub(pattern, ".execute(", content)
+
+    def _migrate_execute_async(self, content: str, issue: CompatibilityIssue) -> str:
+        """Migrate execute_async to execute with async configuration."""
+        import re
+
+        # Replace execute_async with execute and add async configuration note
+        pattern = r"\.execute_async\s*\("
+        replacement = ".execute( # Note: Set enable_async=True in constructor\n"
+        return re.sub(pattern, replacement, content)
+
+    def _migrate_get_results(self, content: str, issue: CompatibilityIssue) -> str:
+        """Migrate get_results to direct result access."""
+        import re
+
+        # Replace get_results() calls with direct result access
+        pattern = r"\.get_results\s*\(\s*\)"
+        return re.sub(pattern, "[0] # Results now returned directly", content)
+
+    def _migrate_set_context(self, content: str, issue: CompatibilityIssue) -> str:
+        """Migrate set_context to constructor parameter."""
+        import re
+
+        # Add comment about moving context to constructor
+        pattern = r"\.set_context\s*\("
+        replacement = "# MIGRATION NOTE: Move context to LocalRuntime constructor\n# .set_context("
+        return re.sub(pattern, replacement, content)
+
+    def _convert_dict_config_to_parameters(self, content: str) -> str:
+        """Convert dictionary-style configuration to named parameters."""
+        import re
+
+        # This is a complex transformation that would need AST manipulation
+        # For now, add a comment indicating manual conversion needed
+        pattern = r"LocalRuntime\s*\(\s*\{[^}]+\}"
+
+        def replacement(match):
+            return f"# MIGRATION NOTE: Convert dictionary config to named parameters\n{match.group(0)}"
+
+        return re.sub(pattern, replacement, content, flags=re.DOTALL)
+
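
Of these rewrites, only execute_sync is converted fully automatically; execute_async, get_results, set_context, and dictionary-style configuration inject notes or partial edits that still need a human pass, which is why the steps that carry them are marked validation_required. The automatic case, checked in isolation:

import re

legacy = "results = runtime.execute_sync(workflow)"
print(re.sub(r"\.execute_sync\s*\(", ".execute(", legacy))
# results = runtime.execute(workflow)
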
+    def _calculate_plan_metadata(self, plan: MigrationPlan, analysis_result) -> None:
+        """Calculate plan metadata like duration and risk level."""
+        # Estimate duration based on number of steps and complexity
+        base_minutes_per_step = 2
+        complex_step_bonus = 3
+
+        total_minutes = 0
+        critical_issues = 0
+
+        for step in plan.steps:
+            total_minutes += base_minutes_per_step
+            if step.validation_required:
+                total_minutes += complex_step_bonus
+            if not step.automated:
+                total_minutes += 10  # Manual steps take longer
+
+        # Add time for testing and validation
+        total_minutes += 15  # Base testing time
+
+        plan.estimated_duration_minutes = total_minutes
+
+        # Calculate risk level
+        critical_count = analysis_result.summary.get("critical_issues", 0)
+        breaking_changes = analysis_result.summary.get("breaking_changes", 0)
+
+        if critical_count > 5 or breaking_changes > 3:
+            plan.risk_level = "high"
+        elif critical_count > 2 or breaking_changes > 1:
+            plan.risk_level = "medium"
+        else:
+            plan.risk_level = "low"
+
+        # Set prerequisites
+        plan.prerequisites = [
+            "Create backup of codebase",
+            "Ensure all tests pass before migration",
+            "Review migration plan with team",
+            "Prepare rollback strategy",
+        ]
+
+        # Set post-migration tests
+        plan.post_migration_tests = [
+            "Run existing test suite",
+            "Validate LocalRuntime instantiation",
+            "Test workflow execution",
+            "Check performance benchmarks",
+            "Verify enterprise features if enabled",
+        ]
+
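
As a worked example of the estimate above: a plan with four steps, two of which require validation and one of which is manual, is costed as follows.

estimated_minutes = 4 * 2 + 2 * 3 + 1 * 10 + 15  # = 39 minutes
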
+    def _create_backup(self, plan: MigrationPlan) -> str:
+        """Create backup of files that will be modified."""
+        backup_dir = tempfile.mkdtemp(prefix="kailash_migration_backup_")
+
+        files_to_backup = set(step.file_path for step in plan.steps)
+
+        for file_path in files_to_backup:
+            src_path = Path(file_path)
+            if src_path.exists():
+                # Create relative path structure in backup
+                rel_path = src_path.relative_to(src_path.anchor)
+                backup_path = Path(backup_dir) / rel_path
+                backup_path.parent.mkdir(parents=True, exist_ok=True)
+                shutil.copy2(src_path, backup_path)
+
+        return backup_dir
+
+    def _validate_migration_step(self, step: MigrationStep) -> None:
+        """Validate a migration step without executing it."""
+        # Check if the original file exists
+        if not Path(step.file_path).exists():
+            raise FileNotFoundError(f"Source file not found: {step.file_path}")
+
+        # Validate that migrated code is syntactically valid Python
+        try:
+            ast.parse(step.migrated_code)
+        except SyntaxError as e:
+            raise ValueError(f"Migrated code has syntax errors: {str(e)}")
+
+    def _execute_migration_step(self, step: MigrationStep) -> None:
+        """Execute a single migration step."""
+        # Validate first
+        self._validate_migration_step(step)
+
+        # Write the migrated code
+        with open(step.file_path, "w", encoding="utf-8") as f:
+            f.write(step.migrated_code)
+
+    def _validate_migration_result(self, plan: MigrationPlan) -> List[str]:
+        """Validate the overall migration result."""
+        errors = []
+
+        # Check that all modified files are syntactically valid
+        files_modified = set(step.file_path for step in plan.steps)
+
+        for file_path in files_modified:
+            try:
+                with open(file_path, "r", encoding="utf-8") as f:
+                    content = f.read()
+                ast.parse(content)
+            except Exception as e:
+                errors.append(f"File {file_path} is invalid after migration: {str(e)}")
+
+        return errors
+
+    def rollback_migration(self, result: MigrationResult) -> bool:
+        """Rollback a migration using the backup."""
+        if not result.backup_path or not result.rollback_available:
+            return False
+
+        try:
+            backup_path = Path(result.backup_path)
+            if not backup_path.exists():
+                return False
+
+            # Restore all files from backup
+            for backup_file in backup_path.rglob("*"):
+                if backup_file.is_file():
+                    # Calculate original path
+                    rel_path = backup_file.relative_to(backup_path)
+                    original_path = Path("/") / rel_path
+
+                    # Restore the file
+                    original_path.parent.mkdir(parents=True, exist_ok=True)
+                    shutil.copy2(backup_file, original_path)
+
+            return True
+
+        except Exception:
+            return False
+
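
The backup and rollback pair round-trip paths through Path.anchor: _create_backup mirrors each source path under the temp directory, and rollback_migration reattaches the relative remainder under "/", so it assumes absolute, POSIX-style source paths. A sketch of that mapping with illustrative paths:

from pathlib import Path

src = Path("/home/dev/app/runtime_setup.py")            # hypothetical source file
backup_dir = Path("/tmp/kailash_migration_backup_xyz")  # stand-in for the mkdtemp result

stored = backup_dir / src.relative_to(src.anchor)       # .../home/dev/app/runtime_setup.py
restored = Path("/") / stored.relative_to(backup_dir)   # /home/dev/app/runtime_setup.py
assert restored == src
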
+    def generate_migration_report(
+        self, plan: MigrationPlan, result: Optional[MigrationResult] = None
+    ) -> str:
+        """Generate a comprehensive migration report.
+
+        Args:
+            plan: The migration plan
+            result: Optional execution result
+
+        Returns:
+            Formatted migration report
+        """
+        report = []
+        report.append("=" * 60)
+        report.append("LocalRuntime Migration Report")
+        report.append("=" * 60)
+        report.append("")
+
+        # Plan summary
+        report.append("MIGRATION PLAN SUMMARY")
+        report.append("-" * 25)
+        report.append(f"Total Steps: {len(plan.steps)}")
+        report.append(f"Estimated Duration: {plan.estimated_duration_minutes} minutes")
+        report.append(f"Risk Level: {plan.risk_level.upper()}")
+        report.append(f"Backup Required: {'Yes' if plan.backup_required else 'No'}")
+        report.append("")
+
+        # Execution results (if available)
+        if result:
+            report.append("EXECUTION RESULTS")
+            report.append("-" * 20)
+            report.append(f"Success: {'Yes' if result.success else 'No'}")
+            report.append(f"Steps Completed: {result.steps_completed}")
+            report.append(f"Steps Failed: {result.steps_failed}")
+
+            if result.backup_path:
+                report.append(f"Backup Location: {result.backup_path}")
+
+            if result.errors:
+                report.append("")
+                report.append("ERRORS:")
+                for error in result.errors:
+                    report.append(f" • {error}")
+
+            if result.warnings:
+                report.append("")
+                report.append("WARNINGS:")
+                for warning in result.warnings:
+                    report.append(f" • {warning}")
+
+            report.append("")
+
+        # Prerequisites
+        if plan.prerequisites:
+            report.append("PREREQUISITES")
+            report.append("-" * 15)
+            for prereq in plan.prerequisites:
+                report.append(f"• {prereq}")
+            report.append("")
+
+        # Detailed steps
+        report.append("MIGRATION STEPS")
+        report.append("-" * 18)
+        for i, step in enumerate(plan.steps, 1):
+            report.append(f"{i}. {step.description}")
+            report.append(f" File: {step.file_path}")
+            report.append(f" Automated: {'Yes' if step.automated else 'No'}")
+            report.append(
+                f" Validation Required: {'Yes' if step.validation_required else 'No'}"
+            )
+            report.append("")
+
+        # Post-migration tests
+        if plan.post_migration_tests:
+            report.append("POST-MIGRATION VALIDATION")
+            report.append("-" * 27)
+            for test in plan.post_migration_tests:
+                report.append(f"• {test}")
+            report.append("")
+
+        return "\n".join(report)
+
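
A short usage sketch, continuing the hypothetical dry run above; the report is plain text, so it can be printed or written straight to disk:

report = assistant.generate_migration_report(plan, result=preview)
print(report)  # or Path("migration_report.txt").write_text(report, encoding="utf-8")
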
+    # Enterprise upgrade suggestion methods
+    def _suggest_enterprise_monitoring(self, content: str) -> str:
+        """Suggest enterprise monitoring upgrades."""
+        suggestions = [
+            "Consider enabling enterprise monitoring with: enable_monitoring=True",
+            "Add performance benchmarking with PerformanceBenchmarkNode",
+            "Implement real-time metrics with MetricsCollectorNode",
+        ]
+        return "\n".join(f"# ENTERPRISE UPGRADE: {s}" for s in suggestions)
+
+    def _suggest_enterprise_auth(self, content: str) -> str:
+        """Suggest enterprise authentication upgrades."""
+        suggestions = [
+            "Upgrade to enterprise authentication with UserContext",
+            "Enable access control with enable_security=True",
+            "Consider RBAC with role-based permissions",
+        ]
+        return "\n".join(f"# ENTERPRISE UPGRADE: {s}" for s in suggestions)
+
+    def _suggest_enterprise_caching(self, content: str) -> str:
+        """Suggest enterprise caching upgrades."""
+        suggestions = [
+            "Replace basic caching with enterprise CacheNode",
+            "Consider Redis integration for distributed caching",
+            "Implement cache invalidation strategies",
+        ]
+        return "\n".join(f"# ENTERPRISE UPGRADE: {s}" for s in suggestions)
+
+    def _suggest_enterprise_error_handling(self, content: str) -> str:
+        """Suggest enterprise error handling upgrades."""
+        suggestions = [
+            "Upgrade error handling with EnhancedErrorFormatter",
+            "Implement circuit breaker patterns",
+            "Add comprehensive audit logging with enable_audit=True",
+        ]
+        return "\n".join(f"# ENTERPRISE UPGRADE: {s}" for s in suggestions)
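
The four suggestion helpers are registered in self.enterprise_upgrades but are not invoked elsewhere in this module; each one ignores its content argument and returns ready-made comment lines. For example, _suggest_enterprise_monitoring("") evaluates to:

# ENTERPRISE UPGRADE: Consider enabling enterprise monitoring with: enable_monitoring=True
# ENTERPRISE UPGRADE: Add performance benchmarking with PerformanceBenchmarkNode
# ENTERPRISE UPGRADE: Implement real-time metrics with MetricsCollectorNode
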