crackerjack-0.32.0-py3-none-any.whl → crackerjack-0.33.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crackerjack might be problematic.

Files changed (34)
  1. crackerjack/core/enhanced_container.py +67 -0
  2. crackerjack/core/phase_coordinator.py +183 -44
  3. crackerjack/core/workflow_orchestrator.py +459 -138
  4. crackerjack/managers/publish_manager.py +22 -5
  5. crackerjack/managers/test_command_builder.py +4 -2
  6. crackerjack/managers/test_manager.py +15 -4
  7. crackerjack/mcp/server_core.py +162 -34
  8. crackerjack/mcp/tools/core_tools.py +1 -1
  9. crackerjack/mcp/tools/execution_tools.py +8 -3
  10. crackerjack/mixins/__init__.py +5 -0
  11. crackerjack/mixins/error_handling.py +214 -0
  12. crackerjack/models/config.py +9 -0
  13. crackerjack/models/protocols.py +69 -0
  14. crackerjack/models/task.py +3 -0
  15. crackerjack/security/__init__.py +1 -1
  16. crackerjack/security/audit.py +92 -78
  17. crackerjack/services/config.py +3 -2
  18. crackerjack/services/config_merge.py +11 -5
  19. crackerjack/services/coverage_ratchet.py +22 -0
  20. crackerjack/services/git.py +37 -24
  21. crackerjack/services/initialization.py +25 -9
  22. crackerjack/services/memory_optimizer.py +477 -0
  23. crackerjack/services/parallel_executor.py +474 -0
  24. crackerjack/services/performance_benchmarks.py +292 -577
  25. crackerjack/services/performance_cache.py +443 -0
  26. crackerjack/services/performance_monitor.py +633 -0
  27. crackerjack/services/security.py +63 -0
  28. crackerjack/services/security_logger.py +9 -1
  29. crackerjack/services/terminal_utils.py +0 -0
  30. {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/METADATA +2 -2
  31. {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/RECORD +34 -27
  32. {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/WHEEL +0 -0
  33. {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/entry_points.txt +0 -0
  34. {crackerjack-0.32.0.dist-info → crackerjack-0.33.0.dist-info}/licenses/LICENSE +0 -0
crackerjack/core/enhanced_container.py

@@ -11,11 +11,16 @@ from rich.console import Console

 from crackerjack.models.protocols import (
     ConfigMergeServiceProtocol,
+    ConfigurationServiceProtocol,
+    CoverageRatchetProtocol,
     FileSystemInterface,
     GitInterface,
     HookManager,
+    InitializationServiceProtocol,
     PublishManager,
+    SecurityServiceProtocol,
     TestManagerProtocol,
+    UnifiedConfigurationServiceProtocol,
 )
 from crackerjack.services.logging import get_logger

@@ -470,17 +475,79 @@ class ServiceCollectionBuilder:

         return self

+    def add_service_protocols(self) -> "ServiceCollectionBuilder":
+        """Add registrations for service protocols that don't have explicit builders."""
+        console = self.console or Console(force_terminal=True)
+        pkg_path = self.pkg_path or Path.cwd()
+
+        # Register CoverageRatchetProtocol
+        def create_coverage_ratchet() -> CoverageRatchetProtocol:
+            from crackerjack.services.coverage_ratchet import CoverageRatchetService
+
+            return CoverageRatchetService(pkg_path, console)
+
+        self.container.register_transient(
+            CoverageRatchetProtocol,
+            factory=create_coverage_ratchet,
+        )
+
+        # Register ConfigurationServiceProtocol
+        def create_configuration_service() -> ConfigurationServiceProtocol:
+            from crackerjack.services.config import ConfigurationService
+
+            return ConfigurationService(console=console, pkg_path=pkg_path)
+
+        self.container.register_transient(
+            ConfigurationServiceProtocol,
+            factory=create_configuration_service,
+        )
+
+        # Register SecurityServiceProtocol
+        def create_security_service() -> SecurityServiceProtocol:
+            from crackerjack.services.security import SecurityService
+
+            return SecurityService()
+
+        self.container.register_transient(
+            SecurityServiceProtocol,
+            factory=create_security_service,
+        )
+
+        # Register InitializationServiceProtocol
+        def create_initialization_service() -> InitializationServiceProtocol:
+            from crackerjack.services.filesystem import FileSystemService
+            from crackerjack.services.git import GitService
+            from crackerjack.services.initialization import InitializationService
+
+            filesystem = FileSystemService()
+            git_service = GitService(console, pkg_path)
+            return InitializationService(console, filesystem, git_service, pkg_path)
+
+        self.container.register_transient(
+            InitializationServiceProtocol,
+            factory=create_initialization_service,
+        )
+
+        return self
+
     def add_configuration_services(self) -> "ServiceCollectionBuilder":
         console = self.console or Console(force_terminal=True)
         pkg_path = self.pkg_path or Path.cwd()

         from crackerjack.services.unified_config import UnifiedConfigurationService

+        # Register concrete class for backwards compatibility
         self.container.register_singleton(
             UnifiedConfigurationService,
             factory=lambda: UnifiedConfigurationService(console, pkg_path),
         )

+        # Register protocol interface
+        self.container.register_singleton(
+            UnifiedConfigurationServiceProtocol,
+            factory=lambda: self.container.get(UnifiedConfigurationService),
+        )
+
         # Register ConfigMergeService for smart configuration merging
         from crackerjack.services.config_merge import ConfigMergeService

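The new add_service_protocols() method registers each protocol with a factory, so nothing is constructed until a caller resolves the protocol from the container. Below is a minimal sketch of that transient, factory-based pattern; the TinyContainer and GreeterProtocol names are illustrative stand-ins, not crackerjack's EnhancedContainer API.

```python
# Minimal sketch of transient, factory-based registration (hypothetical
# container; crackerjack's EnhancedContainer internals are not shown in this diff).
import typing as t


class TinyContainer:
    def __init__(self) -> None:
        self._factories: dict[type, t.Callable[[], t.Any]] = {}

    def register_transient(self, interface: type, factory: t.Callable[[], t.Any]) -> None:
        # Transient: the factory runs on every resolution, so each caller
        # gets a fresh instance instead of a shared singleton.
        self._factories[interface] = factory

    def get(self, interface: type) -> t.Any:
        return self._factories[interface]()


class GreeterProtocol(t.Protocol):
    def greet(self) -> str: ...


class Greeter:
    def greet(self) -> str:
        return "hello"


container = TinyContainer()
container.register_transient(GreeterProtocol, factory=lambda: Greeter())
# Each resolution builds a new instance, mirroring register_transient semantics.
assert container.get(GreeterProtocol) is not container.get(GreeterProtocol)
```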
crackerjack/core/phase_coordinator.py

@@ -7,6 +7,7 @@ from rich.console import Console

 from crackerjack.code_cleaner import CodeCleaner, PackageCleaningResult
 from crackerjack.core.autofix_coordinator import AutofixCoordinator
+from crackerjack.mixins import ErrorHandlingMixin
 from crackerjack.models.protocols import (
     ConfigMergeServiceProtocol,
     FileSystemInterface,
@@ -16,12 +17,20 @@ from crackerjack.models.protocols import (
     PublishManager,
     TestManagerProtocol,
 )
-from crackerjack.services.config import ConfigurationService
+from crackerjack.services.memory_optimizer import (
+    create_lazy_service,
+    get_memory_optimizer,
+)
+from crackerjack.services.parallel_executor import (
+    get_async_executor,
+    get_parallel_executor,
+)
+from crackerjack.services.performance_cache import get_filesystem_cache, get_git_cache

 from .session_coordinator import SessionCoordinator


-class PhaseCoordinator:
+class PhaseCoordinator(ErrorHandlingMixin):
     def __init__(
         self,
         console: Console,
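PhaseCoordinator now inherits from ErrorHandlingMixin and routes failures through self.handle_subprocess_error(...) in the hunks below. The mixin itself lives in crackerjack/mixins/error_handling.py (+214 lines, not included in this excerpt), so the following is only a hypothetical sketch of the shape implied by those call sites.

```python
# Hypothetical sketch only: the real ErrorHandlingMixin is not shown in this diff.
# The signature is inferred from call sites such as
# self.handle_subprocess_error(e, [], "Code cleaning", critical=False).
import logging


class ErrorHandlingMixin:
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self._error_logger = logging.getLogger(self.__class__.__name__)

    def handle_subprocess_error(
        self,
        error: Exception,
        command: list[str],
        context: str,
        *,
        critical: bool = False,
    ) -> None:
        # Centralize formatting/logging of subprocess failures; callers decide
        # via `critical` whether the failure should abort the workflow.
        self._error_logger.error("%s failed (command=%s): %s", context, command, error)
        if critical:
            raise error


class Phase(ErrorHandlingMixin):
    def run(self) -> bool:
        try:
            raise RuntimeError("boom")
        except Exception as e:
            self.handle_subprocess_error(e, [], "Demo phase", critical=False)
            return False
```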
@@ -55,13 +64,31 @@ class PhaseCoordinator:
             security_logger=None,
             backup_service=None,
         )
+        # Initialize configuration service - could be injected via DI
+        from crackerjack.services.config import ConfigurationService
+
         self.config_service = ConfigurationService(console=console, pkg_path=pkg_path)
-        self.autofix_coordinator = AutofixCoordinator(
-            console=console, pkg_path=pkg_path
-        )
+        # Lazy-loaded autofix coordinator (now using lazy service)
+        # self.autofix_coordinator will be accessed via property

         self.logger = logging.getLogger("crackerjack.phases")

+        # Performance optimization services
+        self._memory_optimizer = get_memory_optimizer()
+        self._parallel_executor = get_parallel_executor()
+        self._async_executor = get_async_executor()
+        self._git_cache = get_git_cache()
+        self._filesystem_cache = get_filesystem_cache()
+
+        # Lazy-loaded heavy services
+        self._lazy_autofix = create_lazy_service(
+            lambda: AutofixCoordinator(console=console, pkg_path=pkg_path),
+            "autofix_coordinator",
+        )
+
+        # Initialize ErrorHandlingMixin
+        super().__init__()
+
     def run_cleaning_phase(self, options: OptionsProtocol) -> bool:
         if not options.clean:
             return True
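The heavy AutofixCoordinator is now wrapped in create_lazy_service(...) and only built on first use (see the autofix_coordinator property added at the end of the file). crackerjack/services/memory_optimizer.py (+477 lines) is not included in this excerpt, so the following is only a minimal sketch of the lazy-service pattern; the create_lazy_service name and .get() interface come from the call sites above, everything else is assumed.

```python
# Minimal sketch of the lazy-service pattern used for _lazy_autofix; the real
# implementation lives in crackerjack/services/memory_optimizer.py (not shown).
import typing as t

T = t.TypeVar("T")


class LazyService(t.Generic[T]):
    def __init__(self, factory: t.Callable[[], T], name: str) -> None:
        self._factory = factory
        self._name = name
        self._instance: T | None = None

    def get(self) -> T:
        # Build the wrapped service on first access, then reuse it
        # (assumes the factory never returns None).
        if self._instance is None:
            self._instance = self._factory()
        return self._instance


def create_lazy_service(factory: t.Callable[[], T], name: str) -> LazyService[T]:
    return LazyService(factory, name)


# Usage mirroring the coordinator: nothing heavy runs until .get() is called.
lazy_list = create_lazy_service(lambda: list(range(3)), "demo_service")
assert lazy_list.get() is lazy_list.get()
```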
@@ -71,16 +98,16 @@ class PhaseCoordinator:
             self._display_cleaning_header()
             return self._execute_cleaning_process()
         except Exception as e:
-            self.console.print(f"[red]❌[/ red] Cleaning failed: {e}")
+            self.handle_subprocess_error(e, [], "Code cleaning", critical=False)
             self.session.fail_task("cleaning", str(e))
             return False

     def _display_cleaning_header(self) -> None:
-        self.console.print("\n" + "-" * 80)
+        self.console.print("\n" + "-" * 40)
         self.console.print(
             "[bold bright_magenta]🛠️ SETUP[/bold bright_magenta] [bold bright_white]Initializing project structure[/bold bright_white]",
         )
-        self.console.print("-" * 80 + "\n")
+        self.console.print("-" * 40 + "\n")
         self.console.print("[yellow]🧹[/yellow] Starting code cleaning...")

     def _execute_cleaning_process(self) -> bool:
@@ -156,6 +183,7 @@ class PhaseCoordinator:
             self._complete_configuration_task(success)
             return success
         except Exception as e:
+            self.handle_subprocess_error(e, [], "Configuration phase", critical=False)
             self.session.fail_task("configuration", str(e))
             return False

@@ -370,32 +398,28 @@ class PhaseCoordinator:
         if options.skip_hooks:
             return True

-        return self._execute_hooks_with_retry(
-            "fast",
-            self.hook_manager.run_fast_hooks,
-            options,
-        )
+        # Use standard execution for now - parallel support can be added later
+        hook_results = self.hook_manager.run_fast_hooks()
+        return all(r.status == "passed" for r in hook_results)

     def run_comprehensive_hooks_only(self, options: OptionsProtocol) -> bool:
         if options.skip_hooks:
             return True

-        return self._execute_hooks_with_retry(
-            "comprehensive",
-            self.hook_manager.run_comprehensive_hooks,
-            options,
-        )
+        # Use standard execution for now - parallel support can be added later
+        hook_results = self.hook_manager.run_comprehensive_hooks()
+        return all(r.status == "passed" for r in hook_results)

     def run_testing_phase(self, options: OptionsProtocol) -> bool:
         if not options.test:
             return True
         self.session.track_task("testing", "Test execution")
         try:
-            self.console.print("\n" + "-" * 80)
+            self.console.print("\n" + "-" * 40)
             self.console.print(
                 "[bold bright_blue]🧪 TESTS[/ bold bright_blue] [bold bright_white]Running test suite[/ bold bright_white]",
             )
-            self.console.print("-" * 80 + "\n")
+            self.console.print("-" * 40 + "\n")
             if not self.test_manager.validate_test_environment():
                 self.session.fail_task("testing", "Test environment validation failed")
                 return False
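Both hooks-only helpers now treat the run as passing only if every result reports status == "passed". A minimal illustration of that check, using a stand-in result type (the real result objects come from hook_manager and are not defined in this diff):

```python
# Illustrative only: a stand-in for the objects returned by run_fast_hooks() /
# run_comprehensive_hooks(); only the `status` attribute is relied on here.
from dataclasses import dataclass


@dataclass
class HookResult:
    name: str
    status: str  # e.g. "passed" or "failed"


def all_hooks_passed(results: list[HookResult]) -> bool:
    # Mirrors the new return expression: every hook must report "passed".
    return all(r.status == "passed" for r in results)


assert all_hooks_passed([HookResult("format", "passed"), HookResult("lint", "passed")])
assert not all_hooks_passed([HookResult("lint", "failed")])
```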
@@ -615,33 +639,60 @@ class PhaseCoordinator:

         for attempt in range(max_retries):
             try:
-                results = hook_runner()
-                summary = self.hook_manager.get_hook_summary(results)
-
-                if self._has_hook_failures(summary):
-                    if self._should_retry_hooks(
-                        hook_type,
-                        attempt,
-                        max_retries,
-                        results,
-                    ):
-                        continue
-
-                    return self._handle_hook_failures(
-                        hook_type,
-                        options,
-                        summary,
-                        results,
-                        attempt,
-                        max_retries,
-                    )
-                return self._handle_hook_success(hook_type, summary)
+                execution_result = self._execute_single_hook_attempt(hook_runner)
+                if execution_result is None:
+                    return False
+
+                results, summary = execution_result
+                should_continue = self._process_hook_results(
+                    hook_type, options, summary, results, attempt, max_retries
+                )
+
+                if should_continue == "continue":
+                    continue
+                elif should_continue == "success":
+                    return True
+                else:
+                    return False

             except Exception as e:
                 return self._handle_hook_exception(hook_type, e)

         return False

+    def _execute_single_hook_attempt(
+        self, hook_runner: t.Callable[[], list[t.Any]]
+    ) -> tuple[list[t.Any], dict[str, t.Any]] | None:
+        """Execute a single hook attempt and return results and summary."""
+        try:
+            results = hook_runner()
+            summary = self.hook_manager.get_hook_summary(results)
+            return results, summary
+        except Exception:
+            return None
+
+    def _process_hook_results(
+        self,
+        hook_type: str,
+        options: OptionsProtocol,
+        summary: dict[str, t.Any],
+        results: list[t.Any],
+        attempt: int,
+        max_retries: int,
+    ) -> str:
+        """Process hook results and return action: 'continue', 'success', or 'failure'."""
+        if not self._has_hook_failures(summary):
+            self._handle_hook_success(hook_type, summary)
+            return "success"
+
+        if self._should_retry_hooks(hook_type, attempt, max_retries, results):
+            return "continue"
+
+        self._handle_hook_failures(
+            hook_type, options, summary, results, attempt, max_retries
+        )
+        return "failure"
+
     def _initialize_hook_execution(self, hook_type: str) -> None:
         self.logger.info(f"Starting {hook_type} hooks execution")
         self.session.track_task(
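The retry loop is refactored so _process_hook_results classifies each attempt and the loop simply dispatches on the returned string. A generic sketch of that control flow follows; the names and the attempt_passes callback are illustrative, not crackerjack APIs.

```python
# Generic sketch of the three-way dispatch used by the refactored retry loop.
# `attempt_passes` stands in for running hooks and checking their summary.
import typing as t


def process_results(passed: bool, attempt: int, max_retries: int) -> str:
    """Classify an attempt as 'success', 'continue' (retry), or 'failure'."""
    if passed:
        return "success"
    if attempt < max_retries - 1:
        return "continue"
    return "failure"


def run_with_retry(attempt_passes: t.Callable[[int], bool], max_retries: int = 3) -> bool:
    for attempt in range(max_retries):
        action = process_results(attempt_passes(attempt), attempt, max_retries)
        if action == "continue":
            continue
        return action == "success"
    return False


# Example: fails on the first attempt, passes on the second.
assert run_with_retry(lambda attempt: attempt >= 1)
```

Returning plain strings keeps the helper simple; annotating the return type as t.Literal["continue", "success", "failure"] (or using an Enum) would let a type checker catch typos in the dispatch.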
@@ -671,11 +722,12 @@ class PhaseCoordinator:
         return False

     def _attempt_autofix_for_fast_hooks(self, results: list[t.Any]) -> bool:
-        """Attempt to autofix fast hook failures."""
+        """Attempt to autofix fast hook failures using lazy-loaded coordinator."""
         try:
             self.logger.info("Attempting autofix for fast hook failures")
-            # Apply autofixes for fast hooks
-            return self.autofix_coordinator.apply_fast_stage_fixes()
+            # Apply autofixes for fast hooks using lazy-loaded service
+            autofix_coordinator = self._lazy_autofix.get()
+            return autofix_coordinator.apply_fast_stage_fixes()
         except Exception as e:
             self.logger.warning(f"Autofix attempt failed: {e}")
             return False
@@ -798,3 +850,90 @@ class PhaseCoordinator:
         self.console.print(f"[red]❌[/ red] {hook_type.title()} hooks error: {e}")
         self.session.fail_task(f"{hook_type}_hooks", str(e))
         return False
+
+    # Performance-optimized hook execution methods
+    async def _execute_hooks_with_parallel_support(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+    ) -> bool:
+        """Execute hooks with parallel optimization where safe."""
+        self._initialize_hook_execution(hook_type)
+
+        try:
+            # Execute hooks and handle results
+            return await self._process_parallel_hook_execution(
+                hook_type, hook_runner, options
+            )
+
+        except Exception as e:
+            return self._handle_hook_exception(hook_type, e)
+
+    async def _process_parallel_hook_execution(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+    ) -> bool:
+        """Process hook execution with autofix retry logic."""
+        # For now, maintain sequential execution for safety
+        # Future enhancement: implement parallel execution for independent hooks
+        results = hook_runner()
+        summary = self.hook_manager.get_hook_summary(results)
+
+        if not self._has_hook_failures(summary):
+            return self._handle_hook_success(hook_type, summary)
+
+        # Handle failures with potential autofix retry
+        return self._handle_parallel_hook_failures(
+            hook_type, hook_runner, options, results, summary
+        )
+
+    def _handle_parallel_hook_failures(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+        results: list[t.Any],
+        summary: dict[str, t.Any],
+    ) -> bool:
+        """Handle hook failures with autofix retry for fast hooks."""
+        if hook_type != "fast":
+            return self._handle_hook_failures(
+                hook_type, options, summary, results, 0, 1
+            )
+
+        # Try autofix for fast hooks
+        if not self._attempt_autofix_for_fast_hooks(results):
+            return self._handle_hook_failures(
+                hook_type, options, summary, results, 0, 1
+            )
+
+        # Retry after successful autofix
+        return self._retry_hooks_after_autofix(hook_type, hook_runner, options)
+
+    def _retry_hooks_after_autofix(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+    ) -> bool:
+        """Retry hooks after autofix was applied."""
+        self.console.print(
+            "[yellow]🔧[/ yellow] Applied autofixes for fast hooks, retrying..."
+        )
+
+        # Retry after autofix
+        results = hook_runner()
+        summary = self.hook_manager.get_hook_summary(results)
+
+        if not self._has_hook_failures(summary):
+            return self._handle_hook_success(hook_type, summary)
+
+        return self._handle_hook_failures(hook_type, options, summary, results, 0, 1)
+
+    @property
+    def autofix_coordinator(self):
+        """Lazy property for autofix coordinator."""
+        return self._lazy_autofix.get()
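The new async methods still run hooks sequentially; the inline comment marks parallel execution of independent hooks as a future enhancement. A generic sketch of what that could look like with asyncio.gather is shown below (illustrative only, not part of crackerjack).

```python
# Illustrative sketch of running independent hooks concurrently; not crackerjack
# code. Each hook is assumed to be safe to run in parallel with the others.
import asyncio


async def run_hook(name: str) -> bool:
    # Stand-in for launching one independent hook (e.g. via a subprocess).
    await asyncio.sleep(0.05)
    return True


async def run_independent_hooks(names: list[str]) -> bool:
    results = await asyncio.gather(*(run_hook(n) for n in names))
    return all(results)


if __name__ == "__main__":
    print(asyncio.run(run_independent_hooks(["hook-a", "hook-b", "hook-c"])))
```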