crackerjack 0.31.18-py3-none-any.whl → 0.33.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release has been flagged as potentially problematic.
- crackerjack/CLAUDE.md +71 -452
- crackerjack/__main__.py +1 -1
- crackerjack/agents/refactoring_agent.py +67 -46
- crackerjack/cli/handlers.py +7 -7
- crackerjack/config/hooks.py +36 -6
- crackerjack/core/async_workflow_orchestrator.py +2 -2
- crackerjack/core/enhanced_container.py +67 -0
- crackerjack/core/phase_coordinator.py +211 -44
- crackerjack/core/workflow_orchestrator.py +723 -72
- crackerjack/dynamic_config.py +1 -25
- crackerjack/managers/publish_manager.py +22 -5
- crackerjack/managers/test_command_builder.py +19 -13
- crackerjack/managers/test_manager.py +15 -4
- crackerjack/mcp/server_core.py +162 -34
- crackerjack/mcp/tools/core_tools.py +1 -1
- crackerjack/mcp/tools/execution_tools.py +16 -3
- crackerjack/mcp/tools/workflow_executor.py +130 -40
- crackerjack/mixins/__init__.py +5 -0
- crackerjack/mixins/error_handling.py +214 -0
- crackerjack/models/config.py +9 -0
- crackerjack/models/protocols.py +114 -0
- crackerjack/models/task.py +3 -0
- crackerjack/security/__init__.py +1 -0
- crackerjack/security/audit.py +226 -0
- crackerjack/services/config.py +3 -2
- crackerjack/services/config_merge.py +11 -5
- crackerjack/services/coverage_ratchet.py +22 -0
- crackerjack/services/git.py +121 -22
- crackerjack/services/initialization.py +25 -9
- crackerjack/services/memory_optimizer.py +477 -0
- crackerjack/services/parallel_executor.py +474 -0
- crackerjack/services/performance_benchmarks.py +292 -577
- crackerjack/services/performance_cache.py +443 -0
- crackerjack/services/performance_monitor.py +633 -0
- crackerjack/services/security.py +63 -0
- crackerjack/services/security_logger.py +9 -1
- crackerjack/services/terminal_utils.py +0 -0
- crackerjack/tools/validate_regex_patterns.py +14 -0
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/METADATA +2 -2
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/RECORD +43 -34
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/WHEEL +0 -0
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/licenses/LICENSE +0 -0
crackerjack/core/phase_coordinator.py

```diff
@@ -7,6 +7,7 @@ from rich.console import Console
 
 from crackerjack.code_cleaner import CodeCleaner, PackageCleaningResult
 from crackerjack.core.autofix_coordinator import AutofixCoordinator
+from crackerjack.mixins import ErrorHandlingMixin
 from crackerjack.models.protocols import (
     ConfigMergeServiceProtocol,
     FileSystemInterface,
@@ -16,12 +17,20 @@ from crackerjack.models.protocols import (
     PublishManager,
     TestManagerProtocol,
 )
-from crackerjack.services.
+from crackerjack.services.memory_optimizer import (
+    create_lazy_service,
+    get_memory_optimizer,
+)
+from crackerjack.services.parallel_executor import (
+    get_async_executor,
+    get_parallel_executor,
+)
+from crackerjack.services.performance_cache import get_filesystem_cache, get_git_cache
 
 from .session_coordinator import SessionCoordinator
 
 
-class PhaseCoordinator:
+class PhaseCoordinator(ErrorHandlingMixin):
     def __init__(
         self,
         console: Console,
```
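The new imports reference module-level accessors (`get_git_cache`, `get_filesystem_cache`, `get_memory_optimizer`, `get_parallel_executor`, `get_async_executor`) from services added in this release (`performance_cache.py`, `memory_optimizer.py`, `parallel_executor.py`), none of which appear in this excerpt. As a rough illustration of the accessor pattern those names suggest — a lazily created, process-wide instance behind a `get_*()` function — here is a minimal sketch; the cache class and TTL value are invented for the example and are not crackerjack's actual API.

```python
# Hypothetical sketch only: the real accessors live in the new
# crackerjack.services.performance_cache module, which is not shown in this diff.
from __future__ import annotations

import threading
import time
from typing import Any


class _TTLCache:
    """Minimal thread-safe cache with per-entry expiry (illustrative)."""

    def __init__(self, ttl_seconds: float = 30.0) -> None:
        self._ttl = ttl_seconds
        self._data: dict[str, tuple[float, Any]] = {}
        self._lock = threading.Lock()

    def get(self, key: str) -> Any | None:
        with self._lock:
            entry = self._data.get(key)
            if entry is None:
                return None
            stored_at, value = entry
            if time.monotonic() - stored_at > self._ttl:
                del self._data[key]
                return None
            return value

    def set(self, key: str, value: Any) -> None:
        with self._lock:
            self._data[key] = (time.monotonic(), value)


_git_cache: _TTLCache | None = None
_factory_lock = threading.Lock()


def get_git_cache() -> _TTLCache:
    """Return a process-wide cache instance, creating it on first use."""
    global _git_cache
    with _factory_lock:
        if _git_cache is None:
            _git_cache = _TTLCache(ttl_seconds=30.0)
        return _git_cache
```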
```diff
@@ -55,13 +64,31 @@ class PhaseCoordinator:
             security_logger=None,
             backup_service=None,
         )
+        # Initialize configuration service - could be injected via DI
+        from crackerjack.services.config import ConfigurationService
+
         self.config_service = ConfigurationService(console=console, pkg_path=pkg_path)
-
-
-        )
+        # Lazy-loaded autofix coordinator (now using lazy service)
+        # self.autofix_coordinator will be accessed via property
 
         self.logger = logging.getLogger("crackerjack.phases")
 
+        # Performance optimization services
+        self._memory_optimizer = get_memory_optimizer()
+        self._parallel_executor = get_parallel_executor()
+        self._async_executor = get_async_executor()
+        self._git_cache = get_git_cache()
+        self._filesystem_cache = get_filesystem_cache()
+
+        # Lazy-loaded heavy services
+        self._lazy_autofix = create_lazy_service(
+            lambda: AutofixCoordinator(console=console, pkg_path=pkg_path),
+            "autofix_coordinator",
+        )
+
+        # Initialize ErrorHandlingMixin
+        super().__init__()
+
     def run_cleaning_phase(self, options: OptionsProtocol) -> bool:
         if not options.clean:
             return True
```
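The constructor now wraps `AutofixCoordinator` in `create_lazy_service(...)` and resolves it later with `.get()` (see the `autofix_coordinator` property and `_attempt_autofix_for_fast_hooks` further down). The real implementation lives in the new `crackerjack/services/memory_optimizer.py`, which is not part of this excerpt; the sketch below only mirrors the shape implied by that usage.

```python
# Illustrative sketch, not crackerjack's actual implementation: the real
# create_lazy_service is imported from crackerjack.services.memory_optimizer.
from __future__ import annotations

import typing as t

T = t.TypeVar("T")


class LazyService(t.Generic[T]):
    """Defers construction of an expensive service until the first .get() call."""

    def __init__(self, factory: t.Callable[[], T], name: str) -> None:
        self._factory = factory
        self._name = name
        self._instance: T | None = None

    def get(self) -> T:
        # Build once, then reuse the cached instance on later calls.
        if self._instance is None:
            self._instance = self._factory()
        return self._instance


def create_lazy_service(factory: t.Callable[[], T], name: str) -> LazyService[T]:
    return LazyService(factory, name)
```

The point of the pattern is to keep heavyweight objects out of `__init__` so they are only built when a phase actually needs them.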
```diff
@@ -71,16 +98,16 @@ class PhaseCoordinator:
             self._display_cleaning_header()
             return self._execute_cleaning_process()
         except Exception as e:
-            self.
+            self.handle_subprocess_error(e, [], "Code cleaning", critical=False)
             self.session.fail_task("cleaning", str(e))
             return False
 
     def _display_cleaning_header(self) -> None:
-        self.console.print("\n" + "-" *
+        self.console.print("\n" + "-" * 40)
         self.console.print(
             "[bold bright_magenta]🛠️ SETUP[/bold bright_magenta] [bold bright_white]Initializing project structure[/bold bright_white]",
         )
-        self.console.print("-" *
+        self.console.print("-" * 40 + "\n")
         self.console.print("[yellow]🧹[/yellow] Starting code cleaning...")
 
     def _execute_cleaning_process(self) -> bool:
```
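The cleaning and configuration phases now route exceptions through `handle_subprocess_error(error, command, context, critical=...)`, provided by the new `ErrorHandlingMixin` (`crackerjack/mixins/error_handling.py`, +214 lines, not shown in this excerpt). A minimal sketch of a mixin with that call signature might look like the following; the logging behavior is an assumption, not crackerjack's actual implementation.

```python
# Minimal sketch assuming only the call signature visible in this diff; the
# real ErrorHandlingMixin is far more featureful and is not shown here.
from __future__ import annotations

import logging


class ErrorHandlingMixin:
    def __init__(self) -> None:
        self._error_log = logging.getLogger("crackerjack.errors")

    def handle_subprocess_error(
        self,
        error: Exception,
        command: list[str],
        context: str,
        *,
        critical: bool = False,
    ) -> None:
        """Record a failure with enough context to debug it later."""
        cmd = " ".join(command) if command else "<no command>"
        message = f"{context} failed ({cmd}): {error}"
        if critical:
            self._error_log.error(message)
        else:
            self._error_log.warning(message)
```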
```diff
@@ -156,6 +183,7 @@ class PhaseCoordinator:
             self._complete_configuration_task(success)
             return success
         except Exception as e:
+            self.handle_subprocess_error(e, [], "Configuration phase", critical=False)
             self.session.fail_task("configuration", str(e))
             return False
 
@@ -370,32 +398,28 @@ class PhaseCoordinator:
         if options.skip_hooks:
             return True
 
-
-
-
-            options,
-        )
+        # Use standard execution for now - parallel support can be added later
+        hook_results = self.hook_manager.run_fast_hooks()
+        return all(r.status == "passed" for r in hook_results)
 
     def run_comprehensive_hooks_only(self, options: OptionsProtocol) -> bool:
         if options.skip_hooks:
             return True
 
-
-
-
-            options,
-        )
+        # Use standard execution for now - parallel support can be added later
+        hook_results = self.hook_manager.run_comprehensive_hooks()
+        return all(r.status == "passed" for r in hook_results)
 
     def run_testing_phase(self, options: OptionsProtocol) -> bool:
         if not options.test:
            return True
         self.session.track_task("testing", "Test execution")
         try:
-            self.console.print("\n" + "-" *
+            self.console.print("\n" + "-" * 40)
             self.console.print(
                 "[bold bright_blue]🧪 TESTS[/bold bright_blue] [bold bright_white]Running test suite[/bold bright_white]",
             )
-            self.console.print("-" *
+            self.console.print("-" * 40 + "\n")
             if not self.test_manager.validate_test_environment():
                 self.session.fail_task("testing", "Test environment validation failed")
                 return False
```
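The simplified `run_fast_hooks_only` / `run_comprehensive_hooks_only` paths now just run the hook manager and require every result to report `status == "passed"`. The hook result type itself is defined elsewhere in crackerjack and is not part of this diff; a hypothetical minimal shape that would satisfy this check:

```python
# Hypothetical shape only: crackerjack's real hook result type is not shown in
# this diff; the simplified paths above only rely on a `status` attribute.
from dataclasses import dataclass


@dataclass
class HookResult:
    name: str
    status: str  # e.g. "passed" or "failed"


def all_hooks_passed(results: list[HookResult]) -> bool:
    # Mirrors the aggregation used in run_fast_hooks_only / run_comprehensive_hooks_only.
    return all(r.status == "passed" for r in results)


if __name__ == "__main__":
    demo = [HookResult("ruff", "passed"), HookResult("bandit", "failed")]
    print(all_hooks_passed(demo))  # False
```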
```diff
@@ -445,6 +469,13 @@ class PhaseCoordinator:
     ) -> bool:
         new_version = self.publish_manager.bump_version(version_type)
 
+        # Stage all changes after version bumping and code cleaning (if enabled)
+        self.console.print("[blue]📂[/blue] Staging all changes for publishing...")
+        if not self.git_service.add_all_files():
+            self.console.print(
+                "[yellow]⚠️[/yellow] Failed to stage files, continuing with publish..."
+            )
+
         if not options.no_git_tags:
             self.publish_manager.create_git_tag(new_version)
 
@@ -483,6 +514,27 @@ class PhaseCoordinator:
 
     def _handle_no_changes_to_commit(self) -> bool:
         self.console.print("[yellow]ℹ️[/yellow] No changes to commit")
+
+        # Check if there are unpushed commits
+        from contextlib import suppress
+
+        with suppress(ValueError, Exception):
+            commit_count = self.git_service.get_unpushed_commit_count()
+            if commit_count > 0:
+                self.console.print(
+                    f"[blue]📤[/blue] Found {commit_count} unpushed commit(s), attempting push..."
+                )
+                if self.git_service.push():
+                    self.session.complete_task(
+                        "commit",
+                        f"No new changes, pushed {commit_count} existing commit(s)",
+                    )
+                    return True
+                else:
+                    self.console.print(
+                        "[yellow]⚠️[/yellow] Push failed for existing commits"
+                    )
+
         self.session.complete_task("commit", "No changes to commit")
         return True
 
```
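`_handle_no_changes_to_commit` now calls `git_service.get_unpushed_commit_count()` and `push()`, both added to `crackerjack/services/git.py` in this release (not shown here). One conventional way to count unpushed commits is `git rev-list --count @{upstream}..HEAD`; the sketch below assumes that approach and is not the actual crackerjack helper.

```python
# Sketch of how an unpushed-commit count could be obtained; the command choice
# is an assumption, since the real helper in crackerjack/services/git.py is
# not part of this excerpt.
import subprocess
from pathlib import Path


def get_unpushed_commit_count(repo: Path) -> int:
    """Count commits on HEAD that the upstream branch does not have yet."""
    result = subprocess.run(
        ["git", "rev-list", "--count", "@{upstream}..HEAD"],
        cwd=repo,
        capture_output=True,
        text=True,
        check=False,
    )
    if result.returncode != 0:
        # No upstream configured (or not a git repo): report zero unpushed commits.
        return 0
    return int(result.stdout.strip() or 0)
```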
```diff
@@ -587,33 +639,60 @@ class PhaseCoordinator:
 
         for attempt in range(max_retries):
             try:
-
-
-
-
-
-
-
-
-
-
-
-                return
-
-
-                    summary,
-                    results,
-                    attempt,
-                    max_retries,
-                )
-            return self._handle_hook_success(hook_type, summary)
+                execution_result = self._execute_single_hook_attempt(hook_runner)
+                if execution_result is None:
+                    return False
+
+                results, summary = execution_result
+                should_continue = self._process_hook_results(
+                    hook_type, options, summary, results, attempt, max_retries
+                )
+
+                if should_continue == "continue":
+                    continue
+                elif should_continue == "success":
+                    return True
+                else:
+                    return False
 
             except Exception as e:
                 return self._handle_hook_exception(hook_type, e)
 
         return False
 
+    def _execute_single_hook_attempt(
+        self, hook_runner: t.Callable[[], list[t.Any]]
+    ) -> tuple[list[t.Any], dict[str, t.Any]] | None:
+        """Execute a single hook attempt and return results and summary."""
+        try:
+            results = hook_runner()
+            summary = self.hook_manager.get_hook_summary(results)
+            return results, summary
+        except Exception:
+            return None
+
+    def _process_hook_results(
+        self,
+        hook_type: str,
+        options: OptionsProtocol,
+        summary: dict[str, t.Any],
+        results: list[t.Any],
+        attempt: int,
+        max_retries: int,
+    ) -> str:
+        """Process hook results and return action: 'continue', 'success', or 'failure'."""
+        if not self._has_hook_failures(summary):
+            self._handle_hook_success(hook_type, summary)
+            return "success"
+
+        if self._should_retry_hooks(hook_type, attempt, max_retries, results):
+            return "continue"
+
+        self._handle_hook_failures(
+            hook_type, options, summary, results, attempt, max_retries
+        )
+        return "failure"
+
     def _initialize_hook_execution(self, hook_type: str) -> None:
         self.logger.info(f"Starting {hook_type} hooks execution")
         self.session.track_task(
@@ -643,11 +722,12 @@ class PhaseCoordinator:
             return False
 
     def _attempt_autofix_for_fast_hooks(self, results: list[t.Any]) -> bool:
-        """Attempt to autofix fast hook failures."""
+        """Attempt to autofix fast hook failures using lazy-loaded coordinator."""
         try:
             self.logger.info("Attempting autofix for fast hook failures")
-            # Apply autofixes for fast hooks
-
+            # Apply autofixes for fast hooks using lazy-loaded service
+            autofix_coordinator = self._lazy_autofix.get()
+            return autofix_coordinator.apply_fast_stage_fixes()
         except Exception as e:
             self.logger.warning(f"Autofix attempt failed: {e}")
             return False
@@ -770,3 +850,90 @@ class PhaseCoordinator:
         self.console.print(f"[red]❌[/red] {hook_type.title()} hooks error: {e}")
         self.session.fail_task(f"{hook_type}_hooks", str(e))
         return False
+
+    # Performance-optimized hook execution methods
+    async def _execute_hooks_with_parallel_support(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+    ) -> bool:
+        """Execute hooks with parallel optimization where safe."""
+        self._initialize_hook_execution(hook_type)
+
+        try:
+            # Execute hooks and handle results
+            return await self._process_parallel_hook_execution(
+                hook_type, hook_runner, options
+            )
+
+        except Exception as e:
+            return self._handle_hook_exception(hook_type, e)
+
+    async def _process_parallel_hook_execution(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+    ) -> bool:
+        """Process hook execution with autofix retry logic."""
+        # For now, maintain sequential execution for safety
+        # Future enhancement: implement parallel execution for independent hooks
+        results = hook_runner()
+        summary = self.hook_manager.get_hook_summary(results)
+
+        if not self._has_hook_failures(summary):
+            return self._handle_hook_success(hook_type, summary)
+
+        # Handle failures with potential autofix retry
+        return self._handle_parallel_hook_failures(
+            hook_type, hook_runner, options, results, summary
+        )
+
+    def _handle_parallel_hook_failures(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+        results: list[t.Any],
+        summary: dict[str, t.Any],
+    ) -> bool:
+        """Handle hook failures with autofix retry for fast hooks."""
+        if hook_type != "fast":
+            return self._handle_hook_failures(
+                hook_type, options, summary, results, 0, 1
+            )
+
+        # Try autofix for fast hooks
+        if not self._attempt_autofix_for_fast_hooks(results):
+            return self._handle_hook_failures(
+                hook_type, options, summary, results, 0, 1
+            )
+
+        # Retry after successful autofix
+        return self._retry_hooks_after_autofix(hook_type, hook_runner, options)
+
+    def _retry_hooks_after_autofix(
+        self,
+        hook_type: str,
+        hook_runner: t.Callable[[], list[t.Any]],
+        options: OptionsProtocol,
+    ) -> bool:
+        """Retry hooks after autofix was applied."""
+        self.console.print(
+            "[yellow]🔧[/yellow] Applied autofixes for fast hooks, retrying..."
+        )
+
+        # Retry after autofix
+        results = hook_runner()
+        summary = self.hook_manager.get_hook_summary(results)
+
+        if not self._has_hook_failures(summary):
+            return self._handle_hook_success(hook_type, summary)
+
+        return self._handle_hook_failures(hook_type, options, summary, results, 0, 1)
+
+    @property
+    def autofix_coordinator(self):
+        """Lazy property for autofix coordinator."""
+        return self._lazy_autofix.get()
```