crackerjack 0.29.0__py3-none-any.whl → 0.30.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/__main__.py +48 -2
- crackerjack/code_cleaner.py +980 -0
- crackerjack/crackerjack.py +713 -1048
- crackerjack/dynamic_config.py +586 -0
- crackerjack/pyproject.toml +2 -1
- {crackerjack-0.29.0.dist-info → crackerjack-0.30.3.dist-info}/METADATA +2 -1
- crackerjack-0.30.3.dist-info/RECORD +16 -0
- crackerjack/.pre-commit-config-ai.yaml +0 -149
- crackerjack/.pre-commit-config-fast.yaml +0 -69
- crackerjack/.pre-commit-config.yaml +0 -114
- crackerjack-0.29.0.dist-info/RECORD +0 -17
- {crackerjack-0.29.0.dist-info → crackerjack-0.30.3.dist-info}/WHEEL +0 -0
- {crackerjack-0.29.0.dist-info → crackerjack-0.30.3.dist-info}/licenses/LICENSE +0 -0
crackerjack/crackerjack.py
CHANGED
|
@@ -5,21 +5,21 @@ import re
|
|
|
5
5
|
import subprocess
|
|
6
6
|
import time
|
|
7
7
|
import typing as t
|
|
8
|
-
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
9
8
|
from contextlib import suppress
|
|
10
9
|
from dataclasses import dataclass
|
|
11
|
-
from functools import lru_cache
|
|
12
10
|
from pathlib import Path
|
|
13
11
|
from subprocess import CompletedProcess
|
|
14
12
|
from subprocess import run as execute
|
|
15
13
|
from tomllib import loads
|
|
16
14
|
|
|
17
|
-
import aiofiles
|
|
18
15
|
from pydantic import BaseModel
|
|
19
16
|
from rich.console import Console
|
|
20
17
|
from tomli_w import dumps
|
|
21
18
|
|
|
22
|
-
from .
|
|
19
|
+
from .code_cleaner import CodeCleaner
|
|
20
|
+
from .dynamic_config import (
|
|
21
|
+
generate_config_for_mode,
|
|
22
|
+
)
|
|
23
23
|
|
|
24
24
|
|
|
25
25
|
@dataclass
|
|
@@ -496,9 +496,6 @@ python -m crackerjack --resume-from {self.progress_file.name}
|
|
|
496
496
|
|
|
497
497
|
config_files = (
|
|
498
498
|
".gitignore",
|
|
499
|
-
".pre-commit-config.yaml",
|
|
500
|
-
".pre-commit-config-ai.yaml",
|
|
501
|
-
".pre-commit-config-fast.yaml",
|
|
502
499
|
".libcst.codemod.yaml",
|
|
503
500
|
)
|
|
504
501
|
|
|
@@ -544,980 +541,11 @@ class OptionsProtocol(t.Protocol):
|
|
|
544
541
|
track_progress: bool = False
|
|
545
542
|
resume_from: str | None = None
|
|
546
543
|
progress_file: str | None = None
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
def _analyze_workload_characteristics(self, files: list[Path]) -> dict[str, t.Any]:
|
|
553
|
-
if not files:
|
|
554
|
-
return {
|
|
555
|
-
"total_files": 0,
|
|
556
|
-
"total_size": 0,
|
|
557
|
-
"avg_file_size": 0,
|
|
558
|
-
"complexity": "low",
|
|
559
|
-
}
|
|
560
|
-
total_size = 0
|
|
561
|
-
large_files = 0
|
|
562
|
-
for file_path in files:
|
|
563
|
-
try:
|
|
564
|
-
size = file_path.stat().st_size
|
|
565
|
-
total_size += size
|
|
566
|
-
if size > 50_000:
|
|
567
|
-
large_files += 1
|
|
568
|
-
except (OSError, PermissionError):
|
|
569
|
-
continue
|
|
570
|
-
avg_file_size = total_size / len(files) if files else 0
|
|
571
|
-
large_file_ratio = large_files / len(files) if files else 0
|
|
572
|
-
if len(files) > 100 or avg_file_size > 20_000 or large_file_ratio > 0.3:
|
|
573
|
-
complexity = "high"
|
|
574
|
-
elif len(files) > 50 or avg_file_size > 10_000 or large_file_ratio > 0.1:
|
|
575
|
-
complexity = "medium"
|
|
576
|
-
else:
|
|
577
|
-
complexity = "low"
|
|
578
|
-
|
|
579
|
-
return {
|
|
580
|
-
"total_files": len(files),
|
|
581
|
-
"total_size": total_size,
|
|
582
|
-
"avg_file_size": avg_file_size,
|
|
583
|
-
"large_files": large_files,
|
|
584
|
-
"large_file_ratio": large_file_ratio,
|
|
585
|
-
"complexity": complexity,
|
|
586
|
-
}
|
|
587
|
-
|
|
588
|
-
def _calculate_optimal_workers(self, workload: dict[str, t.Any]) -> int:
|
|
589
|
-
import os
|
|
590
|
-
|
|
591
|
-
cpu_count = os.cpu_count() or 4
|
|
592
|
-
if workload["complexity"] == "high":
|
|
593
|
-
max_workers = min(cpu_count // 2, 3)
|
|
594
|
-
elif workload["complexity"] == "medium":
|
|
595
|
-
max_workers = min(cpu_count, 6)
|
|
596
|
-
else:
|
|
597
|
-
max_workers = min(cpu_count + 2, 8)
|
|
598
|
-
|
|
599
|
-
return min(max_workers, workload["total_files"])
|
|
600
|
-
|
|
601
|
-
def clean_files(self, pkg_dir: Path | None) -> None:
|
|
602
|
-
if pkg_dir is None:
|
|
603
|
-
return
|
|
604
|
-
python_files = [
|
|
605
|
-
file_path
|
|
606
|
-
for file_path in pkg_dir.rglob("*.py")
|
|
607
|
-
if not str(file_path.parent).startswith("__")
|
|
608
|
-
]
|
|
609
|
-
if not python_files:
|
|
610
|
-
return
|
|
611
|
-
workload = self._analyze_workload_characteristics(python_files)
|
|
612
|
-
max_workers = self._calculate_optimal_workers(workload)
|
|
613
|
-
if len(python_files) > 10:
|
|
614
|
-
self.console.print(
|
|
615
|
-
f"[dim]Cleaning {workload['total_files']} files "
|
|
616
|
-
f"({workload['complexity']} complexity) with {max_workers} workers[/dim]"
|
|
617
|
-
)
|
|
618
|
-
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
|
619
|
-
future_to_file = {
|
|
620
|
-
executor.submit(self.clean_file, file_path): file_path
|
|
621
|
-
for file_path in python_files
|
|
622
|
-
}
|
|
623
|
-
for future in as_completed(future_to_file):
|
|
624
|
-
file_path = future_to_file[future]
|
|
625
|
-
try:
|
|
626
|
-
future.result()
|
|
627
|
-
except Exception as e:
|
|
628
|
-
self.console.print(
|
|
629
|
-
f"[bold bright_red]❌ Error cleaning {file_path}: {e}[/bold bright_red]"
|
|
630
|
-
)
|
|
631
|
-
self._cleanup_cache_directories(pkg_dir)
|
|
632
|
-
|
|
633
|
-
def _cleanup_cache_directories(self, pkg_dir: Path) -> None:
|
|
634
|
-
with suppress(PermissionError, OSError):
|
|
635
|
-
pycache_dir = pkg_dir / "__pycache__"
|
|
636
|
-
if pycache_dir.exists():
|
|
637
|
-
for cache_file in pycache_dir.iterdir():
|
|
638
|
-
with suppress(PermissionError, OSError):
|
|
639
|
-
cache_file.unlink()
|
|
640
|
-
pycache_dir.rmdir()
|
|
641
|
-
parent_pycache = pkg_dir.parent / "__pycache__"
|
|
642
|
-
if parent_pycache.exists():
|
|
643
|
-
for cache_file in parent_pycache.iterdir():
|
|
644
|
-
with suppress(PermissionError, OSError):
|
|
645
|
-
cache_file.unlink()
|
|
646
|
-
parent_pycache.rmdir()
|
|
647
|
-
|
|
648
|
-
def clean_file(self, file_path: Path) -> None:
|
|
649
|
-
from crackerjack.errors import ExecutionError, handle_error
|
|
650
|
-
|
|
651
|
-
try:
|
|
652
|
-
code = file_path.read_text(encoding="utf-8")
|
|
653
|
-
original_code = code
|
|
654
|
-
cleaning_failed = False
|
|
655
|
-
try:
|
|
656
|
-
code = self.remove_line_comments_streaming(code)
|
|
657
|
-
except Exception as e:
|
|
658
|
-
self.console.print(
|
|
659
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to remove line comments from {file_path}: {e}[/bold bright_yellow]"
|
|
660
|
-
)
|
|
661
|
-
code = original_code
|
|
662
|
-
cleaning_failed = True
|
|
663
|
-
try:
|
|
664
|
-
code = self.remove_docstrings_streaming(code)
|
|
665
|
-
except Exception as e:
|
|
666
|
-
self.console.print(
|
|
667
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to remove docstrings from {file_path}: {e}[/bold bright_yellow]"
|
|
668
|
-
)
|
|
669
|
-
code = original_code
|
|
670
|
-
cleaning_failed = True
|
|
671
|
-
try:
|
|
672
|
-
code = self.remove_extra_whitespace_streaming(code)
|
|
673
|
-
except Exception as e:
|
|
674
|
-
self.console.print(
|
|
675
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to remove extra whitespace from {file_path}: {e}[/bold bright_yellow]"
|
|
676
|
-
)
|
|
677
|
-
code = original_code
|
|
678
|
-
cleaning_failed = True
|
|
679
|
-
try:
|
|
680
|
-
code = self.reformat_code(code)
|
|
681
|
-
except Exception as e:
|
|
682
|
-
self.console.print(
|
|
683
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to reformat {file_path}: {e}[/bold bright_yellow]"
|
|
684
|
-
)
|
|
685
|
-
code = original_code
|
|
686
|
-
cleaning_failed = True
|
|
687
|
-
file_path.write_text(code, encoding="utf-8")
|
|
688
|
-
if cleaning_failed:
|
|
689
|
-
self.console.print(
|
|
690
|
-
f"[bold yellow]⚡ Partially cleaned:[/bold yellow] [dim bright_white]{file_path}[/dim bright_white]"
|
|
691
|
-
)
|
|
692
|
-
else:
|
|
693
|
-
self.console.print(
|
|
694
|
-
f"[bold green]✨ Cleaned:[/bold green] [dim bright_white]{file_path}[/dim bright_white]"
|
|
695
|
-
)
|
|
696
|
-
except PermissionError as e:
|
|
697
|
-
self.console.print(
|
|
698
|
-
f"[red]Failed to clean: {file_path} (Permission denied)[/red]"
|
|
699
|
-
)
|
|
700
|
-
handle_error(
|
|
701
|
-
ExecutionError(
|
|
702
|
-
message=f"Permission denied while cleaning {file_path}",
|
|
703
|
-
error_code=ErrorCode.PERMISSION_ERROR,
|
|
704
|
-
details=str(e),
|
|
705
|
-
recovery=f"Check file permissions for {file_path} and ensure you have write access",
|
|
706
|
-
),
|
|
707
|
-
console=self.console,
|
|
708
|
-
exit_on_error=False,
|
|
709
|
-
)
|
|
710
|
-
except OSError as e:
|
|
711
|
-
self.console.print(
|
|
712
|
-
f"[red]Failed to clean: {file_path} (File system error)[/red]"
|
|
713
|
-
)
|
|
714
|
-
handle_error(
|
|
715
|
-
ExecutionError(
|
|
716
|
-
message=f"File system error while cleaning {file_path}",
|
|
717
|
-
error_code=ErrorCode.FILE_WRITE_ERROR,
|
|
718
|
-
details=str(e),
|
|
719
|
-
recovery=f"Check that {file_path} exists and is not being used by another process",
|
|
720
|
-
),
|
|
721
|
-
console=self.console,
|
|
722
|
-
exit_on_error=False,
|
|
723
|
-
)
|
|
724
|
-
except UnicodeDecodeError as e:
|
|
725
|
-
self.console.print(
|
|
726
|
-
f"[red]Failed to clean: {file_path} (Encoding error)[/red]"
|
|
727
|
-
)
|
|
728
|
-
handle_error(
|
|
729
|
-
ExecutionError(
|
|
730
|
-
message=f"Encoding error while reading {file_path}",
|
|
731
|
-
error_code=ErrorCode.FILE_READ_ERROR,
|
|
732
|
-
details=str(e),
|
|
733
|
-
recovery=f"File {file_path} contains non-UTF-8 characters. Please check the file encoding.",
|
|
734
|
-
),
|
|
735
|
-
console=self.console,
|
|
736
|
-
exit_on_error=False,
|
|
737
|
-
)
|
|
738
|
-
except Exception as e:
|
|
739
|
-
self.console.print(
|
|
740
|
-
f"[red]Failed to clean: {file_path} (Unexpected error)[/red]"
|
|
741
|
-
)
|
|
742
|
-
handle_error(
|
|
743
|
-
ExecutionError(
|
|
744
|
-
message=f"Unexpected error while cleaning {file_path}",
|
|
745
|
-
error_code=ErrorCode.UNEXPECTED_ERROR,
|
|
746
|
-
details=str(e),
|
|
747
|
-
recovery="This is an unexpected error. Please report this issue with the file content if possible.",
|
|
748
|
-
),
|
|
749
|
-
console=self.console,
|
|
750
|
-
exit_on_error=False,
|
|
751
|
-
)
|
|
752
|
-
|
|
753
|
-
def _initialize_docstring_state(self) -> dict[str, t.Any]:
|
|
754
|
-
return {
|
|
755
|
-
"in_docstring": False,
|
|
756
|
-
"delimiter": None,
|
|
757
|
-
"waiting": False,
|
|
758
|
-
"function_indent": 0,
|
|
759
|
-
"removed_docstring": False,
|
|
760
|
-
"in_multiline_def": False,
|
|
761
|
-
}
|
|
762
|
-
|
|
763
|
-
def _handle_function_definition(
|
|
764
|
-
self, line: str, stripped: str, state: dict[str, t.Any]
|
|
765
|
-
) -> bool:
|
|
766
|
-
if self._is_function_or_class_definition(stripped):
|
|
767
|
-
state["waiting"] = True
|
|
768
|
-
state["function_indent"] = len(line) - len(line.lstrip())
|
|
769
|
-
state["removed_docstring"] = False
|
|
770
|
-
state["in_multiline_def"] = not stripped.endswith(":")
|
|
771
|
-
return True
|
|
772
|
-
return False
|
|
773
|
-
|
|
774
|
-
def _handle_multiline_definition(
|
|
775
|
-
self, line: str, stripped: str, state: dict[str, t.Any]
|
|
776
|
-
) -> bool:
|
|
777
|
-
if state["in_multiline_def"]:
|
|
778
|
-
if stripped.endswith(":"):
|
|
779
|
-
state["in_multiline_def"] = False
|
|
780
|
-
return True
|
|
781
|
-
return False
|
|
782
|
-
|
|
783
|
-
def _handle_waiting_docstring(
|
|
784
|
-
self, lines: list[str], i: int, stripped: str, state: dict[str, t.Any]
|
|
785
|
-
) -> tuple[bool, str | None]:
|
|
786
|
-
if state["waiting"] and stripped:
|
|
787
|
-
if self._handle_docstring_start(stripped, state):
|
|
788
|
-
pass_line = None
|
|
789
|
-
if not state["in_docstring"]:
|
|
790
|
-
function_indent: int = state["function_indent"]
|
|
791
|
-
if self._needs_pass_statement(lines, i + 1, function_indent):
|
|
792
|
-
pass_line = " " * (function_indent + 4) + "pass"
|
|
793
|
-
state["removed_docstring"] = True
|
|
794
|
-
return True, pass_line
|
|
795
|
-
else:
|
|
796
|
-
state["waiting"] = False
|
|
797
|
-
return False, None
|
|
798
|
-
|
|
799
|
-
def _handle_docstring_content(
|
|
800
|
-
self, lines: list[str], i: int, stripped: str, state: dict[str, t.Any]
|
|
801
|
-
) -> tuple[bool, str | None]:
|
|
802
|
-
if state["in_docstring"]:
|
|
803
|
-
if self._handle_docstring_end(stripped, state):
|
|
804
|
-
pass_line = None
|
|
805
|
-
function_indent: int = state["function_indent"]
|
|
806
|
-
if self._needs_pass_statement(lines, i + 1, function_indent):
|
|
807
|
-
pass_line = " " * (function_indent + 4) + "pass"
|
|
808
|
-
state["removed_docstring"] = False
|
|
809
|
-
return True, pass_line
|
|
810
|
-
else:
|
|
811
|
-
return True, None
|
|
812
|
-
return False, None
|
|
813
|
-
|
|
814
|
-
def _process_line(
|
|
815
|
-
self, lines: list[str], i: int, line: str, state: dict[str, t.Any]
|
|
816
|
-
) -> tuple[bool, str | None]:
|
|
817
|
-
stripped = line.strip()
|
|
818
|
-
if self._handle_function_definition(line, stripped, state):
|
|
819
|
-
return True, line
|
|
820
|
-
if self._handle_multiline_definition(line, stripped, state):
|
|
821
|
-
return True, line
|
|
822
|
-
handled, pass_line = self._handle_waiting_docstring(lines, i, stripped, state)
|
|
823
|
-
if handled:
|
|
824
|
-
return True, pass_line
|
|
825
|
-
handled, pass_line = self._handle_docstring_content(lines, i, stripped, state)
|
|
826
|
-
if handled:
|
|
827
|
-
return True, pass_line
|
|
828
|
-
if state["removed_docstring"] and stripped:
|
|
829
|
-
state["removed_docstring"] = False
|
|
830
|
-
return False, line
|
|
831
|
-
|
|
832
|
-
def remove_docstrings(self, code: str) -> str:
|
|
833
|
-
lines = code.split("\n")
|
|
834
|
-
cleaned_lines: list[str] = []
|
|
835
|
-
docstring_state = self._initialize_docstring_state()
|
|
836
|
-
for i, line in enumerate(lines):
|
|
837
|
-
handled, result_line = self._process_line(lines, i, line, docstring_state)
|
|
838
|
-
if handled:
|
|
839
|
-
if result_line is not None:
|
|
840
|
-
cleaned_lines.append(result_line)
|
|
841
|
-
else:
|
|
842
|
-
cleaned_lines.append(line)
|
|
843
|
-
return "\n".join(cleaned_lines)
|
|
844
|
-
|
|
845
|
-
def _is_function_or_class_definition(self, stripped_line: str) -> bool:
|
|
846
|
-
return stripped_line.startswith(("def ", "class ", "async def "))
|
|
847
|
-
|
|
848
|
-
def _handle_docstring_start(self, stripped: str, state: dict[str, t.Any]) -> bool:
|
|
849
|
-
if not stripped.startswith(('"""', "'''", '"', "'")):
|
|
850
|
-
return False
|
|
851
|
-
if stripped.startswith(('"""', "'''")):
|
|
852
|
-
delimiter = stripped[:3]
|
|
853
|
-
else:
|
|
854
|
-
delimiter = stripped[0]
|
|
855
|
-
state["delimiter"] = delimiter
|
|
856
|
-
if self._is_single_line_docstring(stripped, delimiter):
|
|
857
|
-
state["waiting"] = False
|
|
858
|
-
return True
|
|
859
|
-
else:
|
|
860
|
-
state["in_docstring"] = True
|
|
861
|
-
state["waiting"] = False
|
|
862
|
-
return True
|
|
863
|
-
|
|
864
|
-
def _is_single_line_docstring(self, stripped: str, delimiter: str) -> bool:
|
|
865
|
-
return stripped.endswith(delimiter) and len(stripped) > len(delimiter)
|
|
866
|
-
|
|
867
|
-
def _handle_docstring_end(self, stripped: str, state: dict[str, t.Any]) -> bool:
|
|
868
|
-
if state["delimiter"] and stripped.endswith(state["delimiter"]):
|
|
869
|
-
state["in_docstring"] = False
|
|
870
|
-
state["delimiter"] = None
|
|
871
|
-
return True
|
|
872
|
-
return False
|
|
873
|
-
|
|
874
|
-
def _needs_pass_statement(
|
|
875
|
-
self, lines: list[str], start_index: int, function_indent: int
|
|
876
|
-
) -> bool:
|
|
877
|
-
for i in range(start_index, len(lines)):
|
|
878
|
-
line = lines[i]
|
|
879
|
-
stripped = line.strip()
|
|
880
|
-
if not stripped:
|
|
881
|
-
continue
|
|
882
|
-
line_indent = len(line) - len(line.lstrip())
|
|
883
|
-
if line_indent <= function_indent:
|
|
884
|
-
return True
|
|
885
|
-
if line_indent > function_indent:
|
|
886
|
-
return False
|
|
887
|
-
return True
|
|
888
|
-
|
|
889
|
-
def remove_line_comments(self, code: str) -> str:
|
|
890
|
-
lines = code.split("\n")
|
|
891
|
-
cleaned_lines: list[str] = []
|
|
892
|
-
for line in lines:
|
|
893
|
-
if not line.strip():
|
|
894
|
-
cleaned_lines.append(line)
|
|
895
|
-
continue
|
|
896
|
-
cleaned_line = self._process_line_for_comments(line)
|
|
897
|
-
if cleaned_line or not line.strip():
|
|
898
|
-
cleaned_lines.append(cleaned_line or line)
|
|
899
|
-
return "\n".join(cleaned_lines)
|
|
900
|
-
|
|
901
|
-
def _process_line_for_comments(self, line: str) -> str:
|
|
902
|
-
result: list[str] = []
|
|
903
|
-
string_state = {"in_string": None}
|
|
904
|
-
for i, char in enumerate(line):
|
|
905
|
-
if self._handle_string_character(char, i, line, string_state, result):
|
|
906
|
-
continue
|
|
907
|
-
elif self._handle_comment_character(char, i, line, string_state, result):
|
|
908
|
-
break
|
|
909
|
-
else:
|
|
910
|
-
result.append(char)
|
|
911
|
-
return "".join(result).rstrip()
|
|
912
|
-
|
|
913
|
-
def _handle_string_character(
|
|
914
|
-
self,
|
|
915
|
-
char: str,
|
|
916
|
-
index: int,
|
|
917
|
-
line: str,
|
|
918
|
-
string_state: dict[str, t.Any],
|
|
919
|
-
result: list[str],
|
|
920
|
-
) -> bool:
|
|
921
|
-
if char not in ("'", '"'):
|
|
922
|
-
return False
|
|
923
|
-
if index > 0 and line[index - 1] == "\\":
|
|
924
|
-
return False
|
|
925
|
-
if string_state["in_string"] is None:
|
|
926
|
-
string_state["in_string"] = char
|
|
927
|
-
elif string_state["in_string"] == char:
|
|
928
|
-
string_state["in_string"] = None
|
|
929
|
-
result.append(char)
|
|
930
|
-
return True
|
|
931
|
-
|
|
932
|
-
def _handle_comment_character(
|
|
933
|
-
self,
|
|
934
|
-
char: str,
|
|
935
|
-
index: int,
|
|
936
|
-
line: str,
|
|
937
|
-
string_state: dict[str, t.Any],
|
|
938
|
-
result: list[str],
|
|
939
|
-
) -> bool:
|
|
940
|
-
if char != "#" or string_state["in_string"] is not None:
|
|
941
|
-
return False
|
|
942
|
-
comment = line[index:].strip()
|
|
943
|
-
if self._is_special_comment_line(comment):
|
|
944
|
-
result.append(line[index:])
|
|
945
|
-
return True
|
|
946
|
-
|
|
947
|
-
def _is_special_comment_line(self, comment: str) -> bool:
|
|
948
|
-
special_comment_pattern = (
|
|
949
|
-
r"^#\s*(?:type:\s*ignore(?:\[.*?\])?|noqa|nosec|pragma:\s*no\s*cover"
|
|
950
|
-
r"|pylint:\s*disable|mypy:\s*ignore)"
|
|
951
|
-
)
|
|
952
|
-
return bool(re.match(special_comment_pattern, comment))
|
|
953
|
-
|
|
954
|
-
def remove_extra_whitespace(self, code: str) -> str:
|
|
955
|
-
lines = code.split("\n")
|
|
956
|
-
cleaned_lines: list[str] = []
|
|
957
|
-
function_tracker = {"in_function": False, "function_indent": 0}
|
|
958
|
-
import_tracker = {"in_imports": False, "last_import_type": None}
|
|
959
|
-
for i, line in enumerate(lines):
|
|
960
|
-
line = line.rstrip()
|
|
961
|
-
stripped_line = line.lstrip()
|
|
962
|
-
self._update_function_state(line, stripped_line, function_tracker)
|
|
963
|
-
self._update_import_state(line, stripped_line, import_tracker)
|
|
964
|
-
if not line:
|
|
965
|
-
if self._should_skip_empty_line(
|
|
966
|
-
i, lines, cleaned_lines, function_tracker, import_tracker
|
|
967
|
-
):
|
|
968
|
-
continue
|
|
969
|
-
cleaned_lines.append(line)
|
|
970
|
-
return "\n".join(self._remove_trailing_empty_lines(cleaned_lines))
|
|
971
|
-
|
|
972
|
-
def remove_docstrings_streaming(self, code: str) -> str:
|
|
973
|
-
if len(code) < 10000:
|
|
974
|
-
return self.remove_docstrings(code)
|
|
975
|
-
|
|
976
|
-
def process_lines():
|
|
977
|
-
lines = code.split("\n")
|
|
978
|
-
docstring_state = self._initialize_docstring_state()
|
|
979
|
-
for i, line in enumerate(lines):
|
|
980
|
-
handled, result_line = self._process_line(
|
|
981
|
-
lines, i, line, docstring_state
|
|
982
|
-
)
|
|
983
|
-
if handled:
|
|
984
|
-
if result_line is not None:
|
|
985
|
-
yield result_line
|
|
986
|
-
else:
|
|
987
|
-
yield line
|
|
988
|
-
|
|
989
|
-
return "\n".join(process_lines())
|
|
990
|
-
|
|
991
|
-
def remove_line_comments_streaming(self, code: str) -> str:
|
|
992
|
-
if len(code) < 10000:
|
|
993
|
-
return self.remove_line_comments(code)
|
|
994
|
-
|
|
995
|
-
def process_lines():
|
|
996
|
-
for line in code.split("\n"):
|
|
997
|
-
if not line.strip():
|
|
998
|
-
yield line
|
|
999
|
-
continue
|
|
1000
|
-
cleaned_line = self._process_line_for_comments(line)
|
|
1001
|
-
if cleaned_line or not line.strip():
|
|
1002
|
-
yield cleaned_line or line
|
|
1003
|
-
|
|
1004
|
-
return "\n".join(process_lines())
|
|
1005
|
-
|
|
1006
|
-
def remove_extra_whitespace_streaming(self, code: str) -> str:
|
|
1007
|
-
if len(code) < 10000:
|
|
1008
|
-
return self.remove_extra_whitespace(code)
|
|
1009
|
-
|
|
1010
|
-
def process_lines():
|
|
1011
|
-
lines = code.split("\n")
|
|
1012
|
-
function_tracker: dict[str, t.Any] = {
|
|
1013
|
-
"in_function": False,
|
|
1014
|
-
"function_indent": 0,
|
|
1015
|
-
}
|
|
1016
|
-
import_tracker: dict[str, t.Any] = {
|
|
1017
|
-
"in_imports": False,
|
|
1018
|
-
"last_import_type": None,
|
|
1019
|
-
}
|
|
1020
|
-
previous_lines: list[str] = []
|
|
1021
|
-
for i, line in enumerate(lines):
|
|
1022
|
-
line = line.rstrip()
|
|
1023
|
-
stripped_line = line.lstrip()
|
|
1024
|
-
self._update_function_state(line, stripped_line, function_tracker)
|
|
1025
|
-
self._update_import_state(line, stripped_line, import_tracker)
|
|
1026
|
-
if not line:
|
|
1027
|
-
if self._should_skip_empty_line(
|
|
1028
|
-
i, lines, previous_lines, function_tracker, import_tracker
|
|
1029
|
-
):
|
|
1030
|
-
continue
|
|
1031
|
-
previous_lines.append(line)
|
|
1032
|
-
yield line
|
|
1033
|
-
|
|
1034
|
-
processed_lines = list(process_lines())
|
|
1035
|
-
return "\n".join(self._remove_trailing_empty_lines(processed_lines))
|
|
1036
|
-
|
|
1037
|
-
def _update_function_state(
|
|
1038
|
-
self, line: str, stripped_line: str, function_tracker: dict[str, t.Any]
|
|
1039
|
-
) -> None:
|
|
1040
|
-
if stripped_line.startswith(("def ", "async def ")):
|
|
1041
|
-
function_tracker["in_function"] = True
|
|
1042
|
-
function_tracker["function_indent"] = len(line) - len(stripped_line)
|
|
1043
|
-
elif self._is_function_end(line, stripped_line, function_tracker):
|
|
1044
|
-
function_tracker["in_function"] = False
|
|
1045
|
-
function_tracker["function_indent"] = 0
|
|
1046
|
-
|
|
1047
|
-
def _update_import_state(
|
|
1048
|
-
self, line: str, stripped_line: str, import_tracker: dict[str, t.Any]
|
|
1049
|
-
) -> None:
|
|
1050
|
-
if stripped_line.startswith(("import ", "from ")):
|
|
1051
|
-
import_tracker["in_imports"] = True
|
|
1052
|
-
if self._is_stdlib_import(stripped_line):
|
|
1053
|
-
current_type = "stdlib"
|
|
1054
|
-
elif self._is_local_import(stripped_line):
|
|
1055
|
-
current_type = "local"
|
|
1056
|
-
else:
|
|
1057
|
-
current_type = "third_party"
|
|
1058
|
-
import_tracker["last_import_type"] = current_type
|
|
1059
|
-
elif stripped_line and not stripped_line.startswith("#"):
|
|
1060
|
-
import_tracker["in_imports"] = False
|
|
1061
|
-
import_tracker["last_import_type"] = None
|
|
1062
|
-
|
|
1063
|
-
@staticmethod
|
|
1064
|
-
@lru_cache(maxsize=256)
|
|
1065
|
-
def _is_stdlib_module(module: str) -> bool:
|
|
1066
|
-
stdlib_modules = {
|
|
1067
|
-
"os",
|
|
1068
|
-
"sys",
|
|
1069
|
-
"re",
|
|
1070
|
-
"json",
|
|
1071
|
-
"datetime",
|
|
1072
|
-
"time",
|
|
1073
|
-
"pathlib",
|
|
1074
|
-
"typing",
|
|
1075
|
-
"collections",
|
|
1076
|
-
"itertools",
|
|
1077
|
-
"functools",
|
|
1078
|
-
"operator",
|
|
1079
|
-
"math",
|
|
1080
|
-
"random",
|
|
1081
|
-
"uuid",
|
|
1082
|
-
"urllib",
|
|
1083
|
-
"http",
|
|
1084
|
-
"html",
|
|
1085
|
-
"xml",
|
|
1086
|
-
"email",
|
|
1087
|
-
"csv",
|
|
1088
|
-
"sqlite3",
|
|
1089
|
-
"subprocess",
|
|
1090
|
-
"threading",
|
|
1091
|
-
"multiprocessing",
|
|
1092
|
-
"asyncio",
|
|
1093
|
-
"contextlib",
|
|
1094
|
-
"dataclasses",
|
|
1095
|
-
"enum",
|
|
1096
|
-
"abc",
|
|
1097
|
-
"io",
|
|
1098
|
-
"tempfile",
|
|
1099
|
-
"shutil",
|
|
1100
|
-
"glob",
|
|
1101
|
-
"pickle",
|
|
1102
|
-
"copy",
|
|
1103
|
-
"heapq",
|
|
1104
|
-
"bisect",
|
|
1105
|
-
"array",
|
|
1106
|
-
"struct",
|
|
1107
|
-
"zlib",
|
|
1108
|
-
"hashlib",
|
|
1109
|
-
"hmac",
|
|
1110
|
-
"secrets",
|
|
1111
|
-
"base64",
|
|
1112
|
-
"binascii",
|
|
1113
|
-
"codecs",
|
|
1114
|
-
"locale",
|
|
1115
|
-
"platform",
|
|
1116
|
-
"socket",
|
|
1117
|
-
"ssl",
|
|
1118
|
-
"ipaddress",
|
|
1119
|
-
"logging",
|
|
1120
|
-
"warnings",
|
|
1121
|
-
"inspect",
|
|
1122
|
-
"ast",
|
|
1123
|
-
"dis",
|
|
1124
|
-
"tokenize",
|
|
1125
|
-
"keyword",
|
|
1126
|
-
"linecache",
|
|
1127
|
-
"traceback",
|
|
1128
|
-
"weakref",
|
|
1129
|
-
"gc",
|
|
1130
|
-
"ctypes",
|
|
1131
|
-
"unittest",
|
|
1132
|
-
"doctest",
|
|
1133
|
-
"pdb",
|
|
1134
|
-
"profile",
|
|
1135
|
-
"cProfile",
|
|
1136
|
-
"timeit",
|
|
1137
|
-
"trace",
|
|
1138
|
-
"calendar",
|
|
1139
|
-
"decimal",
|
|
1140
|
-
"fractions",
|
|
1141
|
-
"statistics",
|
|
1142
|
-
"tomllib",
|
|
1143
|
-
}
|
|
1144
|
-
return module in stdlib_modules
|
|
1145
|
-
|
|
1146
|
-
def _is_stdlib_import(self, stripped_line: str) -> bool:
|
|
1147
|
-
try:
|
|
1148
|
-
if stripped_line.startswith("from "):
|
|
1149
|
-
module = stripped_line.split()[1].split(".")[0]
|
|
1150
|
-
else:
|
|
1151
|
-
module = stripped_line.split()[1].split(".")[0]
|
|
1152
|
-
except IndexError:
|
|
1153
|
-
return False
|
|
1154
|
-
return CodeCleaner._is_stdlib_module(module)
|
|
1155
|
-
|
|
1156
|
-
def _is_local_import(self, stripped_line: str) -> bool:
|
|
1157
|
-
return stripped_line.startswith("from .") or " . " in stripped_line
|
|
1158
|
-
|
|
1159
|
-
def _is_function_end(
|
|
1160
|
-
self, line: str, stripped_line: str, function_tracker: dict[str, t.Any]
|
|
1161
|
-
) -> bool:
|
|
1162
|
-
return (
|
|
1163
|
-
function_tracker["in_function"]
|
|
1164
|
-
and bool(line)
|
|
1165
|
-
and (len(line) - len(stripped_line) <= function_tracker["function_indent"])
|
|
1166
|
-
and (not stripped_line.startswith(("@", "#")))
|
|
1167
|
-
)
|
|
1168
|
-
|
|
1169
|
-
def _should_skip_empty_line(
|
|
1170
|
-
self,
|
|
1171
|
-
line_idx: int,
|
|
1172
|
-
lines: list[str],
|
|
1173
|
-
cleaned_lines: list[str],
|
|
1174
|
-
function_tracker: dict[str, t.Any],
|
|
1175
|
-
import_tracker: dict[str, t.Any],
|
|
1176
|
-
) -> bool:
|
|
1177
|
-
if line_idx > 0 and cleaned_lines and (not cleaned_lines[-1]):
|
|
1178
|
-
return True
|
|
1179
|
-
|
|
1180
|
-
if self._is_import_section_separator(line_idx, lines, import_tracker):
|
|
1181
|
-
return False
|
|
1182
|
-
|
|
1183
|
-
if function_tracker["in_function"]:
|
|
1184
|
-
return self._should_skip_function_empty_line(line_idx, lines)
|
|
1185
|
-
return False
|
|
1186
|
-
|
|
1187
|
-
def _is_import_section_separator(
|
|
1188
|
-
self, line_idx: int, lines: list[str], import_tracker: dict[str, t.Any]
|
|
1189
|
-
) -> bool:
|
|
1190
|
-
if not import_tracker["in_imports"]:
|
|
1191
|
-
return False
|
|
1192
|
-
|
|
1193
|
-
next_line_idx = line_idx + 1
|
|
1194
|
-
while next_line_idx < len(lines) and not lines[next_line_idx].strip():
|
|
1195
|
-
next_line_idx += 1
|
|
1196
|
-
|
|
1197
|
-
if next_line_idx >= len(lines):
|
|
1198
|
-
return False
|
|
1199
|
-
|
|
1200
|
-
next_line = lines[next_line_idx].strip()
|
|
1201
|
-
if not next_line.startswith(("import ", "from ")):
|
|
1202
|
-
return False
|
|
1203
|
-
|
|
1204
|
-
if self._is_stdlib_import(next_line):
|
|
1205
|
-
next_type = "stdlib"
|
|
1206
|
-
elif self._is_local_import(next_line):
|
|
1207
|
-
next_type = "local"
|
|
1208
|
-
else:
|
|
1209
|
-
next_type = "third_party"
|
|
1210
|
-
|
|
1211
|
-
return import_tracker["last_import_type"] != next_type
|
|
1212
|
-
|
|
1213
|
-
def _should_skip_function_empty_line(self, line_idx: int, lines: list[str]) -> bool:
|
|
1214
|
-
next_line_idx = line_idx + 1
|
|
1215
|
-
if next_line_idx >= len(lines):
|
|
1216
|
-
return False
|
|
1217
|
-
next_line = lines[next_line_idx].strip()
|
|
1218
|
-
return not self._is_significant_next_line(next_line)
|
|
1219
|
-
|
|
1220
|
-
def _is_significant_next_line(self, next_line: str) -> bool:
|
|
1221
|
-
if next_line.startswith(("return", "class ", "def ", "async def ", "@")):
|
|
1222
|
-
return True
|
|
1223
|
-
if next_line in ("pass", "break", "continue", "raise"):
|
|
1224
|
-
return True
|
|
1225
|
-
return self._is_special_comment(next_line)
|
|
1226
|
-
|
|
1227
|
-
def _is_special_comment(self, line: str) -> bool:
|
|
1228
|
-
if not line.startswith("#"):
|
|
1229
|
-
return False
|
|
1230
|
-
special_patterns = ("type:", "noqa", "nosec", "pragma:", "pylint:", "mypy:")
|
|
1231
|
-
return any(pattern in line for pattern in special_patterns)
|
|
1232
|
-
|
|
1233
|
-
def _remove_trailing_empty_lines(self, lines: list[str]) -> list[str]:
|
|
1234
|
-
while lines and (not lines[-1]):
|
|
1235
|
-
lines.pop()
|
|
1236
|
-
return lines
|
|
1237
|
-
|
|
1238
|
-
def reformat_code(self, code: str) -> str:
|
|
1239
|
-
from crackerjack.errors import handle_error
|
|
1240
|
-
|
|
1241
|
-
try:
|
|
1242
|
-
import tempfile
|
|
1243
|
-
|
|
1244
|
-
with tempfile.NamedTemporaryFile(
|
|
1245
|
-
suffix=".py", mode="w+", delete=False
|
|
1246
|
-
) as temp:
|
|
1247
|
-
temp_path = Path(temp.name)
|
|
1248
|
-
temp_path.write_text(code)
|
|
1249
|
-
try:
|
|
1250
|
-
result = subprocess.run(
|
|
1251
|
-
["uv", "run", "ruff", "format", str(temp_path)],
|
|
1252
|
-
check=False,
|
|
1253
|
-
capture_output=True,
|
|
1254
|
-
text=True,
|
|
1255
|
-
)
|
|
1256
|
-
if result.returncode == 0:
|
|
1257
|
-
formatted_code = temp_path.read_text()
|
|
1258
|
-
else:
|
|
1259
|
-
self.console.print(
|
|
1260
|
-
f"[bold bright_yellow]⚠️ Ruff formatting failed: {result.stderr}[/bold bright_yellow]"
|
|
1261
|
-
)
|
|
1262
|
-
handle_error(
|
|
1263
|
-
ExecutionError(
|
|
1264
|
-
message="Code formatting failed",
|
|
1265
|
-
error_code=ErrorCode.FORMATTING_ERROR,
|
|
1266
|
-
details=result.stderr,
|
|
1267
|
-
recovery="Check Ruff configuration and formatting rules",
|
|
1268
|
-
),
|
|
1269
|
-
console=self.console,
|
|
1270
|
-
exit_on_error=False,
|
|
1271
|
-
)
|
|
1272
|
-
formatted_code = code
|
|
1273
|
-
except Exception as e:
|
|
1274
|
-
self.console.print(
|
|
1275
|
-
f"[bold bright_red]❌ Error running Ruff: {e}[/bold bright_red]"
|
|
1276
|
-
)
|
|
1277
|
-
handle_error(
|
|
1278
|
-
ExecutionError(
|
|
1279
|
-
message="Error running Ruff",
|
|
1280
|
-
error_code=ErrorCode.FORMATTING_ERROR,
|
|
1281
|
-
details=str(e),
|
|
1282
|
-
recovery="Verify Ruff is installed and configured correctly",
|
|
1283
|
-
),
|
|
1284
|
-
console=self.console,
|
|
1285
|
-
exit_on_error=False,
|
|
1286
|
-
)
|
|
1287
|
-
formatted_code = code
|
|
1288
|
-
finally:
|
|
1289
|
-
with suppress(FileNotFoundError):
|
|
1290
|
-
temp_path.unlink()
|
|
1291
|
-
return formatted_code
|
|
1292
|
-
except Exception as e:
|
|
1293
|
-
self.console.print(
|
|
1294
|
-
f"[bold bright_red]❌ Error during reformatting: {e}[/bold bright_red]"
|
|
1295
|
-
)
|
|
1296
|
-
handle_error(
|
|
1297
|
-
ExecutionError(
|
|
1298
|
-
message="Error during reformatting",
|
|
1299
|
-
error_code=ErrorCode.FORMATTING_ERROR,
|
|
1300
|
-
details=str(e),
|
|
1301
|
-
recovery="Check file permissions and disk space",
|
|
1302
|
-
),
|
|
1303
|
-
console=self.console,
|
|
1304
|
-
)
|
|
1305
|
-
return code
|
|
1306
|
-
|
|
1307
|
-
async def clean_files_async(self, pkg_dir: Path | None) -> None:
|
|
1308
|
-
if pkg_dir is None:
|
|
1309
|
-
return
|
|
1310
|
-
python_files = [
|
|
1311
|
-
file_path
|
|
1312
|
-
for file_path in pkg_dir.rglob("*.py")
|
|
1313
|
-
if not str(file_path.parent).startswith("__")
|
|
1314
|
-
]
|
|
1315
|
-
if not python_files:
|
|
1316
|
-
return
|
|
1317
|
-
max_concurrent = min(len(python_files), 8)
|
|
1318
|
-
semaphore = asyncio.Semaphore(max_concurrent)
|
|
1319
|
-
|
|
1320
|
-
async def clean_with_semaphore(file_path: Path) -> None:
|
|
1321
|
-
async with semaphore:
|
|
1322
|
-
await self.clean_file_async(file_path)
|
|
1323
|
-
|
|
1324
|
-
tasks = [clean_with_semaphore(file_path) for file_path in python_files]
|
|
1325
|
-
await asyncio.gather(*tasks, return_exceptions=True)
|
|
1326
|
-
|
|
1327
|
-
await self._cleanup_cache_directories_async(pkg_dir)
|
|
1328
|
-
|
|
1329
|
-
async def clean_file_async(self, file_path: Path) -> None:
|
|
1330
|
-
from crackerjack.errors import ExecutionError, handle_error
|
|
1331
|
-
|
|
1332
|
-
try:
|
|
1333
|
-
async with aiofiles.open(file_path, encoding="utf-8") as f: # type: ignore[misc]
|
|
1334
|
-
code: str = await f.read() # type: ignore[misc]
|
|
1335
|
-
original_code: str = code
|
|
1336
|
-
cleaning_failed = False
|
|
1337
|
-
try:
|
|
1338
|
-
code = self.remove_line_comments_streaming(code)
|
|
1339
|
-
except Exception as e:
|
|
1340
|
-
self.console.print(
|
|
1341
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to remove line comments from {file_path}: {e}[/bold bright_yellow]"
|
|
1342
|
-
)
|
|
1343
|
-
code = original_code
|
|
1344
|
-
cleaning_failed = True
|
|
1345
|
-
try:
|
|
1346
|
-
code = self.remove_docstrings_streaming(code)
|
|
1347
|
-
except Exception as e:
|
|
1348
|
-
self.console.print(
|
|
1349
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to remove docstrings from {file_path}: {e}[/bold bright_yellow]"
|
|
1350
|
-
)
|
|
1351
|
-
code = original_code
|
|
1352
|
-
cleaning_failed = True
|
|
1353
|
-
try:
|
|
1354
|
-
code = self.remove_extra_whitespace_streaming(code)
|
|
1355
|
-
except Exception as e:
|
|
1356
|
-
self.console.print(
|
|
1357
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to remove extra whitespace from {file_path}: {e}[/bold bright_yellow]"
|
|
1358
|
-
)
|
|
1359
|
-
code = original_code
|
|
1360
|
-
cleaning_failed = True
|
|
1361
|
-
try:
|
|
1362
|
-
code = await self.reformat_code_async(code)
|
|
1363
|
-
except Exception as e:
|
|
1364
|
-
self.console.print(
|
|
1365
|
-
f"[bold bright_yellow]⚠️ Warning: Failed to reformat {file_path}: {e}[/bold bright_yellow]"
|
|
1366
|
-
)
|
|
1367
|
-
code = original_code
|
|
1368
|
-
cleaning_failed = True
|
|
1369
|
-
async with aiofiles.open(file_path, "w", encoding="utf-8") as f: # type: ignore[misc]
|
|
1370
|
-
await f.write(code) # type: ignore[misc]
|
|
1371
|
-
if cleaning_failed:
|
|
1372
|
-
self.console.print(
|
|
1373
|
-
f"[bold yellow]⚡ Partially cleaned:[/bold yellow] [dim bright_white]{file_path}[/dim bright_white]"
|
|
1374
|
-
)
|
|
1375
|
-
else:
|
|
1376
|
-
self.console.print(
|
|
1377
|
-
f"[bold green]✨ Cleaned:[/bold green] [dim bright_white]{file_path}[/dim bright_white]"
|
|
1378
|
-
)
|
|
1379
|
-
except PermissionError as e:
|
|
1380
|
-
self.console.print(
|
|
1381
|
-
f"[red]Failed to clean: {file_path} (Permission denied)[/red]"
|
|
1382
|
-
)
|
|
1383
|
-
handle_error(
|
|
1384
|
-
ExecutionError(
|
|
1385
|
-
message=f"Permission denied while cleaning {file_path}",
|
|
1386
|
-
error_code=ErrorCode.PERMISSION_ERROR,
|
|
1387
|
-
details=str(e),
|
|
1388
|
-
recovery=f"Check file permissions for {file_path} and ensure you have write access",
|
|
1389
|
-
),
|
|
1390
|
-
console=self.console,
|
|
1391
|
-
exit_on_error=False,
|
|
1392
|
-
)
|
|
1393
|
-
except OSError as e:
|
|
1394
|
-
self.console.print(
|
|
1395
|
-
f"[red]Failed to clean: {file_path} (File system error)[/red]"
|
|
1396
|
-
)
|
|
1397
|
-
handle_error(
|
|
1398
|
-
ExecutionError(
|
|
1399
|
-
message=f"File system error while cleaning {file_path}",
|
|
1400
|
-
error_code=ErrorCode.FILE_WRITE_ERROR,
|
|
1401
|
-
details=str(e),
|
|
1402
|
-
recovery=f"Check that {file_path} exists and is not being used by another process",
|
|
1403
|
-
),
|
|
1404
|
-
console=self.console,
|
|
1405
|
-
exit_on_error=False,
|
|
1406
|
-
)
|
|
1407
|
-
except UnicodeDecodeError as e:
|
|
1408
|
-
self.console.print(
|
|
1409
|
-
f"[red]Failed to clean: {file_path} (Encoding error)[/red]"
|
|
1410
|
-
)
|
|
1411
|
-
handle_error(
|
|
1412
|
-
ExecutionError(
|
|
1413
|
-
message=f"Encoding error while cleaning {file_path}",
|
|
1414
|
-
error_code=ErrorCode.FILE_READ_ERROR,
|
|
1415
|
-
details=str(e),
|
|
1416
|
-
recovery=f"Check the file encoding of {file_path} - it may not be UTF-8",
|
|
1417
|
-
),
|
|
1418
|
-
console=self.console,
|
|
1419
|
-
exit_on_error=False,
|
|
1420
|
-
)
|
|
1421
|
-
except Exception as e:
|
|
1422
|
-
self.console.print(f"[red]Unexpected error cleaning {file_path}: {e}[/red]")
|
|
1423
|
-
handle_error(
|
|
1424
|
-
ExecutionError(
|
|
1425
|
-
message=f"Unexpected error while cleaning {file_path}",
|
|
1426
|
-
error_code=ErrorCode.UNEXPECTED_ERROR,
|
|
1427
|
-
details=str(e),
|
|
1428
|
-
recovery="Please report this issue with the full error details",
|
|
1429
|
-
),
|
|
1430
|
-
console=self.console,
|
|
1431
|
-
exit_on_error=False,
|
|
1432
|
-
)
|
|
1433
|
-
|
|
1434
|
-
async def reformat_code_async(self, code: str) -> str:
|
|
1435
|
-
from crackerjack.errors import handle_error
|
|
1436
|
-
|
|
1437
|
-
try:
|
|
1438
|
-
import tempfile
|
|
1439
|
-
|
|
1440
|
-
with tempfile.NamedTemporaryFile(
|
|
1441
|
-
suffix=".py", mode="w+", delete=False
|
|
1442
|
-
) as temp:
|
|
1443
|
-
temp_path = Path(temp.name)
|
|
1444
|
-
async with aiofiles.open(temp_path, "w", encoding="utf-8") as f: # type: ignore[misc]
|
|
1445
|
-
await f.write(code) # type: ignore[misc]
|
|
1446
|
-
try:
|
|
1447
|
-
proc = await asyncio.create_subprocess_exec(
|
|
1448
|
-
"uv",
|
|
1449
|
-
"run",
|
|
1450
|
-
"ruff",
|
|
1451
|
-
"format",
|
|
1452
|
-
str(temp_path),
|
|
1453
|
-
stdout=asyncio.subprocess.PIPE,
|
|
1454
|
-
stderr=asyncio.subprocess.PIPE,
|
|
1455
|
-
)
|
|
1456
|
-
_, stderr = await proc.communicate()
|
|
1457
|
-
if proc.returncode == 0:
|
|
1458
|
-
async with aiofiles.open(temp_path, encoding="utf-8") as f: # type: ignore[misc]
|
|
1459
|
-
formatted_code = await f.read() # type: ignore[misc]
|
|
1460
|
-
else:
|
|
1461
|
-
self.console.print(
|
|
1462
|
-
f"[bold bright_yellow]⚠️ Warning: Ruff format failed with return code {proc.returncode}[/bold bright_yellow]"
|
|
1463
|
-
)
|
|
1464
|
-
if stderr:
|
|
1465
|
-
self.console.print(f"[dim]Ruff stderr: {stderr.decode()}[/dim]")
|
|
1466
|
-
formatted_code = code
|
|
1467
|
-
except Exception as e:
|
|
1468
|
-
self.console.print(
|
|
1469
|
-
f"[bold bright_red]❌ Error running Ruff: {e}[/bold bright_red]"
|
|
1470
|
-
)
|
|
1471
|
-
handle_error(
|
|
1472
|
-
ExecutionError(
|
|
1473
|
-
message="Error running Ruff",
|
|
1474
|
-
error_code=ErrorCode.FORMATTING_ERROR,
|
|
1475
|
-
details=str(e),
|
|
1476
|
-
recovery="Verify Ruff is installed and configured correctly",
|
|
1477
|
-
),
|
|
1478
|
-
console=self.console,
|
|
1479
|
-
exit_on_error=False,
|
|
1480
|
-
)
|
|
1481
|
-
formatted_code = code
|
|
1482
|
-
finally:
|
|
1483
|
-
with suppress(FileNotFoundError):
|
|
1484
|
-
temp_path.unlink()
|
|
1485
|
-
|
|
1486
|
-
return formatted_code
|
|
1487
|
-
except Exception as e:
|
|
1488
|
-
self.console.print(
|
|
1489
|
-
f"[bold bright_red]❌ Error during reformatting: {e}[/bold bright_red]"
|
|
1490
|
-
)
|
|
1491
|
-
handle_error(
|
|
1492
|
-
ExecutionError(
|
|
1493
|
-
message="Error during reformatting",
|
|
1494
|
-
error_code=ErrorCode.FORMATTING_ERROR,
|
|
1495
|
-
details=str(e),
|
|
1496
|
-
recovery="Check file permissions and disk space",
|
|
1497
|
-
),
|
|
1498
|
-
console=self.console,
|
|
1499
|
-
exit_on_error=False,
|
|
1500
|
-
)
|
|
1501
|
-
return code
|
|
1502
|
-
|
|
1503
|
-
async def _cleanup_cache_directories_async(self, pkg_dir: Path) -> None:
|
|
1504
|
-
def cleanup_sync() -> None:
|
|
1505
|
-
with suppress(PermissionError, OSError):
|
|
1506
|
-
pycache_dir = pkg_dir / "__pycache__"
|
|
1507
|
-
if pycache_dir.exists():
|
|
1508
|
-
for cache_file in pycache_dir.iterdir():
|
|
1509
|
-
with suppress(PermissionError, OSError):
|
|
1510
|
-
cache_file.unlink()
|
|
1511
|
-
pycache_dir.rmdir()
|
|
1512
|
-
parent_pycache = pkg_dir.parent / "__pycache__"
|
|
1513
|
-
if parent_pycache.exists():
|
|
1514
|
-
for cache_file in parent_pycache.iterdir():
|
|
1515
|
-
with suppress(PermissionError, OSError):
|
|
1516
|
-
cache_file.unlink()
|
|
1517
|
-
parent_pycache.rmdir()
|
|
1518
|
-
|
|
1519
|
-
loop = asyncio.get_event_loop()
|
|
1520
|
-
await loop.run_in_executor(None, cleanup_sync)
|
|
544
|
+
experimental_hooks: bool = False
|
|
545
|
+
enable_pyrefly: bool = False
|
|
546
|
+
enable_ty: bool = False
|
|
547
|
+
no_git_tags: bool = False
|
|
548
|
+
skip_version_check: bool = False
|
|
1521
549
|
|
|
1522
550
|
|
|
1523
551
|
class ConfigManager(BaseModel, arbitrary_types_allowed=True):
|
|
@@ -2121,7 +1149,30 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
|
|
|
2121
1149
|
|
|
2122
1150
|
return env_vars
|
|
2123
1151
|
|
|
1152
|
+
def _cleanup_legacy_config_files(self) -> None:
|
|
1153
|
+
legacy_config_files = [
|
|
1154
|
+
".pre-commit-config.yaml",
|
|
1155
|
+
".pre-commit-config-ai.yaml",
|
|
1156
|
+
".pre-commit-config-fast.yaml",
|
|
1157
|
+
]
|
|
1158
|
+
removed_files = []
|
|
1159
|
+
for config_file in legacy_config_files:
|
|
1160
|
+
config_path = Path(config_file)
|
|
1161
|
+
if config_path.exists():
|
|
1162
|
+
try:
|
|
1163
|
+
config_path.unlink()
|
|
1164
|
+
removed_files.append(config_file)
|
|
1165
|
+
except OSError as e:
|
|
1166
|
+
self.console.print(
|
|
1167
|
+
f"[yellow]Warning: Could not remove {config_file}: {e}[/yellow]"
|
|
1168
|
+
)
|
|
1169
|
+
if removed_files:
|
|
1170
|
+
self.console.print(
|
|
1171
|
+
f"[dim]🧹 Cleaned up legacy config files: {', '.join(removed_files)}[/dim]"
|
|
1172
|
+
)
|
|
1173
|
+
|
|
2124
1174
|
def update_pkg_configs(self) -> None:
|
|
1175
|
+
self._cleanup_legacy_config_files()
|
|
2125
1176
|
self.config_manager.copy_configs()
|
|
2126
1177
|
installed_pkgs = self.execute_command(
|
|
2127
1178
|
["uv", "pip", "list", "--freeze"], capture_output=True, text=True
|
|
@@ -2146,21 +1197,6 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
|
|
|
2146
1197
|
self.execute_command(["git", "branch", "-m", "main"])
|
|
2147
1198
|
self.execute_command(["git", "add", "pyproject.toml", "uv.lock"])
|
|
2148
1199
|
self.execute_command(["git", "config", "advice.addIgnoredFile", "false"])
|
|
2149
|
-
install_cmd = ["uv", "run", "pre-commit", "install"]
|
|
2150
|
-
if self.options and getattr(self.options, "ai_agent", False):
|
|
2151
|
-
install_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])
|
|
2152
|
-
else:
|
|
2153
|
-
install_cmd.extend(["-c", ".pre-commit-config-fast.yaml"])
|
|
2154
|
-
self.execute_command(install_cmd)
|
|
2155
|
-
push_install_cmd = [
|
|
2156
|
-
"uv",
|
|
2157
|
-
"run",
|
|
2158
|
-
"pre-commit",
|
|
2159
|
-
"install",
|
|
2160
|
-
"--hook-type",
|
|
2161
|
-
"pre-push",
|
|
2162
|
-
]
|
|
2163
|
-
self.execute_command(push_install_cmd)
|
|
2164
1200
|
self.config_manager.update_pyproject_configs()
|
|
2165
1201
|
|
|
2166
1202
|
def run_pre_commit(self) -> None:
|
|
@@ -2196,12 +1232,30 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
|
|
|
2196
1232
|
|
|
2197
1233
|
def _select_precommit_config(self) -> str:
|
|
2198
1234
|
if hasattr(self, "options"):
|
|
2199
|
-
|
|
2200
|
-
|
|
1235
|
+
experimental_hooks = getattr(self.options, "experimental_hooks", False)
|
|
1236
|
+
enable_pyrefly = getattr(self.options, "enable_pyrefly", False)
|
|
1237
|
+
enable_ty = getattr(self.options, "enable_ty", False)
|
|
1238
|
+
enabled_experimental = []
|
|
1239
|
+
if experimental_hooks:
|
|
1240
|
+
enabled_experimental = ["pyrefly", "ty"]
|
|
1241
|
+
else:
|
|
1242
|
+
if enable_pyrefly:
|
|
1243
|
+
enabled_experimental.append("pyrefly")
|
|
1244
|
+
if enable_ty:
|
|
1245
|
+
enabled_experimental.append("ty")
|
|
1246
|
+
if enabled_experimental:
|
|
1247
|
+
mode = "experimental"
|
|
1248
|
+
config_path = generate_config_for_mode(mode, enabled_experimental)
|
|
2201
1249
|
elif getattr(self.options, "comprehensive", False):
|
|
2202
|
-
|
|
1250
|
+
mode = "comprehensive"
|
|
1251
|
+
config_path = generate_config_for_mode(mode)
|
|
1252
|
+
else:
|
|
1253
|
+
mode = "fast"
|
|
1254
|
+
config_path = generate_config_for_mode(mode)
|
|
2203
1255
|
|
|
2204
|
-
|
|
1256
|
+
return str(config_path)
|
|
1257
|
+
config_path = generate_config_for_mode("fast")
|
|
1258
|
+
return str(config_path)
|
|
2205
1259
|
|
|
2206
1260
|
def run_pre_commit_with_analysis(self) -> list[HookResult]:
|
|
2207
1261
|
self.console.print("\n" + "-" * 80)
|
|
@@ -3031,6 +2085,12 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3031
2085
|
|
|
3032
2086
|
self._state_file.write_text(json.dumps(state, indent=2), encoding="utf-8")
|
|
3033
2087
|
|
|
2088
|
+
def _get_state(self) -> dict[str, t.Any]:
|
|
2089
|
+
return self._read_state()
|
|
2090
|
+
|
|
2091
|
+
def _save_state(self, state: dict[str, t.Any]) -> None:
|
|
2092
|
+
self._write_state(state)
|
|
2093
|
+
|
|
3034
2094
|
def _clear_state(self) -> None:
|
|
3035
2095
|
if self._state_file.exists():
|
|
3036
2096
|
from contextlib import suppress
|
|
@@ -3080,6 +2140,79 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3080
2140
|
return data.get("project", {}).get("version", "unknown")
|
|
3081
2141
|
return "unknown"
|
|
3082
2142
|
|
|
2143
|
+
def _create_git_tag(self, version: str | None = None) -> None:
|
|
2144
|
+
if version is None:
|
|
2145
|
+
version = self._get_current_version()
|
|
2146
|
+
if version == "unknown":
|
|
2147
|
+
self.console.print(
|
|
2148
|
+
"[bold yellow]⚠️ Warning: Could not determine version for tagging[/bold yellow]"
|
|
2149
|
+
)
|
|
2150
|
+
return
|
|
2151
|
+
tag_name = f"v{version}"
|
|
2152
|
+
result = self.execute_command(
|
|
2153
|
+
["git", "tag", "-l", tag_name], capture_output=True, text=True
|
|
2154
|
+
)
|
|
2155
|
+
if result.stdout.strip():
|
|
2156
|
+
self.console.print(
|
|
2157
|
+
f"[bold yellow]⚠️ Tag {tag_name} already exists, skipping tag creation[/bold yellow]"
|
|
2158
|
+
)
|
|
2159
|
+
return
|
|
2160
|
+
self.console.print(
|
|
2161
|
+
f"[bold bright_cyan]🏷️ Creating git tag: {tag_name}[/bold bright_cyan]"
|
|
2162
|
+
)
|
|
2163
|
+
package_name = self.pkg_path.stem.lower().replace("-", "_")
|
|
2164
|
+
tag_message = f"Release {package_name} v{version}"
|
|
2165
|
+
self.execute_command(["git", "tag", "-a", tag_name, "-m", tag_message])
|
|
2166
|
+
self.console.print(f"[bold green]✅ Created tag: {tag_name}[/bold green]")
|
|
2167
|
+
|
|
2168
|
+
def _push_git_tags(self) -> None:
|
|
2169
|
+
self.console.print(
|
|
2170
|
+
"[bold bright_cyan]🚀 Pushing tags to remote repository[/bold bright_cyan]"
|
|
2171
|
+
)
|
|
2172
|
+
try:
|
|
2173
|
+
self.execute_command(["git", "push", "origin", "--tags"])
|
|
2174
|
+
self.console.print("[bold green]✅ Tags pushed successfully[/bold green]")
|
|
2175
|
+
except Exception as e:
|
|
2176
|
+
self.console.print(
|
|
2177
|
+
f"[bold yellow]⚠️ Warning: Failed to push tags: {e}[/bold yellow]"
|
|
2178
|
+
)
|
|
2179
|
+
|
|
2180
|
+
def _verify_version_consistency(self) -> bool:
|
|
2181
|
+
current_version = self._get_current_version()
|
|
2182
|
+
if current_version == "unknown":
|
|
2183
|
+
self.console.print(
|
|
2184
|
+
"[bold yellow]⚠️ Warning: Could not determine current version from pyproject.toml[/bold yellow]"
|
|
2185
|
+
)
|
|
2186
|
+
return False
|
|
2187
|
+
try:
|
|
2188
|
+
result = self.execute_command(
|
|
2189
|
+
["git", "describe", "--tags", "--abbrev=0"],
|
|
2190
|
+
capture_output=True,
|
|
2191
|
+
text=True,
|
|
2192
|
+
)
|
|
2193
|
+
latest_tag = result.stdout.strip()
|
|
2194
|
+
if latest_tag.startswith("v"):
|
|
2195
|
+
tag_version = latest_tag[1:]
|
|
2196
|
+
else:
|
|
2197
|
+
tag_version = latest_tag
|
|
2198
|
+
except Exception:
|
|
2199
|
+
self.console.print(
|
|
2200
|
+
"[bold bright_cyan]ℹ️ No git tags found - this appears to be the first release[/bold bright_cyan]"
|
|
2201
|
+
)
|
|
2202
|
+
return True
|
|
2203
|
+
if current_version != tag_version:
|
|
2204
|
+
self.console.print(
|
|
2205
|
+
f"[bold red]❌ Version mismatch detected:[/bold red]\n"
|
|
2206
|
+
f" pyproject.toml version: {current_version}\n"
|
|
2207
|
+
f" Latest git tag version: {tag_version}\n"
|
|
2208
|
+
f" These should match before committing or publishing."
|
|
2209
|
+
)
|
|
2210
|
+
return False
|
|
2211
|
+
self.console.print(
|
|
2212
|
+
f"[bold green]✅ Version consistency verified: {current_version}[/bold green]"
|
|
2213
|
+
)
|
|
2214
|
+
return True
|
|
2215
|
+
|
|
3083
2216
|
def _setup_package(self) -> None:
|
|
3084
2217
|
self.pkg_name = self.pkg_path.stem.lower().replace("-", "_")
|
|
3085
2218
|
self.pkg_dir = self.pkg_path / self.pkg_name
|
|
@@ -3099,17 +2232,147 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3099
2232
|
assert self.project_manager is not None
|
|
3100
2233
|
if not options.no_config_updates:
|
|
3101
2234
|
self.project_manager.update_pkg_configs()
|
|
2235
|
+
self._run_automatic_updates()
|
|
2236
|
+
if self.pkg_path.stem != "crackerjack":
|
|
2237
|
+
self._check_and_update_crackerjack()
|
|
3102
2238
|
result: CompletedProcess[str] = self.execute_command(
|
|
3103
2239
|
["uv", "sync"], capture_output=True, text=True
|
|
3104
2240
|
)
|
|
3105
2241
|
if result.returncode == 0:
|
|
3106
2242
|
self.console.print(
|
|
3107
|
-
"[bold green]✓ Dependencies installed[/bold green]\n"
|
|
2243
|
+
"[bold green]✓ Dependencies installed[/bold green]\n"
|
|
2244
|
+
)
|
|
2245
|
+
else:
|
|
2246
|
+
self.console.print(
|
|
2247
|
+
"\n\n[bold red]❌ UV sync failed. Is UV installed? Run `pipx install uv` and try again.[/bold red]\n\n"
|
|
2248
|
+
)
|
|
2249
|
+
|
|
2250
|
+
def _run_automatic_updates(self) -> None:
|
|
2251
|
+
self.console.print("[dim]🔄 Checking for updates...[/dim]")
|
|
2252
|
+
self._upgrade_dependencies()
|
|
2253
|
+
self._update_hooks_if_needed()
|
|
2254
|
+
|
|
2255
|
+
def _upgrade_dependencies(self) -> None:
|
|
2256
|
+
try:
|
|
2257
|
+
result = self.execute_command(
|
|
2258
|
+
["uv", "sync", "--upgrade"], capture_output=True, text=True
|
|
2259
|
+
)
|
|
2260
|
+
if result.returncode == 0:
|
|
2261
|
+
self._handle_upgrade_success(result)
|
|
2262
|
+
else:
|
|
2263
|
+
self.console.print(
|
|
2264
|
+
f"[yellow]⚠️ Dependency upgrade failed: {result.stderr}[/yellow]"
|
|
3108
2265
|
)
|
|
2266
|
+
except Exception as e:
|
|
2267
|
+
self.console.print(f"[yellow]⚠️ Error upgrading dependencies: {e}[/yellow]")
|
|
2268
|
+
|
|
2269
|
+
def _handle_upgrade_success(
|
|
2270
|
+
self, result: "subprocess.CompletedProcess[str]"
|
|
2271
|
+
) -> None:
|
|
2272
|
+
if "no changes" not in result.stdout.lower():
|
|
2273
|
+
self.console.print("[green]✅ Dependencies upgraded[/green]")
|
|
2274
|
+
self._show_upgrade_summary(result.stdout)
|
|
2275
|
+
else:
|
|
2276
|
+
self.console.print("[dim]✓ Dependencies already up to date[/dim]")
|
|
2277
|
+
|
|
2278
|
+
def _show_upgrade_summary(self, stdout: str) -> None:
|
|
2279
|
+
if stdout.strip():
|
|
2280
|
+
upgrade_lines = [line for line in stdout.split("\n") if "->" in line]
|
|
2281
|
+
if upgrade_lines:
|
|
2282
|
+
self.console.print(f"[dim]{len(upgrade_lines)} packages upgraded[/dim]")
|
|
2283
|
+
|
|
2284
|
+
def _update_hooks_if_needed(self) -> None:
|
|
2285
|
+
import time
|
|
2286
|
+
from pathlib import Path
|
|
2287
|
+
|
|
2288
|
+
marker_file = Path(".crackerjack-hooks-updated")
|
|
2289
|
+
current_time = time.time()
|
|
2290
|
+
week_seconds = 7 * 24 * 60 * 60
|
|
2291
|
+
should_update = True
|
|
2292
|
+
if marker_file.exists():
|
|
2293
|
+
try:
|
|
2294
|
+
last_update = float(marker_file.read_text().strip())
|
|
2295
|
+
if current_time - last_update < week_seconds:
|
|
2296
|
+
should_update = False
|
|
2297
|
+
except (ValueError, OSError):
|
|
2298
|
+
should_update = True
|
|
2299
|
+
if should_update:
|
|
2300
|
+
self._update_precommit_hooks()
|
|
2301
|
+
from contextlib import suppress
|
|
2302
|
+
|
|
2303
|
+
with suppress(OSError):
|
|
2304
|
+
marker_file.write_text(str(current_time))
|
|
2305
|
+
else:
|
|
2306
|
+
self.console.print("[dim]✓ Pre-commit hooks recently updated[/dim]")
|
|
2307
|
+
|
|
2308
|
+
def _update_precommit_hooks(self) -> None:
|
|
2309
|
+
try:
|
|
2310
|
+
result = self.execute_command(
|
|
2311
|
+
["uv", "run", "pre-commit", "autoupdate"],
|
|
2312
|
+
capture_output=True,
|
|
2313
|
+
text=True,
|
|
2314
|
+
)
|
|
2315
|
+
if result.returncode == 0:
|
|
2316
|
+
if "updated" in result.stdout.lower():
|
|
2317
|
+
self.console.print("[green]✅ Pre-commit hooks updated[/green]")
|
|
2318
|
+
update_lines = [
|
|
2319
|
+
line for line in result.stdout.split("\n") if "->" in line
|
|
2320
|
+
]
|
|
2321
|
+
if update_lines:
|
|
2322
|
+
self.console.print(
|
|
2323
|
+
f"[dim]{len(update_lines)} hooks updated[/dim]"
|
|
2324
|
+
)
|
|
2325
|
+
else:
|
|
2326
|
+
self.console.print(
|
|
2327
|
+
"[dim]✓ Pre-commit hooks already up to date[/dim]"
|
|
2328
|
+
)
|
|
3109
2329
|
else:
|
|
3110
2330
|
self.console.print(
|
|
3111
|
-
"
|
|
2331
|
+
f"[yellow]⚠️ Pre-commit update failed: {result.stderr}[/yellow]"
|
|
3112
2332
|
)
|
|
2333
|
+
except Exception as e:
|
|
2334
|
+
self.console.print(
|
|
2335
|
+
f"[yellow]⚠️ Error updating pre-commit hooks: {e}[/yellow]"
|
|
2336
|
+
)
|
|
2337
|
+
|
|
2338
|
+
def _check_and_update_crackerjack(self) -> None:
|
|
2339
|
+
try:
|
|
2340
|
+
import tomllib
|
|
2341
|
+
from pathlib import Path
|
|
2342
|
+
|
|
2343
|
+
pyproject_path = Path("pyproject.toml")
|
|
2344
|
+
if not pyproject_path.exists():
|
|
2345
|
+
return
|
|
2346
|
+
with pyproject_path.open("rb") as f:
|
|
2347
|
+
config = tomllib.load(f)
|
|
2348
|
+
dependencies = config.get("project", {}).get("dependencies", [])
|
|
2349
|
+
dev_dependencies = config.get("dependency-groups", {}).get("dev", [])
|
|
2350
|
+
has_crackerjack = any(
|
|
2351
|
+
dep.startswith("crackerjack") for dep in dependencies + dev_dependencies
|
|
2352
|
+
)
|
|
2353
|
+
if has_crackerjack:
|
|
2354
|
+
result = self.execute_command(
|
|
2355
|
+
["uv", "sync", "--upgrade", "--upgrade-package", "crackerjack"],
|
|
2356
|
+
capture_output=True,
|
|
2357
|
+
text=True,
|
|
2358
|
+
)
|
|
2359
|
+
if result.returncode == 0:
|
|
2360
|
+
if "crackerjack" in result.stdout:
|
|
2361
|
+
self.console.print(
|
|
2362
|
+
"[green]✅ Crackerjack upgraded to latest version[/green]"
|
|
2363
|
+
)
|
|
2364
|
+
else:
|
|
2365
|
+
self.console.print(
|
|
2366
|
+
"[dim]✓ Crackerjack already up to date[/dim]"
|
|
2367
|
+
)
|
|
2368
|
+
else:
|
|
2369
|
+
self.console.print(
|
|
2370
|
+
f"[yellow]⚠️ Crackerjack update check failed: {result.stderr}[/yellow]"
|
|
2371
|
+
)
|
|
2372
|
+
except Exception as e:
|
|
2373
|
+
self.console.print(
|
|
2374
|
+
f"[yellow]⚠️ Error checking crackerjack updates: {e}[/yellow]"
|
|
2375
|
+
)
|
|
3113
2376
|
|
|
3114
2377
|
def _clean_project(self, options: t.Any) -> None:
|
|
3115
2378
|
assert self.code_cleaner is not None
|
|
@@ -3416,37 +2679,103 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3416
2679
|
else:
|
|
3417
2680
|
self._handle_test_success(options)
|
|
3418
2681
|
|
|
2682
|
+
def _prompt_version_selection(self) -> str:
|
|
2683
|
+
from rich.prompt import Prompt
|
|
2684
|
+
|
|
2685
|
+
if self.options and getattr(self.options, "ai_agent", False):
|
|
2686
|
+
self.console.print(
|
|
2687
|
+
"[dim]AI agent mode: defaulting to patch version bump[/dim]"
|
|
2688
|
+
)
|
|
2689
|
+
return "patch"
|
|
2690
|
+
self.console.print(
|
|
2691
|
+
"\n[bold bright_yellow]📦 VERSION SELECTION[/bold bright_yellow]"
|
|
2692
|
+
)
|
|
2693
|
+
self.console.print("[dim]Select the type of version bump to perform:[/dim]\n")
|
|
2694
|
+
choices = {
|
|
2695
|
+
"1": ("patch", "Bug fixes and minor changes (0.1.0 → 0.1.1)"),
|
|
2696
|
+
"2": ("minor", "New features, backwards compatible (0.1.0 → 0.2.0)"),
|
|
2697
|
+
"3": ("major", "Breaking changes, major updates (0.1.0 → 1.0.0)"),
|
|
2698
|
+
}
|
|
2699
|
+
for key, (bump_type, description) in choices.items():
|
|
2700
|
+
self.console.print(
|
|
2701
|
+
f" [bold bright_cyan]{key}[/bold bright_cyan] {bump_type:<6} - {description}"
|
|
2702
|
+
)
|
|
2703
|
+
while True:
|
|
2704
|
+
choice = Prompt.ask(
|
|
2705
|
+
"\n[bold]Select version bump type",
|
|
2706
|
+
choices=list(choices.keys()),
|
|
2707
|
+
default="1",
|
|
2708
|
+
show_choices=False,
|
|
2709
|
+
)
|
|
2710
|
+
if choice in choices:
|
|
2711
|
+
selected_type = choices[choice][0]
|
|
2712
|
+
self.console.print(
|
|
2713
|
+
f"[green]✓ Selected: {selected_type} version bump[/green]"
|
|
2714
|
+
)
|
|
2715
|
+
return selected_type
|
|
2716
|
+
else:
|
|
2717
|
+
self.console.print(
|
|
2718
|
+
"[red]Invalid choice. Please select 1, 2, or 3.[/red]"
|
|
2719
|
+
)
|
|
2720
|
+
|
|
3419
2721
|
def _bump_version(self, options: OptionsProtocol) -> None:
|
|
2722
|
+
if options.publish and str(options.publish) == "interactive":
|
|
2723
|
+
return self._handle_interactive_version_selection(options)
|
|
3420
2724
|
for option in (options.publish, options.bump):
|
|
3421
2725
|
if option:
|
|
3422
2726
|
version_type = str(option)
|
|
3423
2727
|
if self._has_version_been_bumped(version_type):
|
|
3424
|
-
self.
|
|
3425
|
-
|
|
3426
|
-
|
|
3427
|
-
|
|
3428
|
-
self.console.print("-" * 80 + "\n")
|
|
2728
|
+
self._display_version_already_bumped_message(version_type)
|
|
2729
|
+
return
|
|
2730
|
+
self._display_version_bump_message(option)
|
|
2731
|
+
if not self._confirm_version_bump_if_needed(option, version_type):
|
|
3429
2732
|
return
|
|
3430
|
-
self.console.print("\n" + "-" * 80)
|
|
3431
|
-
self.console.print(
|
|
3432
|
-
f"[bold bright_magenta]📦 VERSION[/bold bright_magenta] [bold bright_white]Bumping {option} version[/bold bright_white]"
|
|
3433
|
-
)
|
|
3434
|
-
self.console.print("-" * 80 + "\n")
|
|
3435
|
-
if version_type in ("minor", "major"):
|
|
3436
|
-
from rich.prompt import Confirm
|
|
3437
|
-
|
|
3438
|
-
if not Confirm.ask(
|
|
3439
|
-
f"Are you sure you want to bump the {option} version?",
|
|
3440
|
-
default=False,
|
|
3441
|
-
):
|
|
3442
|
-
self.console.print(
|
|
3443
|
-
f"[bold yellow]⏭️ Skipping {option} version bump[/bold yellow]"
|
|
3444
|
-
)
|
|
3445
|
-
return
|
|
3446
2733
|
self.execute_command(["uv", "version", "--bump", option])
|
|
3447
2734
|
self._mark_version_bumped(version_type)
|
|
2735
|
+
if not options.no_git_tags:
|
|
2736
|
+
self._create_git_tag()
|
|
3448
2737
|
break
|
|
3449
2738
|
|
|
2739
|
+
def _handle_interactive_version_selection(self, options: OptionsProtocol) -> None:
|
|
2740
|
+
selected_version = self._prompt_version_selection()
|
|
2741
|
+
from crackerjack.__main__ import BumpOption
|
|
2742
|
+
|
|
2743
|
+
options_dict = vars(options).copy()
|
|
2744
|
+
options_dict["publish"] = BumpOption(selected_version)
|
|
2745
|
+
from types import SimpleNamespace
|
|
2746
|
+
|
|
2747
|
+
temp_options = SimpleNamespace(**options_dict)
|
|
2748
|
+
|
|
2749
|
+
return self._bump_version(temp_options) # type: ignore[arg-type]
|
|
2750
|
+
|
|
2751
|
+
def _display_version_already_bumped_message(self, version_type: str) -> None:
|
|
2752
|
+
self.console.print("\n" + "-" * 80)
|
|
2753
|
+
self.console.print(
|
|
2754
|
+
f"[bold yellow]📦 VERSION[/bold yellow] [bold bright_white]Version already bumped ({version_type}), skipping to avoid duplicate bump[/bold bright_white]"
|
|
2755
|
+
)
|
|
2756
|
+
self.console.print("-" * 80 + "\n")
|
|
2757
|
+
|
|
2758
|
+
def _display_version_bump_message(self, option: t.Any) -> None:
|
|
2759
|
+
self.console.print("\n" + "-" * 80)
|
|
2760
|
+
self.console.print(
|
|
2761
|
+
f"[bold bright_magenta]📦 VERSION[/bold bright_magenta] [bold bright_white]Bumping {option} version[/bold bright_white]"
|
|
2762
|
+
)
|
|
2763
|
+
self.console.print("-" * 80 + "\n")
|
|
2764
|
+
|
|
2765
|
+
def _confirm_version_bump_if_needed(self, option: t.Any, version_type: str) -> bool:
|
|
2766
|
+
if version_type in ("minor", "major"):
|
|
2767
|
+
from rich.prompt import Confirm
|
|
2768
|
+
|
|
2769
|
+
if not Confirm.ask(
|
|
2770
|
+
f"Are you sure you want to bump the {option} version?",
|
|
2771
|
+
default=False,
|
|
2772
|
+
):
|
|
2773
|
+
self.console.print(
|
|
2774
|
+
f"[bold yellow]⏭️ Skipping {option} version bump[/bold yellow]"
|
|
2775
|
+
)
|
|
2776
|
+
return False
|
|
2777
|
+
return True
|
|
2778
|
+
|
|
3450
2779
|
def _validate_authentication_setup(self) -> None:
|
|
3451
2780
|
import os
|
|
3452
2781
|
import shutil
|
|
@@ -3540,6 +2869,281 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3540
2869
|
|
|
3541
2870
|
return cmd
|
|
3542
2871
|
|
|
2872
|
+
def _publish_with_retry(self) -> None:
|
|
2873
|
+
max_retries = 2
|
|
2874
|
+
for attempt in range(max_retries):
|
|
2875
|
+
try:
|
|
2876
|
+
result = self._attempt_publish()
|
|
2877
|
+
if result.returncode == 0:
|
|
2878
|
+
self._verify_pypi_upload()
|
|
2879
|
+
return
|
|
2880
|
+
if not self._handle_publish_failure(result, attempt, max_retries):
|
|
2881
|
+
raise SystemExit(1)
|
|
2882
|
+
except SystemExit:
|
|
2883
|
+
if attempt < max_retries - 1:
|
|
2884
|
+
continue
|
|
2885
|
+
raise
|
|
2886
|
+
|
|
2887
|
+
def _attempt_publish(self) -> "subprocess.CompletedProcess[str]":
|
|
2888
|
+
self._validate_authentication_setup()
|
|
2889
|
+
publish_cmd = self._build_publish_command()
|
|
2890
|
+
self.console.print("[dim]📤 Uploading package to PyPI...[/dim]")
|
|
2891
|
+
import subprocess
|
|
2892
|
+
import time
|
|
2893
|
+
from threading import Thread
|
|
2894
|
+
|
|
2895
|
+
from rich.live import Live
|
|
2896
|
+
from rich.spinner import Spinner
|
|
2897
|
+
|
|
2898
|
+
result: subprocess.CompletedProcess[str] | None = None
|
|
2899
|
+
start_time = time.time()
|
|
2900
|
+
|
|
2901
|
+
def run_publish() -> None:
|
|
2902
|
+
nonlocal result
|
|
2903
|
+
result = self.execute_command(publish_cmd, capture_output=True, text=True)
|
|
2904
|
+
|
|
2905
|
+
publish_thread = Thread(target=run_publish)
|
|
2906
|
+
publish_thread.start()
|
|
2907
|
+
|
|
2908
|
+
elapsed_time = 0
|
|
2909
|
+
while publish_thread.is_alive():
|
|
2910
|
+
elapsed_time = time.time() - start_time
|
|
2911
|
+
|
|
2912
|
+
if elapsed_time < 5:
|
|
2913
|
+
text = "[dim]📤 Uploading to PyPI...[/dim]"
|
|
2914
|
+
elif elapsed_time < 15:
|
|
2915
|
+
text = "[dim]📤 Uploading to PyPI... (this may take a moment)[/dim]"
|
|
2916
|
+
else:
|
|
2917
|
+
text = "[dim]📤 Uploading to PyPI... (large package or slow connection)[/dim]"
|
|
2918
|
+
|
|
2919
|
+
spinner = Spinner("dots", text=text)
|
|
2920
|
+
with Live(spinner, refresh_per_second=10, transient=True):
|
|
2921
|
+
time.sleep(0.5)
|
|
2922
|
+
|
|
2923
|
+
if not publish_thread.is_alive():
|
|
2924
|
+
break
|
|
2925
|
+
|
|
2926
|
+
publish_thread.join()
|
|
2927
|
+
|
|
2928
|
+
elapsed_time = time.time() - start_time
|
|
2929
|
+
|
|
2930
|
+
if result and result.returncode == 0:
|
|
2931
|
+
self.console.print(
|
|
2932
|
+
f"[green]✅ Package uploaded successfully! ({elapsed_time:.1f}s)[/green]"
|
|
2933
|
+
)
|
|
2934
|
+
elif result and result.returncode != 0:
|
|
2935
|
+
self.console.print(f"[red]❌ Upload failed after {elapsed_time:.1f}s[/red]")
|
|
2936
|
+
if result.stdout:
|
|
2937
|
+
self.console.print(f"[dim]stdout: {result.stdout}[/dim]")
|
|
2938
|
+
if result.stderr:
|
|
2939
|
+
self.console.print(f"[red]stderr: {result.stderr}[/red]")
|
|
2940
|
+
|
|
2941
|
+
if result is None:
|
|
2942
|
+
return subprocess.CompletedProcess(
|
|
2943
|
+
args=publish_cmd,
|
|
2944
|
+
returncode=1,
|
|
2945
|
+
stdout="",
|
|
2946
|
+
stderr="Thread execution failed",
|
|
2947
|
+
)
|
|
2948
|
+
|
|
2949
|
+
return result
|
|
2950
|
+
|
|
2951
|
+
def _verify_pypi_upload(self) -> None:
    """Confirm the just-published release is visible on PyPI.

    Skipped entirely in AI-agent mode to keep automated runs quiet.
    Sleeps briefly before polling because the PyPI index is eventually
    consistent, then delegates the retry loop to
    ``_retry_pypi_verification``.
    """
    # Non-interactive AI-agent runs skip the verification chatter.
    if self.options and getattr(self.options, "ai_agent", False):
        return
    package_name = self._get_package_name()
    current_version = self._get_current_version()
    self.console.print(
        f"[dim]🔍 Verifying upload of {package_name} v{current_version}...[/dim]"
    )
    # Give the index a moment to catch up before the first lookup.
    # (time is imported at module level; the former local import was redundant.)
    time.sleep(2)
    self._retry_pypi_verification(package_name, current_version)
|
|
2963
|
+
|
|
2964
|
+
def _retry_pypi_verification(self, package_name: str, current_version: str) -> None:
    """Poll PyPI up to three times for the new release, reporting progress.

    Between attempts we sleep 5 seconds; transient lookup errors are
    retried the same way.  After the final attempt we only inform the
    user — a release that is not yet indexed is normal and is not
    treated as a failure.
    """
    max_attempts = 3
    for attempt in range(max_attempts):
        try:
            if self._check_pypi_package_exists(package_name, current_version):
                self._show_pypi_success(package_name, current_version)
                return
            if attempt < max_attempts - 1:
                self._show_pypi_retry_message(attempt, max_attempts)
                time.sleep(5)
            else:
                # Out of attempts: report and stop without failing the run.
                self._show_pypi_not_visible(package_name, current_version)
                return
        except Exception as e:
            # Non-404 HTTP errors and network hiccups land here; retry
            # while attempts remain, otherwise report and give up.
            if attempt < max_attempts - 1:
                self._show_pypi_error_retry(attempt, max_attempts, e)
                time.sleep(5)
            else:
                self._show_pypi_final_error(package_name, current_version, e)
                return
|
|
2988
|
+
|
|
2989
|
+
def _check_pypi_package_exists(
    self, package_name: str, current_version: str
) -> bool:
    """Return True when *package_name* at *current_version* is on PyPI.

    Queries PyPI's JSON API for the exact release.  A 404 means the
    release is not (yet) indexed and yields False; any other HTTP error
    propagates to the caller.
    """
    import json
    import urllib.error
    import urllib.request

    release_url = f"https://pypi.org/pypi/{package_name}/{current_version}/json"
    try:
        # URL is built from our own metadata with a fixed https scheme.
        with urllib.request.urlopen(release_url, timeout=10) as response:  # nosec B310
            payload = json.loads(response.read().decode())
    except urllib.error.HTTPError as exc:
        if exc.code == 404:
            return False
        raise
    return payload.get("info", {}).get("version") == current_version
|
|
3005
|
+
|
|
3006
|
+
def _show_pypi_success(self, package_name: str, current_version: str) -> None:
    """Report a verified PyPI release and print its project URL."""
    release_page = f"https://pypi.org/project/{package_name}/{current_version}/"
    self.console.print(
        f"[green]✅ Verified: {package_name} v{current_version} is available on PyPI![/green]"
    )
    self.console.print(f"[dim] 📦 Package URL: {release_page}[/dim]")
|
|
3012
|
+
|
|
3013
|
+
def _show_pypi_retry_message(self, attempt: int, max_attempts: int) -> None:
    """Tell the user the release is not visible yet and a retry follows."""
    notice = (
        f"[yellow]⏳ Package not yet available on PyPI "
        f"(attempt {attempt + 1}/{max_attempts}), retrying...[/yellow]"
    )
    self.console.print(notice)
|
|
3017
|
+
|
|
3018
|
+
def _show_pypi_not_visible(self, package_name: str, current_version: str) -> None:
    """Explain that the upload succeeded but PyPI has not indexed it yet."""
    check_url = f"https://pypi.org/project/{package_name}/{current_version}/"
    self.console.print(
        "[yellow]⚠️ Package uploaded but not yet visible on PyPI (this is normal)[/yellow]"
    )
    self.console.print(f"[dim] Check later at: {check_url}[/dim]")
|
|
3025
|
+
|
|
3026
|
+
def _show_pypi_error_retry(
    self, attempt: int, max_attempts: int, error: Exception
) -> None:
    """Report a transient PyPI lookup error before the next retry."""
    notice = f"[yellow]⏳ Error checking PyPI (attempt {attempt + 1}/{max_attempts}): {error}[/yellow]"
    self.console.print(notice)
|
|
3032
|
+
|
|
3033
|
+
def _show_pypi_final_error(
    self, package_name: str, current_version: str, error: Exception
) -> None:
    """Report that verification gave up and point at the project page."""
    manual_url = f"https://pypi.org/project/{package_name}/{current_version}/"
    self.console.print(f"[yellow]⚠️ Could not verify PyPI upload: {error}[/yellow]")
    self.console.print(f"[dim] Check manually at: {manual_url}[/dim]")
|
|
3040
|
+
|
|
3041
|
+
def _get_package_name(self) -> str:
|
|
3042
|
+
import tomllib
|
|
3043
|
+
from pathlib import Path
|
|
3044
|
+
|
|
3045
|
+
pyproject_path = Path("pyproject.toml")
|
|
3046
|
+
if pyproject_path.exists():
|
|
3047
|
+
with pyproject_path.open("rb") as f:
|
|
3048
|
+
data = tomllib.load(f)
|
|
3049
|
+
return data.get("project", {}).get("name", "unknown")
|
|
3050
|
+
return "unknown"
|
|
3051
|
+
|
|
3052
|
+
def _handle_publish_failure(
    self, result: "subprocess.CompletedProcess[str]", attempt: int, max_retries: int
) -> bool:
    """Dispatch a failed publish attempt.

    Authentication failures may trigger a token prompt and retry; any
    other failure just echoes the command output.  Returns True when the
    caller should retry the publish.
    """
    if not self._is_auth_error(result):
        self._handle_non_auth_error(result)
        return False
    return self._handle_auth_error(attempt, max_retries)
|
|
3060
|
+
|
|
3061
|
+
def _handle_auth_error(self, attempt: int, max_retries: int) -> bool:
    """Handle an auth failure; prompt for a token while retries remain.

    On the final attempt, print the authentication help instead and
    return False so the caller stops retrying.
    """
    retries_remaining = attempt < max_retries - 1
    if not retries_remaining:
        self._display_authentication_help()
        return False
    self.console.print(
        f"[yellow]⚠️ Authentication failed (attempt {attempt + 1}/{max_retries})[/yellow]"
    )
    return self._prompt_for_token()
|
|
3069
|
+
|
|
3070
|
+
def _handle_non_auth_error(
    self, result: "subprocess.CompletedProcess[str]"
) -> None:
    """Echo the failed command's captured stdout, then stderr."""
    for captured_stream in (result.stdout, result.stderr):
        self.console.print(captured_stream)
|
|
3075
|
+
|
|
3076
|
+
def _is_auth_error(self, result: "subprocess.CompletedProcess[str]") -> bool:
|
|
3077
|
+
error_text = (result.stdout + result.stderr).lower()
|
|
3078
|
+
auth_indicators = (
|
|
3079
|
+
"authentication",
|
|
3080
|
+
"unauthorized",
|
|
3081
|
+
"403",
|
|
3082
|
+
"401",
|
|
3083
|
+
"invalid credentials",
|
|
3084
|
+
"token",
|
|
3085
|
+
"password",
|
|
3086
|
+
"username",
|
|
3087
|
+
)
|
|
3088
|
+
return any(indicator in error_text for indicator in auth_indicators)
|
|
3089
|
+
|
|
3090
|
+
def _prompt_for_token(self) -> bool:
    """Interactively collect a PyPI token and stash it for publishing.

    Prefers the system keyring (via the ``keyring`` CLI); falls back to
    the ``UV_PUBLISH_TOKEN`` environment variable when keyring is absent
    or fails.  Returns True when a token was accepted and stored, False
    otherwise.  Never prompts in AI-agent mode.
    """
    import getpass
    import os
    import shutil

    if self.options and getattr(self.options, "ai_agent", False):
        return False

    def fall_back_to_env(token: str, message: str) -> None:
        # Environment-variable fallback consumed by the publish command.
        os.environ["UV_PUBLISH_TOKEN"] = token
        self.console.print(message)

    self.console.print("\n[bold yellow]🔐 PyPI Token Required[/bold yellow]")
    self.console.print(
        "[dim]Please enter your PyPI token (starts with 'pypi-'):[/dim]"
    )
    try:
        token = getpass.getpass("PyPI Token: ")
        if not token or not token.startswith("pypi-"):
            self.console.print(
                "[red]❌ Invalid token format. Token must start with 'pypi-'[/red]"
            )
            return False
        if not shutil.which("keyring"):
            fall_back_to_env(
                token,
                "[yellow]⚠️ Keyring not available, using environment variable[/yellow]",
            )
            return True
        try:
            stored = self.execute_command(
                [
                    "keyring",
                    "set",
                    "https://upload.pypi.org/legacy/",
                    "__token__",
                ],
                input=token,
                capture_output=True,
                text=True,
            )
            if stored.returncode == 0:
                self.console.print("[green]✅ Token stored in keyring[/green]")
            else:
                fall_back_to_env(
                    token,
                    "[yellow]⚠️ Keyring storage failed, using environment variable[/yellow]",
                )
        except Exception:
            fall_back_to_env(
                token,
                "[yellow]⚠️ Keyring storage failed, using environment variable[/yellow]",
            )
        return True
    except KeyboardInterrupt:
        self.console.print("\n[yellow]⚠️ Token entry cancelled[/yellow]")
        return False
    except Exception as e:
        self.console.print(f"[red]❌ Error storing token: {e}[/red]")
        return False
|
|
3146
|
+
|
|
3543
3147
|
def _display_authentication_help(self) -> None:
|
|
3544
3148
|
self.console.print(
|
|
3545
3149
|
"\n[bold bright_red]❌ Publish failed. Run crackerjack again to retry publishing without re-bumping version.[/bold bright_red]"
|
|
@@ -3569,28 +3173,36 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3569
3173
|
"[bold bright_cyan]🚀 PUBLISH[/bold bright_cyan] [bold bright_white]Building and publishing package[/bold bright_white]"
|
|
3570
3174
|
)
|
|
3571
3175
|
self.console.print("-" * 80 + "\n")
|
|
3572
|
-
|
|
3573
|
-
|
|
3574
|
-
|
|
3575
|
-
|
|
3576
|
-
|
|
3577
|
-
|
|
3578
|
-
|
|
3579
|
-
|
|
3176
|
+
if not options.skip_version_check:
|
|
3177
|
+
if not self._verify_version_consistency():
|
|
3178
|
+
self.console.print(
|
|
3179
|
+
"[bold red]❌ Publishing aborted due to version mismatch. Please ensure pyproject.toml version matches git tag.[/bold red]"
|
|
3180
|
+
)
|
|
3181
|
+
raise SystemExit(1)
|
|
3182
|
+
state = self._get_state()
|
|
3183
|
+
if not state.get("build_completed", False):
|
|
3184
|
+
build = self.execute_command(
|
|
3185
|
+
["uv", "build"], capture_output=True, text=True
|
|
3580
3186
|
)
|
|
3581
|
-
|
|
3582
|
-
|
|
3583
|
-
|
|
3584
|
-
|
|
3585
|
-
|
|
3586
|
-
|
|
3587
|
-
|
|
3187
|
+
self.console.print(build.stdout)
|
|
3188
|
+
if build.returncode > 0:
|
|
3189
|
+
self.console.print(build.stderr)
|
|
3190
|
+
self.console.print(
|
|
3191
|
+
"[bold bright_red]❌ Build failed. Please fix errors.[/bold bright_red]"
|
|
3192
|
+
)
|
|
3193
|
+
raise SystemExit(1)
|
|
3194
|
+
state["build_completed"] = True
|
|
3195
|
+
self._save_state(state)
|
|
3196
|
+
else:
|
|
3588
3197
|
self.console.print(
|
|
3589
|
-
"
|
|
3198
|
+
"[dim]📦 Using existing build artifacts (retry mode)[/dim]"
|
|
3590
3199
|
)
|
|
3591
|
-
|
|
3592
|
-
|
|
3593
|
-
|
|
3200
|
+
self._publish_with_retry()
|
|
3201
|
+
self._mark_publish_completed()
|
|
3202
|
+
self._clear_state()
|
|
3203
|
+
self.console.print(
|
|
3204
|
+
"\n[bold bright_green]🏆 Package published successfully![/bold bright_green]"
|
|
3205
|
+
)
|
|
3594
3206
|
|
|
3595
3207
|
def _analyze_git_changes(self) -> dict[str, t.Any]:
|
|
3596
3208
|
diff_result = self._get_git_diff_output()
|
|
@@ -3750,6 +3362,12 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3750
3362
|
"[bold bright_white]📝 COMMIT[/bold bright_white] [bold bright_white]Saving changes to git[/bold bright_white]"
|
|
3751
3363
|
)
|
|
3752
3364
|
self.console.print("-" * 80 + "\n")
|
|
3365
|
+
if not options.skip_version_check:
|
|
3366
|
+
if not self._verify_version_consistency():
|
|
3367
|
+
self.console.print(
|
|
3368
|
+
"[bold red]❌ Commit aborted due to version mismatch. Please ensure pyproject.toml version matches git tag.[/bold red]"
|
|
3369
|
+
)
|
|
3370
|
+
raise SystemExit(1)
|
|
3753
3371
|
changes = self._analyze_git_changes()
|
|
3754
3372
|
if changes["total_changes"] > 0:
|
|
3755
3373
|
self.console.print("[dim]🔍 Analyzing changes...[/dim]\n")
|
|
@@ -3787,18 +3405,30 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3787
3405
|
["git", "commit", "-m", commit_msg, "--no-verify", "--", "."]
|
|
3788
3406
|
)
|
|
3789
3407
|
self.execute_command(["git", "push", "origin", "main", "--no-verify"])
|
|
3408
|
+
self._push_git_tags()
|
|
3790
3409
|
|
|
3791
3410
|
def _update_precommit(self, options: OptionsProtocol) -> None:
|
|
3792
3411
|
if options.update_precommit:
|
|
3412
|
+
self.console.print(
|
|
3413
|
+
"\n[bold yellow]⚠️ DEPRECATION WARNING[/bold yellow]: The --update-precommit (-u) flag is deprecated.\n"
|
|
3414
|
+
" Pre-commit hooks are now updated automatically on a weekly basis.\n"
|
|
3415
|
+
" This manual update will still work but is no longer needed.\n"
|
|
3416
|
+
)
|
|
3793
3417
|
self.console.print("\n" + "-" * 80)
|
|
3794
3418
|
self.console.print(
|
|
3795
3419
|
"[bold bright_blue]🔄 UPDATE[/bold bright_blue] [bold bright_white]Updating pre-commit hooks[/bold bright_white]"
|
|
3796
3420
|
)
|
|
3797
3421
|
self.console.print("-" * 80 + "\n")
|
|
3798
3422
|
if self.pkg_path.stem == "crackerjack":
|
|
3799
|
-
|
|
3800
|
-
|
|
3801
|
-
|
|
3423
|
+
config_path = self.project_manager._select_precommit_config()
|
|
3424
|
+
update_cmd = [
|
|
3425
|
+
"uv",
|
|
3426
|
+
"run",
|
|
3427
|
+
"pre-commit",
|
|
3428
|
+
"autoupdate",
|
|
3429
|
+
"-c",
|
|
3430
|
+
config_path,
|
|
3431
|
+
]
|
|
3802
3432
|
self.execute_command(update_cmd)
|
|
3803
3433
|
else:
|
|
3804
3434
|
self.project_manager.update_precommit_hooks()
|
|
@@ -3865,6 +3495,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3865
3495
|
"[bold bright_magenta]🔍 COMPREHENSIVE QUALITY[/bold bright_magenta] [bold bright_white]Running all quality checks before publish/commit[/bold bright_white]"
|
|
3866
3496
|
)
|
|
3867
3497
|
self.console.print("-" * 80 + "\n")
|
|
3498
|
+
config_path = self.project_manager._select_precommit_config()
|
|
3868
3499
|
cmd = [
|
|
3869
3500
|
"uv",
|
|
3870
3501
|
"run",
|
|
@@ -3873,7 +3504,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3873
3504
|
"--all-files",
|
|
3874
3505
|
"--hook-stage=manual",
|
|
3875
3506
|
"-c",
|
|
3876
|
-
|
|
3507
|
+
config_path,
|
|
3877
3508
|
]
|
|
3878
3509
|
result = self.execute_command(cmd)
|
|
3879
3510
|
if result.returncode > 0:
|
|
@@ -3911,6 +3542,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3911
3542
|
)
|
|
3912
3543
|
self.console.print("-" * 80 + "\n")
|
|
3913
3544
|
|
|
3545
|
+
config_path = self.project_manager._select_precommit_config()
|
|
3914
3546
|
cmd = [
|
|
3915
3547
|
"uv",
|
|
3916
3548
|
"run",
|
|
@@ -3919,7 +3551,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3919
3551
|
"--all-files",
|
|
3920
3552
|
"--hook-stage=manual",
|
|
3921
3553
|
"-c",
|
|
3922
|
-
|
|
3554
|
+
config_path,
|
|
3923
3555
|
]
|
|
3924
3556
|
|
|
3925
3557
|
result = await self.execute_command_async(cmd)
|
|
@@ -3955,7 +3587,9 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3955
3587
|
|
|
3956
3588
|
def _run_pre_commit_task(self, options: OptionsProtocol) -> None:
|
|
3957
3589
|
if not options.skip_hooks:
|
|
3958
|
-
if getattr(options, "
|
|
3590
|
+
if getattr(options, "comprehensive", False):
|
|
3591
|
+
self._run_comprehensive_hooks()
|
|
3592
|
+
elif getattr(options, "ai_agent", False):
|
|
3959
3593
|
self.project_manager.run_pre_commit_with_analysis()
|
|
3960
3594
|
else:
|
|
3961
3595
|
self.project_manager.run_pre_commit()
|
|
@@ -3966,6 +3600,33 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
3966
3600
|
if self.session_tracker:
|
|
3967
3601
|
self.session_tracker.skip_task("pre_commit", "Skipped by user request")
|
|
3968
3602
|
|
|
3603
|
+
def _run_comprehensive_hooks(self) -> None:
    """Run pre-commit across every hook stage, aborting on first failure.

    Executes the pre-commit, pre-push and manual stages in order against
    all files, using the project's selected pre-commit config.  Raises
    ``SystemExit(1)`` as soon as any stage reports errors.
    """
    banner = "-" * 80
    self.console.print("\n" + banner)
    self.console.print(
        "[bold bright_cyan]🔍 HOOKS[/bold bright_cyan] [bold bright_white]Running comprehensive quality checks[/bold bright_white]"
    )
    self.console.print(banner + "\n")
    config_path = self.project_manager._select_precommit_config()
    for stage in ("pre-commit", "pre-push", "manual"):
        self.console.print(f"[dim]Running {stage} stage hooks...[/dim]")
        hook_cmd = [
            "uv",
            "run",
            "pre-commit",
            "run",
            "--all-files",
            f"--hook-stage={stage}",
            "-c",
            config_path,
        ]
        outcome = self.execute_command(hook_cmd)
        if outcome.returncode > 0:
            self.console.print(
                f"\n[bold red]❌ {stage} hooks failed. Please fix errors.[/bold red]\n"
            )
            raise SystemExit(1)
|
|
3629
|
+
|
|
3969
3630
|
def _initialize_session_tracking(self, options: OptionsProtocol) -> None:
|
|
3970
3631
|
if options.resume_from:
|
|
3971
3632
|
try:
|
|
@@ -4026,6 +3687,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
4026
3687
|
options.test = True
|
|
4027
3688
|
options.publish = options.all
|
|
4028
3689
|
options.commit = True
|
|
3690
|
+
if options.comprehensive:
|
|
3691
|
+
options.test = True
|
|
4029
3692
|
self._run_tracked_task(
|
|
4030
3693
|
"setup", "Initialize project structure", self._setup_package
|
|
4031
3694
|
)
|
|
@@ -4094,6 +3757,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
|
|
|
4094
3757
|
options.test = True
|
|
4095
3758
|
options.publish = options.all
|
|
4096
3759
|
options.commit = True
|
|
3760
|
+
if options.comprehensive:
|
|
3761
|
+
options.test = True
|
|
4097
3762
|
self._setup_package()
|
|
4098
3763
|
self._update_project(options)
|
|
4099
3764
|
self._update_precommit(options)
|