crackerjack 0.28.0__py3-none-any.whl → 0.30.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crackerjack might be problematic.

@@ -5,21 +5,21 @@ import re
  import subprocess
  import time
  import typing as t
- from concurrent.futures import ThreadPoolExecutor, as_completed
  from contextlib import suppress
  from dataclasses import dataclass
- from functools import lru_cache
  from pathlib import Path
  from subprocess import CompletedProcess
  from subprocess import run as execute
  from tomllib import loads
 
- import aiofiles
  from pydantic import BaseModel
  from rich.console import Console
  from tomli_w import dumps
 
- from .errors import ErrorCode, ExecutionError
+ from .code_cleaner import CodeCleaner
+ from .dynamic_config import (
+ generate_config_for_mode,
+ )
 
 
  @dataclass
@@ -274,7 +274,7 @@ python -m crackerjack --resume-from {self.progress_file.name}
 
  """
 
- all_files = set()
+ all_files: set[str] = set()
  for task in self.tasks.values():
  if task.files_changed:
  all_files.update(task.files_changed)
@@ -351,7 +351,7 @@ python -m crackerjack --resume-from {self.progress_file.name}
 
  @classmethod
  def find_recent_progress_files(cls, directory: Path = Path.cwd()) -> list[Path]:
- progress_files = []
+ progress_files: list[Path] = []
  for file_path in directory.glob("SESSION-PROGRESS-*.md"):
  try:
  if file_path.is_file():
@@ -496,9 +496,6 @@ python -m crackerjack --resume-from {self.progress_file.name}
 
  config_files = (
  ".gitignore",
- ".pre-commit-config.yaml",
- ".pre-commit-config-ai.yaml",
- ".pre-commit-config-fast.yaml",
  ".libcst.codemod.yaml",
  )
 
@@ -525,6 +522,7 @@ class OptionsProtocol(t.Protocol):
  update_precommit: bool
  update_docs: bool
  force_update_docs: bool
+ compress_docs: bool
  clean: bool
  test: bool
  benchmark: bool
@@ -543,1178 +541,492 @@ class OptionsProtocol(t.Protocol):
  track_progress: bool = False
  resume_from: str | None = None
  progress_file: str | None = None
+ experimental_hooks: bool = False
+ enable_pyrefly: bool = False
+ enable_ty: bool = False
+ no_git_tags: bool = False
+ skip_version_check: bool = False
 
 
- class CodeCleaner(BaseModel, arbitrary_types_allowed=True):
+ class ConfigManager(BaseModel, arbitrary_types_allowed=True):
+ our_path: Path
+ pkg_path: Path
+ pkg_name: str
  console: Console
+ our_toml_path: Path | None = None
+ pkg_toml_path: Path | None = None
+ python_version: str = default_python_version
+ dry_run: bool = False
 
551
- def _analyze_workload_characteristics(self, files: list[Path]) -> dict[str, t.Any]:
552
- if not files:
553
- return {
554
- "total_files": 0,
555
- "total_size": 0,
556
- "avg_file_size": 0,
557
- "complexity": "low",
558
- }
559
- total_size = 0
560
- large_files = 0
561
- for file_path in files:
562
- try:
563
- size = file_path.stat().st_size
564
- total_size += size
565
- if size > 50_000:
566
- large_files += 1
567
- except (OSError, PermissionError):
568
- continue
569
- avg_file_size = total_size / len(files) if files else 0
570
- large_file_ratio = large_files / len(files) if files else 0
571
- if len(files) > 100 or avg_file_size > 20_000 or large_file_ratio > 0.3:
572
- complexity = "high"
573
- elif len(files) > 50 or avg_file_size > 10_000 or large_file_ratio > 0.1:
574
- complexity = "medium"
561
+ def swap_package_name(self, value: list[str] | str) -> list[str] | str:
562
+ if isinstance(value, list):
563
+ value.remove("crackerjack")
564
+ value.append(self.pkg_name)
575
565
  else:
576
- complexity = "low"
577
-
578
- return {
579
- "total_files": len(files),
580
- "total_size": total_size,
581
- "avg_file_size": avg_file_size,
582
- "large_files": large_files,
583
- "large_file_ratio": large_file_ratio,
584
- "complexity": complexity,
585
- }
566
+ value = value.replace("crackerjack", self.pkg_name)
567
+ return value
586
568
 
587
- def _calculate_optimal_workers(self, workload: dict[str, t.Any]) -> int:
588
- import os
569
+ def update_pyproject_configs(self) -> None:
570
+ self._setup_toml_paths()
571
+ if self._is_crackerjack_project():
572
+ self._handle_crackerjack_project()
573
+ return
574
+ our_toml_config = self._load_our_toml()
575
+ pkg_toml_config = self._load_pkg_toml()
576
+ self._ensure_required_sections(pkg_toml_config)
577
+ self._update_tool_settings(our_toml_config, pkg_toml_config)
578
+ self._update_python_version(our_toml_config, pkg_toml_config)
579
+ self._save_pkg_toml(pkg_toml_config)
589
580
 
590
- cpu_count = os.cpu_count() or 4
591
- if workload["complexity"] == "high":
592
- max_workers = min(cpu_count // 2, 3)
593
- elif workload["complexity"] == "medium":
594
- max_workers = min(cpu_count, 6)
595
- else:
596
- max_workers = min(cpu_count + 2, 8)
581
+ def _setup_toml_paths(self) -> None:
582
+ toml_file = "pyproject.toml"
583
+ self.our_toml_path = self.our_path / toml_file
584
+ self.pkg_toml_path = self.pkg_path / toml_file
597
585
 
598
- return min(max_workers, workload["total_files"])
586
+ def _is_crackerjack_project(self) -> bool:
587
+ return self.pkg_path.stem == "crackerjack"
599
588
 
600
- def clean_files(self, pkg_dir: Path | None) -> None:
601
- if pkg_dir is None:
602
- return
603
- python_files = [
604
- file_path
605
- for file_path in pkg_dir.rglob("*.py")
606
- if not str(file_path.parent).startswith("__")
607
- ]
608
- if not python_files:
609
- return
610
- workload = self._analyze_workload_characteristics(python_files)
611
- max_workers = self._calculate_optimal_workers(workload)
612
- if len(python_files) > 10:
613
- self.console.print(
614
- f"[dim]Cleaning {workload['total_files']} files "
615
- f"({workload['complexity']} complexity) with {max_workers} workers[/dim]"
616
- )
617
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
618
- future_to_file = {
619
- executor.submit(self.clean_file, file_path): file_path
620
- for file_path in python_files
621
- }
622
- for future in as_completed(future_to_file):
623
- file_path = future_to_file[future]
624
- try:
625
- future.result()
626
- except Exception as e:
627
- self.console.print(
628
- f"[bold bright_red]❌ Error cleaning {file_path}: {e}[/bold bright_red]"
629
- )
630
- self._cleanup_cache_directories(pkg_dir)
631
-
632
- def _cleanup_cache_directories(self, pkg_dir: Path) -> None:
633
- with suppress(PermissionError, OSError):
634
- pycache_dir = pkg_dir / "__pycache__"
635
- if pycache_dir.exists():
636
- for cache_file in pycache_dir.iterdir():
637
- with suppress(PermissionError, OSError):
638
- cache_file.unlink()
639
- pycache_dir.rmdir()
640
- parent_pycache = pkg_dir.parent / "__pycache__"
641
- if parent_pycache.exists():
642
- for cache_file in parent_pycache.iterdir():
643
- with suppress(PermissionError, OSError):
644
- cache_file.unlink()
645
- parent_pycache.rmdir()
646
-
647
- def clean_file(self, file_path: Path) -> None:
648
- from crackerjack.errors import ExecutionError, handle_error
589
+ def _handle_crackerjack_project(self) -> None:
590
+ if self.our_toml_path and self.pkg_toml_path:
591
+ self.our_toml_path.write_text(self.pkg_toml_path.read_text())
649
592
 
650
- try:
651
- code = file_path.read_text(encoding="utf-8")
652
- original_code = code
653
- cleaning_failed = False
654
- try:
655
- code = self.remove_line_comments_streaming(code)
656
- except Exception as e:
657
- self.console.print(
658
- f"[bold bright_yellow]⚠️ Warning: Failed to remove line comments from {file_path}: {e}[/bold bright_yellow]"
659
- )
660
- code = original_code
661
- cleaning_failed = True
662
- try:
663
- code = self.remove_docstrings_streaming(code)
664
- except Exception as e:
665
- self.console.print(
666
- f"[bold bright_yellow]⚠️ Warning: Failed to remove docstrings from {file_path}: {e}[/bold bright_yellow]"
667
- )
668
- code = original_code
669
- cleaning_failed = True
670
- try:
671
- code = self.remove_extra_whitespace_streaming(code)
672
- except Exception as e:
673
- self.console.print(
674
- f"[bold bright_yellow]⚠️ Warning: Failed to remove extra whitespace from {file_path}: {e}[/bold bright_yellow]"
675
- )
676
- code = original_code
677
- cleaning_failed = True
678
- try:
679
- code = self.reformat_code(code)
680
- except Exception as e:
681
- self.console.print(
682
- f"[bold bright_yellow]⚠️ Warning: Failed to reformat {file_path}: {e}[/bold bright_yellow]"
683
- )
684
- code = original_code
685
- cleaning_failed = True
686
- file_path.write_text(code, encoding="utf-8")
687
- if cleaning_failed:
688
- self.console.print(
689
- f"[bold yellow]⚡ Partially cleaned:[/bold yellow] [dim bright_white]{file_path}[/dim bright_white]"
690
- )
691
- else:
692
- self.console.print(
693
- f"[bold green]✨ Cleaned:[/bold green] [dim bright_white]{file_path}[/dim bright_white]"
694
- )
695
- except PermissionError as e:
696
- self.console.print(
697
- f"[red]Failed to clean: {file_path} (Permission denied)[/red]"
698
- )
699
- handle_error(
700
- ExecutionError(
701
- message=f"Permission denied while cleaning {file_path}",
702
- error_code=ErrorCode.PERMISSION_ERROR,
703
- details=str(e),
704
- recovery=f"Check file permissions for {file_path} and ensure you have write access",
705
- ),
706
- console=self.console,
707
- exit_on_error=False,
708
- )
709
- except OSError as e:
710
- self.console.print(
711
- f"[red]Failed to clean: {file_path} (File system error)[/red]"
712
- )
713
- handle_error(
714
- ExecutionError(
715
- message=f"File system error while cleaning {file_path}",
716
- error_code=ErrorCode.FILE_WRITE_ERROR,
717
- details=str(e),
718
- recovery=f"Check that {file_path} exists and is not being used by another process",
719
- ),
720
- console=self.console,
721
- exit_on_error=False,
722
- )
723
- except UnicodeDecodeError as e:
724
- self.console.print(
725
- f"[red]Failed to clean: {file_path} (Encoding error)[/red]"
726
- )
727
- handle_error(
728
- ExecutionError(
729
- message=f"Encoding error while reading {file_path}",
730
- error_code=ErrorCode.FILE_READ_ERROR,
731
- details=str(e),
732
- recovery=f"File {file_path} contains non-UTF-8 characters. Please check the file encoding.",
733
- ),
734
- console=self.console,
735
- exit_on_error=False,
736
- )
737
- except Exception as e:
738
- self.console.print(
739
- f"[red]Failed to clean: {file_path} (Unexpected error)[/red]"
740
- )
741
- handle_error(
742
- ExecutionError(
743
- message=f"Unexpected error while cleaning {file_path}",
744
- error_code=ErrorCode.UNEXPECTED_ERROR,
745
- details=str(e),
746
- recovery="This is an unexpected error. Please report this issue with the file content if possible.",
747
- ),
748
- console=self.console,
749
- exit_on_error=False,
750
- )
593
+ def _load_our_toml(self) -> dict[str, t.Any]:
594
+ if self.our_toml_path:
595
+ return loads(self.our_toml_path.read_text())
596
+ return {}
751
597
 
752
- def _initialize_docstring_state(self) -> dict[str, t.Any]:
753
- return {
754
- "in_docstring": False,
755
- "delimiter": None,
756
- "waiting": False,
757
- "function_indent": 0,
758
- "removed_docstring": False,
759
- "in_multiline_def": False,
760
- }
598
+ def _load_pkg_toml(self) -> dict[str, t.Any]:
599
+ if self.pkg_toml_path:
600
+ return loads(self.pkg_toml_path.read_text())
601
+ return {}
761
602
 
762
- def _handle_function_definition(
763
- self, line: str, stripped: str, state: dict[str, t.Any]
764
- ) -> bool:
765
- if self._is_function_or_class_definition(stripped):
766
- state["waiting"] = True
767
- state["function_indent"] = len(line) - len(line.lstrip())
768
- state["removed_docstring"] = False
769
- state["in_multiline_def"] = not stripped.endswith(":")
770
- return True
771
- return False
603
+ def _ensure_required_sections(self, pkg_toml_config: dict[str, t.Any]) -> None:
604
+ pkg_toml_config.setdefault("tool", {})
605
+ pkg_toml_config.setdefault("project", {})
772
606
 
773
- def _handle_multiline_definition(
774
- self, line: str, stripped: str, state: dict[str, t.Any]
775
- ) -> bool:
776
- if state["in_multiline_def"]:
777
- if stripped.endswith(":"):
778
- state["in_multiline_def"] = False
779
- return True
780
- return False
607
+ def _update_tool_settings(
608
+ self, our_toml_config: dict[str, t.Any], pkg_toml_config: dict[str, t.Any]
609
+ ) -> None:
610
+ for tool, settings in our_toml_config.get("tool", {}).items():
611
+ if tool not in pkg_toml_config["tool"]:
612
+ pkg_toml_config["tool"][tool] = {}
613
+ pkg_tool_config = pkg_toml_config["tool"][tool]
614
+ self._merge_tool_config(settings, pkg_tool_config, tool)
781
615
 
782
- def _handle_waiting_docstring(
783
- self, lines: list[str], i: int, stripped: str, state: dict[str, t.Any]
784
- ) -> tuple[bool, str | None]:
785
- if state["waiting"] and stripped:
786
- if self._handle_docstring_start(stripped, state):
787
- pass_line = None
788
- if not state["in_docstring"]:
789
- function_indent: int = state["function_indent"]
790
- if self._needs_pass_statement(lines, i + 1, function_indent):
791
- pass_line = " " * (function_indent + 4) + "pass"
792
- state["removed_docstring"] = True
793
- return True, pass_line
794
- else:
795
- state["waiting"] = False
796
- return False, None
797
-
798
- def _handle_docstring_content(
799
- self, lines: list[str], i: int, stripped: str, state: dict[str, t.Any]
800
- ) -> tuple[bool, str | None]:
801
- if state["in_docstring"]:
802
- if self._handle_docstring_end(stripped, state):
803
- pass_line = None
804
- function_indent: int = state["function_indent"]
805
- if self._needs_pass_statement(lines, i + 1, function_indent):
806
- pass_line = " " * (function_indent + 4) + "pass"
807
- state["removed_docstring"] = False
808
- return True, pass_line
616
+ def _merge_tool_config(
617
+ self, our_config: dict[str, t.Any], pkg_config: dict[str, t.Any], tool: str
618
+ ) -> None:
619
+ for setting, value in our_config.items():
620
+ if isinstance(value, dict):
621
+ self._merge_nested_config(
622
+ setting, t.cast(dict[str, t.Any], value), pkg_config
623
+ )
809
624
  else:
810
- return True, None
811
- return False, None
812
-
813
- def _process_line(
814
- self, lines: list[str], i: int, line: str, state: dict[str, t.Any]
815
- ) -> tuple[bool, str | None]:
816
- stripped = line.strip()
817
- if self._handle_function_definition(line, stripped, state):
818
- return True, line
819
- if self._handle_multiline_definition(line, stripped, state):
820
- return True, line
821
- handled, pass_line = self._handle_waiting_docstring(lines, i, stripped, state)
822
- if handled:
823
- return True, pass_line
824
- handled, pass_line = self._handle_docstring_content(lines, i, stripped, state)
825
- if handled:
826
- return True, pass_line
827
- if state["removed_docstring"] and stripped:
828
- state["removed_docstring"] = False
829
- return False, line
830
-
831
- def remove_docstrings(self, code: str) -> str:
832
- lines = code.split("\n")
833
- cleaned_lines: list[str] = []
834
- docstring_state = self._initialize_docstring_state()
835
- for i, line in enumerate(lines):
836
- handled, result_line = self._process_line(lines, i, line, docstring_state)
837
- if handled:
838
- if result_line is not None:
839
- cleaned_lines.append(result_line)
625
+ self._merge_direct_config(setting, value, pkg_config)
626
+
627
+ def _merge_nested_config(
628
+ self, setting: str, value: dict[str, t.Any], pkg_config: dict[str, t.Any]
629
+ ) -> None:
630
+ if setting not in pkg_config:
631
+ pkg_config[setting] = {}
632
+ elif not isinstance(pkg_config[setting], dict):
633
+ pkg_config[setting] = {}
634
+ self._merge_tool_config(value, pkg_config[setting], "")
635
+ for k, v in value.items():
636
+ self._merge_nested_value(k, v, pkg_config[setting])
637
+
638
+ def _merge_nested_value(
639
+ self, key: str, value: t.Any, nested_config: dict[str, t.Any]
640
+ ) -> None:
641
+ if isinstance(value, str | list) and "crackerjack" in str(value):
642
+ nested_config[key] = self.swap_package_name(t.cast(str | list[str], value))
643
+ elif self._is_mergeable_list(key, value):
644
+ existing = nested_config.get(key, [])
645
+ if isinstance(existing, list) and isinstance(value, list):
646
+ nested_config[key] = list(
647
+ set(t.cast(list[str], existing) + t.cast(list[str], value))
648
+ )
840
649
  else:
841
- cleaned_lines.append(line)
842
- return "\n".join(cleaned_lines)
650
+ nested_config[key] = value
651
+ elif key not in nested_config:
652
+ nested_config[key] = value
843
653
 
844
- def _is_function_or_class_definition(self, stripped_line: str) -> bool:
845
- return stripped_line.startswith(("def ", "class ", "async def "))
654
+ def _merge_direct_config(
655
+ self, setting: str, value: t.Any, pkg_config: dict[str, t.Any]
656
+ ) -> None:
657
+ if isinstance(value, str | list) and "crackerjack" in str(value):
658
+ pkg_config[setting] = self.swap_package_name(t.cast(str | list[str], value))
659
+ elif self._is_mergeable_list(setting, value):
660
+ existing = pkg_config.get(setting, [])
661
+ if isinstance(existing, list) and isinstance(value, list):
662
+ pkg_config[setting] = list(
663
+ set(t.cast(list[str], existing) + t.cast(list[str], value))
664
+ )
665
+ else:
666
+ pkg_config[setting] = value
667
+ elif setting not in pkg_config:
668
+ pkg_config[setting] = value
846
669
 
847
- def _handle_docstring_start(self, stripped: str, state: dict[str, t.Any]) -> bool:
848
- if not stripped.startswith(('"""', "'''", '"', "'")):
849
- return False
850
- if stripped.startswith(('"""', "'''")):
851
- delimiter = stripped[:3]
852
- else:
853
- delimiter = stripped[0]
854
- state["delimiter"] = delimiter
855
- if self._is_single_line_docstring(stripped, delimiter):
856
- state["waiting"] = False
857
- return True
858
- else:
859
- state["in_docstring"] = True
860
- state["waiting"] = False
861
- return True
670
+ def _is_mergeable_list(self, key: str, value: t.Any) -> bool:
671
+ return key in (
672
+ "exclude-deps",
673
+ "exclude",
674
+ "excluded",
675
+ "skips",
676
+ "ignore",
677
+ ) and isinstance(value, list)
862
678
 
863
- def _is_single_line_docstring(self, stripped: str, delimiter: str) -> bool:
864
- return stripped.endswith(delimiter) and len(stripped) > len(delimiter)
679
+ def _update_python_version(
680
+ self, our_toml_config: dict[str, t.Any], pkg_toml_config: dict[str, t.Any]
681
+ ) -> None:
682
+ python_version_pattern = "\\s*W*(\\d\\.\\d*)"
683
+ requires_python = our_toml_config.get("project", {}).get("requires-python", "")
684
+ classifiers: list[str] = []
685
+ for classifier in pkg_toml_config.get("project", {}).get("classifiers", []):
686
+ classifier = re.sub(
687
+ python_version_pattern, f" {self.python_version}", classifier
688
+ )
689
+ classifiers.append(classifier)
690
+ pkg_toml_config["project"]["classifiers"] = classifiers
691
+ if requires_python:
692
+ pkg_toml_config["project"]["requires-python"] = requires_python
865
693
 
866
- def _handle_docstring_end(self, stripped: str, state: dict[str, t.Any]) -> bool:
867
- if state["delimiter"] and stripped.endswith(state["delimiter"]):
868
- state["in_docstring"] = False
869
- state["delimiter"] = None
870
- return True
871
- return False
694
+ def _save_pkg_toml(self, pkg_toml_config: dict[str, t.Any]) -> None:
695
+ if self.pkg_toml_path:
696
+ self.pkg_toml_path.write_text(dumps(pkg_toml_config))
872
697
 
873
- def _needs_pass_statement(
874
- self, lines: list[str], start_index: int, function_indent: int
875
- ) -> bool:
876
- for i in range(start_index, len(lines)):
877
- line = lines[i]
878
- stripped = line.strip()
879
- if not stripped:
698
+ def copy_configs(self) -> None:
699
+ configs_to_add: list[str] = []
700
+ for config in config_files:
701
+ config_path = self.our_path / config
702
+ pkg_config_path = self.pkg_path / config
703
+ pkg_config_path.touch()
704
+ if self.pkg_path.stem == "crackerjack":
705
+ config_path.write_text(pkg_config_path.read_text())
880
706
  continue
881
- line_indent = len(line) - len(line.lstrip())
882
- if line_indent <= function_indent:
883
- return True
884
- if line_indent > function_indent:
885
- return False
886
- return True
707
+ if config != ".gitignore":
708
+ pkg_config_path.write_text(
709
+ config_path.read_text().replace("crackerjack", self.pkg_name)
710
+ )
711
+ configs_to_add.append(config)
712
+ if configs_to_add:
713
+ self.execute_command(["git", "add"] + configs_to_add)
887
714
 
888
- def remove_line_comments(self, code: str) -> str:
889
- lines = code.split("\n")
890
- cleaned_lines: list[str] = []
891
- for line in lines:
892
- if not line.strip():
893
- cleaned_lines.append(line)
894
- continue
895
- cleaned_line = self._process_line_for_comments(line)
896
- if cleaned_line or not line.strip():
897
- cleaned_lines.append(cleaned_line or line)
898
- return "\n".join(cleaned_lines)
899
-
900
- def _process_line_for_comments(self, line: str) -> str:
901
- result: list[str] = []
902
- string_state = {"in_string": None}
903
- for i, char in enumerate(line):
904
- if self._handle_string_character(char, i, line, string_state, result):
905
- continue
906
- elif self._handle_comment_character(char, i, line, string_state, result):
907
- break
908
- else:
909
- result.append(char)
910
- return "".join(result).rstrip()
715
+ def copy_documentation_templates(
716
+ self, force_update: bool = False, compress_docs: bool = False
717
+ ) -> None:
718
+ docs_to_add: list[str] = []
719
+ for doc_file in documentation_files:
720
+ if self._should_process_doc_file(doc_file):
721
+ self._process_single_doc_file(
722
+ doc_file, force_update, compress_docs, docs_to_add
723
+ )
911
724
 
912
- def _handle_string_character(
913
- self,
914
- char: str,
915
- index: int,
916
- line: str,
917
- string_state: dict[str, t.Any],
918
- result: list[str],
919
- ) -> bool:
920
- if char not in ("'", '"'):
725
+ if docs_to_add:
726
+ self.execute_command(["git", "add"] + docs_to_add)
727
+
728
+ def _should_process_doc_file(self, doc_file: str) -> bool:
729
+ doc_path = self.our_path / doc_file
730
+ if not doc_path.exists():
921
731
  return False
922
- if index > 0 and line[index - 1] == "\\":
732
+ if self.pkg_path.stem == "crackerjack":
923
733
  return False
924
- if string_state["in_string"] is None:
925
- string_state["in_string"] = char
926
- elif string_state["in_string"] == char:
927
- string_state["in_string"] = None
928
- result.append(char)
929
734
  return True
930
735
 
931
- def _handle_comment_character(
736
+ def _process_single_doc_file(
932
737
  self,
933
- char: str,
934
- index: int,
935
- line: str,
936
- string_state: dict[str, t.Any],
937
- result: list[str],
938
- ) -> bool:
939
- if char != "#" or string_state["in_string"] is not None:
940
- return False
941
- comment = line[index:].strip()
942
- if self._is_special_comment_line(comment):
943
- result.append(line[index:])
944
- return True
738
+ doc_file: str,
739
+ force_update: bool,
740
+ compress_docs: bool,
741
+ docs_to_add: list[str],
742
+ ) -> None:
743
+ doc_path = self.our_path / doc_file
744
+ pkg_doc_path = self.pkg_path / doc_file
745
+ should_update = force_update or not pkg_doc_path.exists()
945
746
 
946
- def _is_special_comment_line(self, comment: str) -> bool:
947
- special_comment_pattern = (
948
- r"^#\s*(?:type:\s*ignore(?:\[.*?\])?|noqa|nosec|pragma:\s*no\s*cover"
949
- r"|pylint:\s*disable|mypy:\s*ignore)"
950
- )
951
- return bool(re.match(special_comment_pattern, comment))
747
+ if should_update:
748
+ pkg_doc_path.touch()
749
+ content = doc_path.read_text(encoding="utf-8")
952
750
 
953
- def remove_extra_whitespace(self, code: str) -> str:
954
- lines = code.split("\n")
955
- cleaned_lines: list[str] = []
956
- function_tracker = {"in_function": False, "function_indent": 0}
957
- import_tracker = {"in_imports": False, "last_import_type": None}
958
- for i, line in enumerate(lines):
959
- line = line.rstrip()
960
- stripped_line = line.lstrip()
961
- self._update_function_state(line, stripped_line, function_tracker)
962
- self._update_import_state(line, stripped_line, import_tracker)
963
- if not line:
964
- if self._should_skip_empty_line(
965
- i, lines, cleaned_lines, function_tracker, import_tracker
966
- ):
967
- continue
968
- cleaned_lines.append(line)
969
- return "\n".join(self._remove_trailing_empty_lines(cleaned_lines))
970
-
971
- def remove_docstrings_streaming(self, code: str) -> str:
972
- if len(code) < 10000:
973
- return self.remove_docstrings(code)
974
-
975
- def process_lines():
976
- lines = code.split("\n")
977
- docstring_state = self._initialize_docstring_state()
978
- for i, line in enumerate(lines):
979
- handled, result_line = self._process_line(
980
- lines, i, line, docstring_state
981
- )
982
- if handled:
983
- if result_line is not None:
984
- yield result_line
985
- else:
986
- yield line
751
+ auto_compress = self._should_compress_doc(doc_file, compress_docs)
752
+ updated_content = self._customize_documentation_content(
753
+ content, doc_file, auto_compress
754
+ )
755
+ pkg_doc_path.write_text(updated_content, encoding="utf-8")
756
+ docs_to_add.append(doc_file)
987
757
 
988
- return "\n".join(process_lines())
758
+ self._print_doc_update_message(doc_file, auto_compress)
989
759
 
990
- def remove_line_comments_streaming(self, code: str) -> str:
991
- if len(code) < 10000:
992
- return self.remove_line_comments(code)
760
+ def _should_compress_doc(self, doc_file: str, compress_docs: bool) -> bool:
761
+ return compress_docs or (
762
+ self.pkg_path.stem != "crackerjack" and doc_file == "CLAUDE.md"
763
+ )
993
764
 
994
- def process_lines():
995
- for line in code.split("\n"):
996
- if not line.strip():
997
- yield line
998
- continue
999
- cleaned_line = self._process_line_for_comments(line)
1000
- if cleaned_line or not line.strip():
1001
- yield cleaned_line or line
765
+ def _print_doc_update_message(self, doc_file: str, auto_compress: bool) -> None:
766
+ compression_note = (
767
+ " (compressed for Claude Code)"
768
+ if auto_compress and doc_file == "CLAUDE.md"
769
+ else ""
770
+ )
771
+ self.console.print(
772
+ f"[green]📋[/green] Updated {doc_file} with latest Crackerjack quality standards{compression_note}"
773
+ )
1002
774
 
1003
- return "\n".join(process_lines())
1004
-
1005
- def remove_extra_whitespace_streaming(self, code: str) -> str:
1006
- if len(code) < 10000:
1007
- return self.remove_extra_whitespace(code)
775
+ def _customize_documentation_content(
776
+ self, content: str, filename: str, compress: bool = False
777
+ ) -> str:
778
+ if filename == "CLAUDE.md":
779
+ return self._customize_claude_md(content, compress)
780
+ elif filename == "RULES.md":
781
+ return self._customize_rules_md(content)
782
+ return content
1008
783
 
1009
- def process_lines():
1010
- lines = code.split("\n")
1011
- function_tracker: dict[str, t.Any] = {
1012
- "in_function": False,
1013
- "function_indent": 0,
1014
- }
1015
- import_tracker: dict[str, t.Any] = {
1016
- "in_imports": False,
1017
- "last_import_type": None,
1018
- }
1019
- previous_lines: list[str] = []
1020
- for i, line in enumerate(lines):
1021
- line = line.rstrip()
1022
- stripped_line = line.lstrip()
1023
- self._update_function_state(line, stripped_line, function_tracker)
1024
- self._update_import_state(line, stripped_line, import_tracker)
1025
- if not line:
1026
- if self._should_skip_empty_line(
1027
- i, lines, previous_lines, function_tracker, import_tracker
1028
- ):
1029
- continue
1030
- previous_lines.append(line)
1031
- yield line
784
+ def _compress_claude_md(self, content: str, target_size: int = 30000) -> str:
785
+ content.split("\n")
786
+ current_size = len(content)
787
+ if current_size <= target_size:
788
+ return content
789
+ essential_sections = [
790
+ "# ",
791
+ "## Project Overview",
792
+ "## Key Commands",
793
+ "## Development Guidelines",
794
+ "## Code Quality Compliance",
795
+ "### Refurb Standards",
796
+ "### Bandit Security Standards",
797
+ "### Pyright Type Safety Standards",
798
+ "## AI Code Generation Best Practices",
799
+ "## Task Completion Requirements",
800
+ ]
801
+ compression_strategies = [
802
+ self._remove_redundant_examples,
803
+ self._compress_command_examples,
804
+ self._remove_verbose_sections,
805
+ self._compress_repeated_patterns,
806
+ self._summarize_long_sections,
807
+ ]
808
+ compressed_content = content
809
+ for strategy in compression_strategies:
810
+ compressed_content = strategy(compressed_content)
811
+ if len(compressed_content) <= target_size:
812
+ break
813
+ if len(compressed_content) > target_size:
814
+ compressed_content = self._extract_essential_sections(
815
+ compressed_content, essential_sections, target_size
816
+ )
1032
817
 
1033
- processed_lines = list(process_lines())
1034
- return "\n".join(self._remove_trailing_empty_lines(processed_lines))
818
+ return self._add_compression_notice(compressed_content)
1035
819
 
1036
- def _update_function_state(
1037
- self, line: str, stripped_line: str, function_tracker: dict[str, t.Any]
1038
- ) -> None:
1039
- if stripped_line.startswith(("def ", "async def ")):
1040
- function_tracker["in_function"] = True
1041
- function_tracker["function_indent"] = len(line) - len(stripped_line)
1042
- elif self._is_function_end(line, stripped_line, function_tracker):
1043
- function_tracker["in_function"] = False
1044
- function_tracker["function_indent"] = 0
1045
-
1046
- def _update_import_state(
1047
- self, line: str, stripped_line: str, import_tracker: dict[str, t.Any]
1048
- ) -> None:
1049
- if stripped_line.startswith(("import ", "from ")):
1050
- import_tracker["in_imports"] = True
1051
- if self._is_stdlib_import(stripped_line):
1052
- current_type = "stdlib"
1053
- elif self._is_local_import(stripped_line):
1054
- current_type = "local"
820
+ def _remove_redundant_examples(self, content: str) -> str:
821
+ lines = content.split("\n")
822
+ result = []
823
+ in_example_block = False
824
+ example_count = 0
825
+ max_examples_per_section = 2
826
+ for line in lines:
827
+ if line.strip().startswith("```"):
828
+ if not in_example_block:
829
+ example_count += 1
830
+ if example_count <= max_examples_per_section:
831
+ result.append(line)
832
+ in_example_block = True
833
+ else:
834
+ in_example_block = "skip"
835
+ else:
836
+ if in_example_block != "skip":
837
+ result.append(line)
838
+ in_example_block = False
839
+ elif in_example_block == "skip":
840
+ continue
841
+ elif line.startswith(("## ", "### ")):
842
+ example_count = 0
843
+ result.append(line)
1055
844
  else:
1056
- current_type = "third_party"
1057
- import_tracker["last_import_type"] = current_type
1058
- elif stripped_line and not stripped_line.startswith("#"):
1059
- import_tracker["in_imports"] = False
1060
- import_tracker["last_import_type"] = None
1061
-
1062
- @staticmethod
1063
- @lru_cache(maxsize=256)
1064
- def _is_stdlib_module(module: str) -> bool:
1065
- stdlib_modules = {
1066
- "os",
1067
- "sys",
1068
- "re",
1069
- "json",
1070
- "datetime",
1071
- "time",
1072
- "pathlib",
1073
- "typing",
1074
- "collections",
1075
- "itertools",
1076
- "functools",
1077
- "operator",
1078
- "math",
1079
- "random",
1080
- "uuid",
1081
- "urllib",
1082
- "http",
1083
- "html",
1084
- "xml",
1085
- "email",
1086
- "csv",
1087
- "sqlite3",
1088
- "subprocess",
1089
- "threading",
1090
- "multiprocessing",
1091
- "asyncio",
1092
- "contextlib",
1093
- "dataclasses",
1094
- "enum",
1095
- "abc",
1096
- "io",
1097
- "tempfile",
1098
- "shutil",
1099
- "glob",
1100
- "pickle",
1101
- "copy",
1102
- "heapq",
1103
- "bisect",
1104
- "array",
1105
- "struct",
1106
- "zlib",
1107
- "hashlib",
1108
- "hmac",
1109
- "secrets",
1110
- "base64",
1111
- "binascii",
1112
- "codecs",
1113
- "locale",
1114
- "platform",
1115
- "socket",
1116
- "ssl",
1117
- "ipaddress",
1118
- "logging",
1119
- "warnings",
1120
- "inspect",
1121
- "ast",
1122
- "dis",
1123
- "tokenize",
1124
- "keyword",
1125
- "linecache",
1126
- "traceback",
1127
- "weakref",
1128
- "gc",
1129
- "ctypes",
1130
- "unittest",
1131
- "doctest",
1132
- "pdb",
1133
- "profile",
1134
- "cProfile",
1135
- "timeit",
1136
- "trace",
1137
- "calendar",
1138
- "decimal",
1139
- "fractions",
1140
- "statistics",
1141
- "tomllib",
1142
- }
1143
- return module in stdlib_modules
845
+ result.append(line)
1144
846
 
1145
- def _is_stdlib_import(self, stripped_line: str) -> bool:
1146
- try:
1147
- if stripped_line.startswith("from "):
1148
- module = stripped_line.split()[1].split(".")[0]
1149
- else:
1150
- module = stripped_line.split()[1].split(".")[0]
1151
- except IndexError:
1152
- return False
1153
- return CodeCleaner._is_stdlib_module(module)
847
+ return "\n".join(result)
1154
848
 
1155
- def _is_local_import(self, stripped_line: str) -> bool:
1156
- return stripped_line.startswith("from .") or " . " in stripped_line
849
+ def _compress_command_examples(self, content: str) -> str:
850
+ import re
1157
851
 
1158
- def _is_function_end(
1159
- self, line: str, stripped_line: str, function_tracker: dict[str, t.Any]
1160
- ) -> bool:
1161
- return (
1162
- function_tracker["in_function"]
1163
- and bool(line)
1164
- and (len(line) - len(stripped_line) <= function_tracker["function_indent"])
1165
- and (not stripped_line.startswith(("@", "#")))
852
+ content = re.sub(
853
+ r"```bash\n((?:[^`]+\n){3,})```",
854
+ lambda m: "```bash\n"
855
+ + "\n".join(m.group(1).split("\n")[:3])
856
+ + "\n# ... (additional commands available)\n```",
857
+ content,
858
+ flags=re.MULTILINE,
1166
859
  )
1167
860
 
1168
- def _should_skip_empty_line(
1169
- self,
1170
- line_idx: int,
1171
- lines: list[str],
1172
- cleaned_lines: list[str],
1173
- function_tracker: dict[str, t.Any],
1174
- import_tracker: dict[str, t.Any],
1175
- ) -> bool:
1176
- if line_idx > 0 and cleaned_lines and (not cleaned_lines[-1]):
1177
- return True
1178
-
1179
- if self._is_import_section_separator(line_idx, lines, import_tracker):
1180
- return False
1181
-
1182
- if function_tracker["in_function"]:
1183
- return self._should_skip_function_empty_line(line_idx, lines)
1184
- return False
1185
-
1186
- def _is_import_section_separator(
1187
- self, line_idx: int, lines: list[str], import_tracker: dict[str, t.Any]
1188
- ) -> bool:
1189
- if not import_tracker["in_imports"]:
1190
- return False
1191
-
1192
- next_line_idx = line_idx + 1
1193
- while next_line_idx < len(lines) and not lines[next_line_idx].strip():
1194
- next_line_idx += 1
1195
-
1196
- if next_line_idx >= len(lines):
1197
- return False
1198
-
1199
- next_line = lines[next_line_idx].strip()
1200
- if not next_line.startswith(("import ", "from ")):
1201
- return False
1202
-
1203
- if self._is_stdlib_import(next_line):
1204
- next_type = "stdlib"
1205
- elif self._is_local_import(next_line):
1206
- next_type = "local"
1207
- else:
1208
- next_type = "third_party"
1209
-
1210
- return import_tracker["last_import_type"] != next_type
1211
-
1212
- def _should_skip_function_empty_line(self, line_idx: int, lines: list[str]) -> bool:
1213
- next_line_idx = line_idx + 1
1214
- if next_line_idx >= len(lines):
1215
- return False
1216
- next_line = lines[next_line_idx].strip()
1217
- return not self._is_significant_next_line(next_line)
1218
-
1219
- def _is_significant_next_line(self, next_line: str) -> bool:
1220
- if next_line.startswith(("return", "class ", "def ", "async def ", "@")):
1221
- return True
1222
- if next_line in ("pass", "break", "continue", "raise"):
1223
- return True
1224
- return self._is_special_comment(next_line)
1225
-
1226
- def _is_special_comment(self, line: str) -> bool:
1227
- if not line.startswith("#"):
1228
- return False
1229
- special_patterns = ("type:", "noqa", "nosec", "pragma:", "pylint:", "mypy:")
1230
- return any(pattern in line for pattern in special_patterns)
1231
-
1232
- def _remove_trailing_empty_lines(self, lines: list[str]) -> list[str]:
1233
- while lines and (not lines[-1]):
1234
- lines.pop()
1235
- return lines
1236
-
1237
- def reformat_code(self, code: str) -> str:
1238
- from crackerjack.errors import handle_error
1239
-
1240
- try:
1241
- import tempfile
1242
-
1243
- with tempfile.NamedTemporaryFile(
1244
- suffix=".py", mode="w+", delete=False
1245
- ) as temp:
1246
- temp_path = Path(temp.name)
1247
- temp_path.write_text(code)
1248
- try:
1249
- result = subprocess.run(
1250
- ["uv", "run", "ruff", "format", str(temp_path)],
1251
- check=False,
1252
- capture_output=True,
1253
- text=True,
1254
- )
1255
- if result.returncode == 0:
1256
- formatted_code = temp_path.read_text()
1257
- else:
1258
- self.console.print(
1259
- f"[bold bright_yellow]⚠️ Ruff formatting failed: {result.stderr}[/bold bright_yellow]"
1260
- )
1261
- handle_error(
1262
- ExecutionError(
1263
- message="Code formatting failed",
1264
- error_code=ErrorCode.FORMATTING_ERROR,
1265
- details=result.stderr,
1266
- recovery="Check Ruff configuration and formatting rules",
1267
- ),
1268
- console=self.console,
1269
- exit_on_error=False,
1270
- )
1271
- formatted_code = code
1272
- except Exception as e:
1273
- self.console.print(
1274
- f"[bold bright_red]❌ Error running Ruff: {e}[/bold bright_red]"
1275
- )
1276
- handle_error(
1277
- ExecutionError(
1278
- message="Error running Ruff",
1279
- error_code=ErrorCode.FORMATTING_ERROR,
1280
- details=str(e),
1281
- recovery="Verify Ruff is installed and configured correctly",
1282
- ),
1283
- console=self.console,
1284
- exit_on_error=False,
1285
- )
1286
- formatted_code = code
1287
- finally:
1288
- with suppress(FileNotFoundError):
1289
- temp_path.unlink()
1290
- return formatted_code
1291
- except Exception as e:
1292
- self.console.print(
1293
- f"[bold bright_red]❌ Error during reformatting: {e}[/bold bright_red]"
1294
- )
1295
- handle_error(
1296
- ExecutionError(
1297
- message="Error during reformatting",
1298
- error_code=ErrorCode.FORMATTING_ERROR,
1299
- details=str(e),
1300
- recovery="Check file permissions and disk space",
1301
- ),
1302
- console=self.console,
1303
- )
1304
- return code
861
+ return content
1305
862
 
1306
- async def clean_files_async(self, pkg_dir: Path | None) -> None:
1307
- if pkg_dir is None:
1308
- return
1309
- python_files = [
1310
- file_path
1311
- for file_path in pkg_dir.rglob("*.py")
1312
- if not str(file_path.parent).startswith("__")
863
+ def _remove_verbose_sections(self, content: str) -> str:
864
+ sections_to_compress = [
865
+ "## Recent Bug Fixes and Improvements",
866
+ "## Development Memories",
867
+ "## Self-Maintenance Protocol for AI Assistants",
868
+ "## Pre-commit Hook Maintenance",
1313
869
  ]
1314
- if not python_files:
1315
- return
1316
- max_concurrent = min(len(python_files), 8)
1317
- semaphore = asyncio.Semaphore(max_concurrent)
870
+ lines = content.split("\n")
871
+ result = []
872
+ skip_section = False
873
+ for line in lines:
874
+ if any(line.startswith(section) for section in sections_to_compress):
875
+ skip_section = True
876
+ result.extend(
877
+ (line, "*[Detailed information available in full CLAUDE.md]*")
878
+ )
879
+ result.append("")
880
+ elif line.startswith("## ") and skip_section:
881
+ skip_section = False
882
+ result.append(line)
883
+ elif not skip_section:
884
+ result.append(line)
1318
885
 
1319
- async def clean_with_semaphore(file_path: Path) -> None:
1320
- async with semaphore:
1321
- await self.clean_file_async(file_path)
886
+ return "\n".join(result)
1322
887
 
1323
- tasks = [clean_with_semaphore(file_path) for file_path in python_files]
1324
- await asyncio.gather(*tasks, return_exceptions=True)
888
+ def _compress_repeated_patterns(self, content: str) -> str:
889
+ import re
1325
890
 
1326
- await self._cleanup_cache_directories_async(pkg_dir)
891
+ content = re.sub(r"\n{3,}", "\n\n", content)
892
+ content = re.sub(
893
+ r"(\*\*[A-Z][^*]+:\*\*[^\n]+\n){3,}",
894
+ lambda m: m.group(0)[:200]
895
+ + "...\n*[Additional patterns available in full documentation]*\n",
896
+ content,
897
+ )
1327
898
 
1328
- async def clean_file_async(self, file_path: Path) -> None:
1329
- from crackerjack.errors import ExecutionError, handle_error
899
+ return content
1330
900
 
1331
- try:
1332
- async with aiofiles.open(file_path, encoding="utf-8") as f: # type: ignore[misc]
1333
- code: str = await f.read() # type: ignore[misc]
1334
- original_code: str = code
1335
- cleaning_failed = False
1336
- try:
1337
- code = self.remove_line_comments_streaming(code)
1338
- except Exception as e:
1339
- self.console.print(
1340
- f"[bold bright_yellow]⚠️ Warning: Failed to remove line comments from {file_path}: {e}[/bold bright_yellow]"
1341
- )
1342
- code = original_code
1343
- cleaning_failed = True
1344
- try:
1345
- code = self.remove_docstrings_streaming(code)
1346
- except Exception as e:
1347
- self.console.print(
1348
- f"[bold bright_yellow]⚠️ Warning: Failed to remove docstrings from {file_path}: {e}[/bold bright_yellow]"
1349
- )
1350
- code = original_code
1351
- cleaning_failed = True
1352
- try:
1353
- code = self.remove_extra_whitespace_streaming(code)
1354
- except Exception as e:
1355
- self.console.print(
1356
- f"[bold bright_yellow]⚠️ Warning: Failed to remove extra whitespace from {file_path}: {e}[/bold bright_yellow]"
1357
- )
1358
- code = original_code
1359
- cleaning_failed = True
1360
- try:
1361
- code = await self.reformat_code_async(code)
1362
- except Exception as e:
1363
- self.console.print(
1364
- f"[bold bright_yellow]⚠️ Warning: Failed to reformat {file_path}: {e}[/bold bright_yellow]"
1365
- )
1366
- code = original_code
1367
- cleaning_failed = True
1368
- async with aiofiles.open(file_path, "w", encoding="utf-8") as f: # type: ignore[misc]
1369
- await f.write(code) # type: ignore[misc]
1370
- if cleaning_failed:
1371
- self.console.print(
1372
- f"[bold yellow]⚡ Partially cleaned:[/bold yellow] [dim bright_white]{file_path}[/dim bright_white]"
1373
- )
901
+ def _summarize_long_sections(self, content: str) -> str:
902
+ lines = content.split("\n")
903
+ result = []
904
+ current_section = []
905
+ section_header = ""
906
+ for line in lines:
907
+ if line.startswith(("### ", "## ")):
908
+ if current_section and len("\n".join(current_section)) > 1000:
909
+ summary = self._create_section_summary(
910
+ section_header, current_section
911
+ )
912
+ result.extend(summary)
913
+ else:
914
+ result.extend(current_section)
915
+ current_section = [line]
916
+ section_header = line
1374
917
  else:
1375
- self.console.print(
1376
- f"[bold green]✨ Cleaned:[/bold green] [dim bright_white]{file_path}[/dim bright_white]"
1377
- )
1378
- except PermissionError as e:
1379
- self.console.print(
1380
- f"[red]Failed to clean: {file_path} (Permission denied)[/red]"
1381
- )
1382
- handle_error(
1383
- ExecutionError(
1384
- message=f"Permission denied while cleaning {file_path}",
1385
- error_code=ErrorCode.PERMISSION_ERROR,
1386
- details=str(e),
1387
- recovery=f"Check file permissions for {file_path} and ensure you have write access",
1388
- ),
1389
- console=self.console,
1390
- exit_on_error=False,
1391
- )
1392
- except OSError as e:
1393
- self.console.print(
1394
- f"[red]Failed to clean: {file_path} (File system error)[/red]"
1395
- )
1396
- handle_error(
1397
- ExecutionError(
1398
- message=f"File system error while cleaning {file_path}",
1399
- error_code=ErrorCode.FILE_WRITE_ERROR,
1400
- details=str(e),
1401
- recovery=f"Check that {file_path} exists and is not being used by another process",
1402
- ),
1403
- console=self.console,
1404
- exit_on_error=False,
1405
- )
1406
- except UnicodeDecodeError as e:
1407
- self.console.print(
1408
- f"[red]Failed to clean: {file_path} (Encoding error)[/red]"
1409
- )
1410
- handle_error(
1411
- ExecutionError(
1412
- message=f"Encoding error while cleaning {file_path}",
1413
- error_code=ErrorCode.FILE_READ_ERROR,
1414
- details=str(e),
1415
- recovery=f"Check the file encoding of {file_path} - it may not be UTF-8",
1416
- ),
1417
- console=self.console,
1418
- exit_on_error=False,
1419
- )
1420
- except Exception as e:
1421
- self.console.print(f"[red]Unexpected error cleaning {file_path}: {e}[/red]")
1422
- handle_error(
1423
- ExecutionError(
1424
- message=f"Unexpected error while cleaning {file_path}",
1425
- error_code=ErrorCode.UNEXPECTED_ERROR,
1426
- details=str(e),
1427
- recovery="Please report this issue with the full error details",
1428
- ),
1429
- console=self.console,
1430
- exit_on_error=False,
1431
- )
1432
-
1433
- async def reformat_code_async(self, code: str) -> str:
1434
- from crackerjack.errors import handle_error
918
+ current_section.append(line)
919
+ if current_section:
920
+ if len("\n".join(current_section)) > 1000:
921
+ summary = self._create_section_summary(section_header, current_section)
922
+ result.extend(summary)
923
+ else:
924
+ result.extend(current_section)
1435
925
 
1436
- try:
1437
- import tempfile
1438
-
1439
- with tempfile.NamedTemporaryFile(
1440
- suffix=".py", mode="w+", delete=False
1441
- ) as temp:
1442
- temp_path = Path(temp.name)
1443
- async with aiofiles.open(temp_path, "w", encoding="utf-8") as f: # type: ignore[misc]
1444
- await f.write(code) # type: ignore[misc]
1445
- try:
1446
- proc = await asyncio.create_subprocess_exec(
1447
- "uv",
1448
- "run",
1449
- "ruff",
1450
- "format",
1451
- str(temp_path),
1452
- stdout=asyncio.subprocess.PIPE,
1453
- stderr=asyncio.subprocess.PIPE,
1454
- )
1455
- _, stderr = await proc.communicate()
1456
- if proc.returncode == 0:
1457
- async with aiofiles.open(temp_path, encoding="utf-8") as f: # type: ignore[misc]
1458
- formatted_code = await f.read() # type: ignore[misc]
1459
- else:
1460
- self.console.print(
1461
- f"[bold bright_yellow]⚠️ Warning: Ruff format failed with return code {proc.returncode}[/bold bright_yellow]"
1462
- )
1463
- if stderr:
1464
- self.console.print(f"[dim]Ruff stderr: {stderr.decode()}[/dim]")
1465
- formatted_code = code
1466
- except Exception as e:
1467
- self.console.print(
1468
- f"[bold bright_red]❌ Error running Ruff: {e}[/bold bright_red]"
1469
- )
1470
- handle_error(
1471
- ExecutionError(
1472
- message="Error running Ruff",
1473
- error_code=ErrorCode.FORMATTING_ERROR,
1474
- details=str(e),
1475
- recovery="Verify Ruff is installed and configured correctly",
1476
- ),
1477
- console=self.console,
1478
- exit_on_error=False,
1479
- )
1480
- formatted_code = code
1481
- finally:
1482
- with suppress(FileNotFoundError):
1483
- temp_path.unlink()
926
+ return "\n".join(result)
1484
927
 
1485
- return formatted_code
1486
- except Exception as e:
1487
- self.console.print(
1488
- f"[bold bright_red]❌ Error during reformatting: {e}[/bold bright_red]"
1489
- )
1490
- handle_error(
1491
- ExecutionError(
1492
- message="Error during reformatting",
1493
- error_code=ErrorCode.FORMATTING_ERROR,
1494
- details=str(e),
1495
- recovery="Check file permissions and disk space",
1496
- ),
1497
- console=self.console,
1498
- exit_on_error=False,
1499
- )
1500
- return code
1501
-
1502
- async def _cleanup_cache_directories_async(self, pkg_dir: Path) -> None:
1503
- def cleanup_sync() -> None:
1504
- with suppress(PermissionError, OSError):
1505
- pycache_dir = pkg_dir / "__pycache__"
1506
- if pycache_dir.exists():
1507
- for cache_file in pycache_dir.iterdir():
1508
- with suppress(PermissionError, OSError):
1509
- cache_file.unlink()
1510
- pycache_dir.rmdir()
1511
- parent_pycache = pkg_dir.parent / "__pycache__"
1512
- if parent_pycache.exists():
1513
- for cache_file in parent_pycache.iterdir():
1514
- with suppress(PermissionError, OSError):
1515
- cache_file.unlink()
1516
- parent_pycache.rmdir()
1517
-
1518
- loop = asyncio.get_event_loop()
1519
- await loop.run_in_executor(None, cleanup_sync)
928
+ def _create_section_summary(
929
+ self, header: str, section_lines: list[str]
930
+ ) -> list[str]:
931
+ summary = [header, ""]
1520
932
 
933
+ key_points = []
934
+ for line in section_lines[2:]:
935
+ if line.strip().startswith(("- ", "* ", "1. ", "2. ")):
936
+ key_points.append(line)
937
+ elif line.strip().startswith("**") and ":" in line:
938
+ key_points.append(line)
1521
939
 
1522
- class ConfigManager(BaseModel, arbitrary_types_allowed=True):
1523
- our_path: Path
1524
- pkg_path: Path
1525
- pkg_name: str
1526
- console: Console
1527
- our_toml_path: Path | None = None
1528
- pkg_toml_path: Path | None = None
1529
- python_version: str = default_python_version
1530
- dry_run: bool = False
940
+ if len(key_points) >= 5:
941
+ break
1531
942
 
1532
- def swap_package_name(self, value: list[str] | str) -> list[str] | str:
1533
- if isinstance(value, list):
1534
- value.remove("crackerjack")
1535
- value.append(self.pkg_name)
943
+ if key_points:
944
+ summary.extend(key_points[:5])
945
+ summary.append("*[Complete details available in full CLAUDE.md]*")
1536
946
  else:
1537
- value = value.replace("crackerjack", self.pkg_name)
1538
- return value
1539
-
1540
- def update_pyproject_configs(self) -> None:
1541
- self._setup_toml_paths()
1542
- if self._is_crackerjack_project():
1543
- self._handle_crackerjack_project()
1544
- return
1545
- our_toml_config = self._load_our_toml()
1546
- pkg_toml_config = self._load_pkg_toml()
1547
- self._ensure_required_sections(pkg_toml_config)
1548
- self._update_tool_settings(our_toml_config, pkg_toml_config)
1549
- self._update_python_version(our_toml_config, pkg_toml_config)
1550
- self._save_pkg_toml(pkg_toml_config)
1551
-
1552
- def _setup_toml_paths(self) -> None:
1553
- toml_file = "pyproject.toml"
1554
- self.our_toml_path = self.our_path / toml_file
1555
- self.pkg_toml_path = self.pkg_path / toml_file
1556
-
1557
- def _is_crackerjack_project(self) -> bool:
1558
- return self.pkg_path.stem == "crackerjack"
1559
-
1560
- def _handle_crackerjack_project(self) -> None:
1561
- if self.our_toml_path and self.pkg_toml_path:
1562
- self.our_toml_path.write_text(self.pkg_toml_path.read_text())
1563
-
1564
- def _load_our_toml(self) -> dict[str, t.Any]:
1565
- if self.our_toml_path:
1566
- return loads(self.our_toml_path.read_text())
1567
- return {}
1568
-
1569
- def _load_pkg_toml(self) -> dict[str, t.Any]:
1570
- if self.pkg_toml_path:
1571
- return loads(self.pkg_toml_path.read_text())
1572
- return {}
947
+ content_preview = " ".join(
948
+ line.strip()
949
+ for line in section_lines[2:10]
950
+ if line.strip() and not line.startswith("#")
951
+ )[:200]
952
+ summary.extend(
953
+ (
954
+ f"{content_preview}...",
955
+ "*[Full section available in complete documentation]*",
956
+ )
957
+ )
1573
958
 
1574
- def _ensure_required_sections(self, pkg_toml_config: dict[str, t.Any]) -> None:
1575
- pkg_toml_config.setdefault("tool", {})
1576
- pkg_toml_config.setdefault("project", {})
959
+ summary.append("")
960
+ return summary
1577
961
 
1578
- def _update_tool_settings(
1579
- self, our_toml_config: dict[str, t.Any], pkg_toml_config: dict[str, t.Any]
1580
- ) -> None:
1581
- for tool, settings in our_toml_config.get("tool", {}).items():
1582
- if tool not in pkg_toml_config["tool"]:
1583
- pkg_toml_config["tool"][tool] = {}
1584
- pkg_tool_config = pkg_toml_config["tool"][tool]
1585
- self._merge_tool_config(settings, pkg_tool_config, tool)
962
+ def _extract_essential_sections(
963
+ self, content: str, essential_sections: list[str], target_size: int
964
+ ) -> str:
965
+ lines = content.split("\n")
966
+ result = []
967
+ current_section = []
968
+ keep_section = False
1586
969
 
1587
- def _merge_tool_config(
1588
- self, our_config: dict[str, t.Any], pkg_config: dict[str, t.Any], tool: str
1589
- ) -> None:
1590
- for setting, value in our_config.items():
1591
- if isinstance(value, dict):
1592
- self._merge_nested_config(
1593
- setting, t.cast(dict[str, t.Any], value), pkg_config
1594
- )
970
+ for line in lines:
971
+ new_section_started = self._process_line_for_section(
972
+ line, essential_sections, current_section, keep_section, result
973
+ )
974
+ if new_section_started is not None:
975
+ current_section, keep_section = new_section_started
1595
976
  else:
1596
- self._merge_direct_config(setting, value, pkg_config)
977
+ current_section.append(line)
1597
978
 
1598
- def _merge_nested_config(
1599
- self, setting: str, value: dict[str, t.Any], pkg_config: dict[str, t.Any]
1600
- ) -> None:
1601
- if setting not in pkg_config:
1602
- pkg_config[setting] = {}
1603
- elif not isinstance(pkg_config[setting], dict):
1604
- pkg_config[setting] = {}
1605
- self._merge_tool_config(value, pkg_config[setting], "")
1606
- for k, v in value.items():
1607
- self._merge_nested_value(k, v, pkg_config[setting])
979
+ if self._should_stop_extraction(result, target_size):
980
+ break
1608
981
 
1609
- def _merge_nested_value(
1610
- self, key: str, value: t.Any, nested_config: dict[str, t.Any]
1611
- ) -> None:
1612
- if isinstance(value, str | list) and "crackerjack" in str(value):
1613
- nested_config[key] = self.swap_package_name(t.cast(str | list[str], value))
1614
- elif self._is_mergeable_list(key, value):
1615
- existing = nested_config.get(key, [])
1616
- if isinstance(existing, list) and isinstance(value, list):
1617
- nested_config[key] = list(
1618
- set(t.cast(list[str], existing) + t.cast(list[str], value))
1619
- )
1620
- else:
1621
- nested_config[key] = value
1622
- elif key not in nested_config:
1623
- nested_config[key] = value
982
+ self._finalize_extraction(current_section, keep_section, result, target_size)
983
+ return "\n".join(result)
1624
984
 
1625
- def _merge_direct_config(
1626
- self, setting: str, value: t.Any, pkg_config: dict[str, t.Any]
1627
- ) -> None:
1628
- if isinstance(value, str | list) and "crackerjack" in str(value):
1629
- pkg_config[setting] = self.swap_package_name(t.cast(str | list[str], value))
1630
- elif self._is_mergeable_list(setting, value):
1631
- existing = pkg_config.get(setting, [])
1632
- if isinstance(existing, list) and isinstance(value, list):
1633
- pkg_config[setting] = list(
1634
- set(t.cast(list[str], existing) + t.cast(list[str], value))
1635
- )
1636
- else:
1637
- pkg_config[setting] = value
1638
- elif setting not in pkg_config:
1639
- pkg_config[setting] = value
985
+ def _process_line_for_section(
986
+ self,
987
+ line: str,
988
+ essential_sections: list[str],
989
+ current_section: list[str],
990
+ keep_section: bool,
991
+ result: list[str],
992
+ ) -> tuple[list[str], bool] | None:
993
+ if any(line.startswith(section) for section in essential_sections):
994
+ if current_section and keep_section:
995
+ result.extend(current_section)
996
+ return ([line], True)
997
+ elif line.startswith(("## ", "### ")):
998
+ if current_section and keep_section:
999
+ result.extend(current_section)
1000
+ return ([line], False)
1001
+ return None
1640
1002
 
1641
- def _is_mergeable_list(self, key: str, value: t.Any) -> bool:
1642
- return key in (
1643
- "exclude-deps",
1644
- "exclude",
1645
- "excluded",
1646
- "skips",
1647
- "ignore",
1648
- ) and isinstance(value, list)
1003
+ def _should_stop_extraction(self, result: list[str], target_size: int) -> bool:
1004
+ return len("\n".join(result)) > target_size
1649
1005
 
1650
- def _update_python_version(
1651
- self, our_toml_config: dict[str, t.Any], pkg_toml_config: dict[str, t.Any]
1006
+ def _finalize_extraction(
1007
+ self,
1008
+ current_section: list[str],
1009
+ keep_section: bool,
1010
+ result: list[str],
1011
+ target_size: int,
1652
1012
  ) -> None:
1653
- python_version_pattern = "\\s*W*(\\d\\.\\d*)"
1654
- requires_python = our_toml_config.get("project", {}).get("requires-python", "")
1655
- classifiers: list[str] = []
1656
- for classifier in pkg_toml_config.get("project", {}).get("classifiers", []):
1657
- classifier = re.sub(
1658
- python_version_pattern, f" {self.python_version}", classifier
1659
- )
1660
- classifiers.append(classifier)
1661
- pkg_toml_config["project"]["classifiers"] = classifiers
1662
- if requires_python:
1663
- pkg_toml_config["project"]["requires-python"] = requires_python
1013
+ if current_section and keep_section and len("\n".join(result)) < target_size:
1014
+ result.extend(current_section)
1664
1015
 
1665
- def _save_pkg_toml(self, pkg_toml_config: dict[str, t.Any]) -> None:
1666
- if self.pkg_toml_path:
1667
- self.pkg_toml_path.write_text(dumps(pkg_toml_config))
1016
+ def _add_compression_notice(self, content: str) -> str:
1017
+ notice = """
1018
+ *Note: This CLAUDE.md has been automatically compressed by Crackerjack to optimize for Claude Code usage.
1019
+ Complete documentation is available in the source repository.*
1668
1020
 
1669
- def copy_configs(self) -> None:
1670
- configs_to_add: list[str] = []
1671
- for config in config_files:
1672
- config_path = self.our_path / config
1673
- pkg_config_path = self.pkg_path / config
1674
- pkg_config_path.touch()
1675
- if self.pkg_path.stem == "crackerjack":
1676
- config_path.write_text(pkg_config_path.read_text())
1677
- continue
1678
- if config != ".gitignore":
1679
- pkg_config_path.write_text(
1680
- config_path.read_text().replace("crackerjack", self.pkg_name)
1681
- )
1682
- configs_to_add.append(config)
1683
- if configs_to_add:
1684
- self.execute_command(["git", "add"] + configs_to_add)
1021
+ """
1685
1022
 
1686
- def copy_documentation_templates(self, force_update: bool = False) -> None:
1687
- docs_to_add: list[str] = []
1688
- for doc_file in documentation_files:
1689
- doc_path = self.our_path / doc_file
1690
- pkg_doc_path = self.pkg_path / doc_file
1691
- if not doc_path.exists():
1692
- continue
1693
- if self.pkg_path.stem == "crackerjack":
1694
- continue
1695
- should_update = force_update or not pkg_doc_path.exists()
1696
- if should_update:
1697
- pkg_doc_path.touch()
1698
- content = doc_path.read_text(encoding="utf-8")
1699
- updated_content = self._customize_documentation_content(
1700
- content, doc_file
1701
- )
1702
- pkg_doc_path.write_text(updated_content, encoding="utf-8")
1703
- docs_to_add.append(doc_file)
1704
- self.console.print(
1705
- f"[green]📋[/green] Updated {doc_file} with latest Crackerjack quality standards"
1706
- )
1707
- if docs_to_add:
1708
- self.execute_command(["git", "add"] + docs_to_add)
1023
+ lines = content.split("\n")
1024
+ if len(lines) > 5:
1025
+ lines.insert(5, notice)
1709
1026
 
1710
- def _customize_documentation_content(self, content: str, filename: str) -> str:
1711
- if filename == "CLAUDE.md":
1712
- return self._customize_claude_md(content)
1713
- elif filename == "RULES.md":
1714
- return self._customize_rules_md(content)
1715
- return content
1027
+ return "\n".join(lines)
1716
1028
 
1717
- def _customize_claude_md(self, content: str) -> str:
1029
+ def _customize_claude_md(self, content: str, compress: bool = False) -> str:
1718
1030
  project_name = self.pkg_name
1719
1031
  content = content.replace("crackerjack", project_name).replace(
1720
1032
  "Crackerjack", project_name.title()
@@ -1737,9 +1049,13 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
1737
1049
 
1738
1050
  if start_idx > 0:
1739
1051
  relevant_content = "\n".join(lines[start_idx:])
1740
- return header + relevant_content
1052
+ full_content = header + relevant_content
1053
+ else:
1054
+ full_content = header + content
1741
1055
 
1742
- return header + content
1056
+ if compress:
1057
+ return self._compress_claude_md(full_content)
1058
+ return full_content
1743
1059
 
1744
1060
  def _customize_rules_md(self, content: str) -> str:
1745
1061
  project_name = self.pkg_name
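
Note on the new compression path: the hunks above add a `compress_docs` flow that trims a copied CLAUDE.md down to a target size by walking it section by section, keeping only sections whose heading matches an "essential" prefix list, and then inserting a short compression notice near the top. A minimal standalone sketch of that selection loop is below; the `ESSENTIAL_SECTIONS` values and the 10 KB target are illustrative assumptions, not values taken from the package.

    # Sketch of the section-based compression used by _compress_claude_md (assumed values).
    ESSENTIAL_SECTIONS = ("## Quality Standards", "## Development Workflow")  # assumption
    TARGET_SIZE = 10_000  # assumption: rough character budget for the compressed file

    def compress_markdown(content: str) -> str:
        result: list[str] = []
        current: list[str] = []
        keep = False
        for line in content.split("\n"):
            if any(line.startswith(s) for s in ESSENTIAL_SECTIONS):
                if current and keep:
                    result.extend(current)
                current, keep = [line], True      # start of an essential section
            elif line.startswith(("## ", "### ")):
                if current and keep:
                    result.extend(current)
                current, keep = [line], False     # start of a non-essential section
            else:
                current.append(line)              # body line of the current section
            if len("\n".join(result)) > TARGET_SIZE:
                break                             # stop once the target size is reached
        if current and keep and len("\n".join(result)) < TARGET_SIZE:
            result.extend(current)                # flush the last kept section
        return "\n".join(result)
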
@@ -1833,7 +1149,30 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
1833
1149
 
1834
1150
  return env_vars
1835
1151
 
1152
+ def _cleanup_legacy_config_files(self) -> None:
1153
+ legacy_config_files = [
1154
+ ".pre-commit-config.yaml",
1155
+ ".pre-commit-config-ai.yaml",
1156
+ ".pre-commit-config-fast.yaml",
1157
+ ]
1158
+ removed_files = []
1159
+ for config_file in legacy_config_files:
1160
+ config_path = Path(config_file)
1161
+ if config_path.exists():
1162
+ try:
1163
+ config_path.unlink()
1164
+ removed_files.append(config_file)
1165
+ except OSError as e:
1166
+ self.console.print(
1167
+ f"[yellow]Warning: Could not remove {config_file}: {e}[/yellow]"
1168
+ )
1169
+ if removed_files:
1170
+ self.console.print(
1171
+ f"[dim]🧹 Cleaned up legacy config files: {', '.join(removed_files)}[/dim]"
1172
+ )
1173
+
1836
1174
  def update_pkg_configs(self) -> None:
1175
+ self._cleanup_legacy_config_files()
1837
1176
  self.config_manager.copy_configs()
1838
1177
  installed_pkgs = self.execute_command(
1839
1178
  ["uv", "pip", "list", "--freeze"], capture_output=True, text=True
@@ -1844,26 +1183,20 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
1844
1183
  "[bold bright_blue]⚡ INIT[/bold bright_blue] [bold bright_white]First-time project setup[/bold bright_white]"
1845
1184
  )
1846
1185
  self.console.print("─" * 80 + "\n")
1847
- self.execute_command(["uv", "tool", "install", "keyring"])
1186
+ if self.options and getattr(self.options, "ai_agent", False):
1187
+ import subprocess
1188
+
1189
+ self.execute_command(
1190
+ ["uv", "tool", "install", "keyring"],
1191
+ capture_output=True,
1192
+ stderr=subprocess.DEVNULL,
1193
+ )
1194
+ else:
1195
+ self.execute_command(["uv", "tool", "install", "keyring"])
1848
1196
  self.execute_command(["git", "init"])
1849
1197
  self.execute_command(["git", "branch", "-m", "main"])
1850
1198
  self.execute_command(["git", "add", "pyproject.toml", "uv.lock"])
1851
1199
  self.execute_command(["git", "config", "advice.addIgnoredFile", "false"])
1852
- install_cmd = ["uv", "run", "pre-commit", "install"]
1853
- if hasattr(self, "options") and getattr(self.options, "ai_agent", False):
1854
- install_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])
1855
- else:
1856
- install_cmd.extend(["-c", ".pre-commit-config-fast.yaml"])
1857
- self.execute_command(install_cmd)
1858
- push_install_cmd = [
1859
- "uv",
1860
- "run",
1861
- "pre-commit",
1862
- "install",
1863
- "--hook-type",
1864
- "pre-push",
1865
- ]
1866
- self.execute_command(push_install_cmd)
1867
1200
  self.config_manager.update_pyproject_configs()
1868
1201
 
1869
1202
  def run_pre_commit(self) -> None:
@@ -1899,12 +1232,30 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
1899
1232
 
1900
1233
  def _select_precommit_config(self) -> str:
1901
1234
  if hasattr(self, "options"):
1902
- if getattr(self.options, "ai_agent", False):
1903
- return ".pre-commit-config-ai.yaml"
1235
+ experimental_hooks = getattr(self.options, "experimental_hooks", False)
1236
+ enable_pyrefly = getattr(self.options, "enable_pyrefly", False)
1237
+ enable_ty = getattr(self.options, "enable_ty", False)
1238
+ enabled_experimental = []
1239
+ if experimental_hooks:
1240
+ enabled_experimental = ["pyrefly", "ty"]
1241
+ else:
1242
+ if enable_pyrefly:
1243
+ enabled_experimental.append("pyrefly")
1244
+ if enable_ty:
1245
+ enabled_experimental.append("ty")
1246
+ if enabled_experimental:
1247
+ mode = "experimental"
1248
+ config_path = generate_config_for_mode(mode, enabled_experimental)
1904
1249
  elif getattr(self.options, "comprehensive", False):
1905
- return ".pre-commit-config.yaml"
1250
+ mode = "comprehensive"
1251
+ config_path = generate_config_for_mode(mode)
1252
+ else:
1253
+ mode = "fast"
1254
+ config_path = generate_config_for_mode(mode)
1906
1255
 
1907
- return ".pre-commit-config-fast.yaml"
1256
+ return str(config_path)
1257
+ config_path = generate_config_for_mode("fast")
1258
+ return str(config_path)
1908
1259
 
1909
1260
  def run_pre_commit_with_analysis(self) -> list[HookResult]:
1910
1261
  self.console.print("\n" + "-" * 80)
@@ -1927,7 +1278,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
1927
1278
  result = self.execute_command(cmd, capture_output=True, text=True)
1928
1279
  total_duration = time.time() - start_time
1929
1280
  hook_results = self._parse_hook_output(result.stdout, result.stderr)
1930
- if hasattr(self, "options") and getattr(self.options, "ai_agent", False):
1281
+ if self.options and getattr(self.options, "ai_agent", False):
1931
1282
  self._generate_hooks_analysis(hook_results, total_duration)
1932
1283
  self._generate_quality_metrics()
1933
1284
  self._generate_project_structure_analysis()
@@ -2049,7 +1400,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
2049
1400
  return suggestions
2050
1401
 
2051
1402
  def _generate_quality_metrics(self) -> None:
2052
- if not (hasattr(self, "options") and getattr(self.options, "ai_agent", False)):
1403
+ if not (self.options and getattr(self.options, "ai_agent", False)):
2053
1404
  return
2054
1405
  metrics = {
2055
1406
  "project_info": {
@@ -2224,7 +1575,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
2224
1575
  return recommendations
2225
1576
 
2226
1577
  def _generate_project_structure_analysis(self) -> None:
2227
- if not (hasattr(self, "options") and getattr(self.options, "ai_agent", False)):
1578
+ if not (self.options and getattr(self.options, "ai_agent", False)):
2228
1579
  return
2229
1580
  structure = {
2230
1581
  "project_overview": {
@@ -2248,7 +1599,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
2248
1599
  )
2249
1600
 
2250
1601
  def _generate_error_context_analysis(self) -> None:
2251
- if not (hasattr(self, "options") and getattr(self.options, "ai_agent", False)):
1602
+ if not (self.options and getattr(self.options, "ai_agent", False)):
2252
1603
  return
2253
1604
  context = {
2254
1605
  "analysis_info": {
@@ -2269,7 +1620,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
2269
1620
  )
2270
1621
 
2271
1622
  def _generate_ai_agent_summary(self) -> None:
2272
- if not (hasattr(self, "options") and getattr(self.options, "ai_agent", False)):
1623
+ if not (self.options and getattr(self.options, "ai_agent", False)):
2273
1624
  return
2274
1625
  summary = {
2275
1626
  "analysis_summary": {
@@ -2549,7 +1900,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
2549
1900
  raise SystemExit(1)
2550
1901
  else:
2551
1902
  self.console.print(
2552
- "\n[bold bright_green] Pre-commit passed all checks![/bold bright_green]"
1903
+ "\n[bold bright_green]🏆 Pre-commit passed all checks![/bold bright_green]"
2553
1904
  )
2554
1905
 
2555
1906
  async def run_pre_commit_with_analysis_async(self) -> list[HookResult]:
@@ -2595,7 +1946,7 @@ class ProjectManager(BaseModel, arbitrary_types_allowed=True):
2595
1946
  raise SystemExit(1)
2596
1947
  else:
2597
1948
  self.console.print(
2598
- "\n[bold bright_green] Pre-commit passed all checks![/bold bright_green]"
1949
+ "\n[bold bright_green]🏆 Pre-commit passed all checks![/bold bright_green]"
2599
1950
  )
2600
1951
  self._generate_analysis_files(hook_results)
2601
1952
 
@@ -2686,6 +2037,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
2686
2037
  config_manager: ConfigManager | None = None
2687
2038
  project_manager: ProjectManager | None = None
2688
2039
  session_tracker: SessionTracker | None = None
2040
+ options: t.Any = None
2689
2041
  _file_cache: dict[str, list[Path]] = {}
2690
2042
  _file_cache_with_mtime: dict[str, tuple[float, list[Path]]] = {}
2691
2043
  _state_file: Path = Path(".crackerjack-state")
@@ -2733,6 +2085,12 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
2733
2085
 
2734
2086
  self._state_file.write_text(json.dumps(state, indent=2), encoding="utf-8")
2735
2087
 
2088
+ def _get_state(self) -> dict[str, t.Any]:
2089
+ return self._read_state()
2090
+
2091
+ def _save_state(self, state: dict[str, t.Any]) -> None:
2092
+ self._write_state(state)
2093
+
2736
2094
  def _clear_state(self) -> None:
2737
2095
  if self._state_file.exists():
2738
2096
  from contextlib import suppress
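
`_get_state` and `_save_state` are thin wrappers over the existing `.crackerjack-state` JSON read/write helpers; later in this diff they gate whether `uv build` is re-run when publishing is retried. The file is assumed to hold a flat JSON object, for example:

    import json
    from pathlib import Path

    state_file = Path(".crackerjack-state")
    state = json.loads(state_file.read_text()) if state_file.exists() else {}
    state["build_completed"] = True  # key consulted by the publish retry logic further down
    state_file.write_text(json.dumps(state, indent=2))
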
@@ -2782,6 +2140,79 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
2782
2140
  return data.get("project", {}).get("version", "unknown")
2783
2141
  return "unknown"
2784
2142
 
2143
+ def _create_git_tag(self, version: str | None = None) -> None:
2144
+ if version is None:
2145
+ version = self._get_current_version()
2146
+ if version == "unknown":
2147
+ self.console.print(
2148
+ "[bold yellow]⚠️ Warning: Could not determine version for tagging[/bold yellow]"
2149
+ )
2150
+ return
2151
+ tag_name = f"v{version}"
2152
+ result = self.execute_command(
2153
+ ["git", "tag", "-l", tag_name], capture_output=True, text=True
2154
+ )
2155
+ if result.stdout.strip():
2156
+ self.console.print(
2157
+ f"[bold yellow]⚠️ Tag {tag_name} already exists, skipping tag creation[/bold yellow]"
2158
+ )
2159
+ return
2160
+ self.console.print(
2161
+ f"[bold bright_cyan]🏷️ Creating git tag: {tag_name}[/bold bright_cyan]"
2162
+ )
2163
+ package_name = self.pkg_path.stem.lower().replace("-", "_")
2164
+ tag_message = f"Release {package_name} v{version}"
2165
+ self.execute_command(["git", "tag", "-a", tag_name, "-m", tag_message])
2166
+ self.console.print(f"[bold green]✅ Created tag: {tag_name}[/bold green]")
2167
+
2168
+ def _push_git_tags(self) -> None:
2169
+ self.console.print(
2170
+ "[bold bright_cyan]🚀 Pushing tags to remote repository[/bold bright_cyan]"
2171
+ )
2172
+ try:
2173
+ self.execute_command(["git", "push", "origin", "--tags"])
2174
+ self.console.print("[bold green]✅ Tags pushed successfully[/bold green]")
2175
+ except Exception as e:
2176
+ self.console.print(
2177
+ f"[bold yellow]⚠️ Warning: Failed to push tags: {e}[/bold yellow]"
2178
+ )
2179
+
2180
+ def _verify_version_consistency(self) -> bool:
2181
+ current_version = self._get_current_version()
2182
+ if current_version == "unknown":
2183
+ self.console.print(
2184
+ "[bold yellow]⚠️ Warning: Could not determine current version from pyproject.toml[/bold yellow]"
2185
+ )
2186
+ return False
2187
+ try:
2188
+ result = self.execute_command(
2189
+ ["git", "describe", "--tags", "--abbrev=0"],
2190
+ capture_output=True,
2191
+ text=True,
2192
+ )
2193
+ latest_tag = result.stdout.strip()
2194
+ if latest_tag.startswith("v"):
2195
+ tag_version = latest_tag[1:]
2196
+ else:
2197
+ tag_version = latest_tag
2198
+ except Exception:
2199
+ self.console.print(
2200
+ "[bold bright_cyan]ℹ️ No git tags found - this appears to be the first release[/bold bright_cyan]"
2201
+ )
2202
+ return True
2203
+ if current_version != tag_version:
2204
+ self.console.print(
2205
+ f"[bold red]❌ Version mismatch detected:[/bold red]\n"
2206
+ f" pyproject.toml version: {current_version}\n"
2207
+ f" Latest git tag version: {tag_version}\n"
2208
+ f" These should match before committing or publishing."
2209
+ )
2210
+ return False
2211
+ self.console.print(
2212
+ f"[bold green]✅ Version consistency verified: {current_version}[/bold green]"
2213
+ )
2214
+ return True
2215
+
2785
2216
  def _setup_package(self) -> None:
2786
2217
  self.pkg_name = self.pkg_path.stem.lower().replace("-", "_")
2787
2218
  self.pkg_dir = self.pkg_path / self.pkg_name
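
Tag creation and the new version-consistency gate both key off `v{version}` tags. The check amounts to comparing `project.version` from pyproject.toml against `git describe --tags --abbrev=0` with a leading `v` stripped, and a repository with no tags passes as a first release. A minimal sketch, assuming the commands run from the project root:

    import subprocess
    import tomllib
    from pathlib import Path

    def pyproject_version() -> str:
        with Path("pyproject.toml").open("rb") as f:
            return tomllib.load(f).get("project", {}).get("version", "unknown")

    def latest_tag_version() -> str | None:
        proc = subprocess.run(
            ["git", "describe", "--tags", "--abbrev=0"], capture_output=True, text=True
        )
        if proc.returncode != 0:
            return None  # no tags yet: treated as a first release
        tag = proc.stdout.strip()
        return tag[1:] if tag.startswith("v") else tag

    def versions_consistent() -> bool:
        current, tagged = pyproject_version(), latest_tag_version()
        return tagged is None or current == tagged
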
@@ -2801,6 +2232,9 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
2801
2232
  assert self.project_manager is not None
2802
2233
  if not options.no_config_updates:
2803
2234
  self.project_manager.update_pkg_configs()
2235
+ self._run_automatic_updates()
2236
+ if self.pkg_path.stem != "crackerjack":
2237
+ self._check_and_update_crackerjack()
2804
2238
  result: CompletedProcess[str] = self.execute_command(
2805
2239
  ["uv", "sync"], capture_output=True, text=True
2806
2240
  )
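
When the host project is not crackerjack itself, setup now also attempts a self-upgrade, but only if crackerjack is actually declared in the project's dependencies or dev group (see `_check_and_update_crackerjack` in the next hunk). That declaration check is essentially a small tomllib lookup, sketched here on its own:

    import tomllib
    from pathlib import Path

    def declares_crackerjack(pyproject: Path = Path("pyproject.toml")) -> bool:
        if not pyproject.exists():
            return False
        with pyproject.open("rb") as f:
            config = tomllib.load(f)
        deps = config.get("project", {}).get("dependencies", [])
        dev = config.get("dependency-groups", {}).get("dev", [])
        return any(d.startswith("crackerjack") for d in deps + dev)
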
@@ -2813,6 +2247,133 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
2813
2247
  "\n\n[bold red]❌ UV sync failed. Is UV installed? Run `pipx install uv` and try again.[/bold red]\n\n"
2814
2248
  )
2815
2249
 
2250
+ def _run_automatic_updates(self) -> None:
2251
+ self.console.print("[dim]🔄 Checking for updates...[/dim]")
2252
+ self._upgrade_dependencies()
2253
+ self._update_hooks_if_needed()
2254
+
2255
+ def _upgrade_dependencies(self) -> None:
2256
+ try:
2257
+ result = self.execute_command(
2258
+ ["uv", "sync", "--upgrade"], capture_output=True, text=True
2259
+ )
2260
+ if result.returncode == 0:
2261
+ self._handle_upgrade_success(result)
2262
+ else:
2263
+ self.console.print(
2264
+ f"[yellow]⚠️ Dependency upgrade failed: {result.stderr}[/yellow]"
2265
+ )
2266
+ except Exception as e:
2267
+ self.console.print(f"[yellow]⚠️ Error upgrading dependencies: {e}[/yellow]")
2268
+
2269
+ def _handle_upgrade_success(
2270
+ self, result: "subprocess.CompletedProcess[str]"
2271
+ ) -> None:
2272
+ if "no changes" not in result.stdout.lower():
2273
+ self.console.print("[green]✅ Dependencies upgraded[/green]")
2274
+ self._show_upgrade_summary(result.stdout)
2275
+ else:
2276
+ self.console.print("[dim]✓ Dependencies already up to date[/dim]")
2277
+
2278
+ def _show_upgrade_summary(self, stdout: str) -> None:
2279
+ if stdout.strip():
2280
+ upgrade_lines = [line for line in stdout.split("\n") if "->" in line]
2281
+ if upgrade_lines:
2282
+ self.console.print(f"[dim]{len(upgrade_lines)} packages upgraded[/dim]")
2283
+
2284
+ def _update_hooks_if_needed(self) -> None:
2285
+ import time
2286
+ from pathlib import Path
2287
+
2288
+ marker_file = Path(".crackerjack-hooks-updated")
2289
+ current_time = time.time()
2290
+ week_seconds = 7 * 24 * 60 * 60
2291
+ should_update = True
2292
+ if marker_file.exists():
2293
+ try:
2294
+ last_update = float(marker_file.read_text().strip())
2295
+ if current_time - last_update < week_seconds:
2296
+ should_update = False
2297
+ except (ValueError, OSError):
2298
+ should_update = True
2299
+ if should_update:
2300
+ self._update_precommit_hooks()
2301
+ from contextlib import suppress
2302
+
2303
+ with suppress(OSError):
2304
+ marker_file.write_text(str(current_time))
2305
+ else:
2306
+ self.console.print("[dim]✓ Pre-commit hooks recently updated[/dim]")
2307
+
2308
+ def _update_precommit_hooks(self) -> None:
2309
+ try:
2310
+ result = self.execute_command(
2311
+ ["uv", "run", "pre-commit", "autoupdate"],
2312
+ capture_output=True,
2313
+ text=True,
2314
+ )
2315
+ if result.returncode == 0:
2316
+ if "updated" in result.stdout.lower():
2317
+ self.console.print("[green]✅ Pre-commit hooks updated[/green]")
2318
+ update_lines = [
2319
+ line for line in result.stdout.split("\n") if "->" in line
2320
+ ]
2321
+ if update_lines:
2322
+ self.console.print(
2323
+ f"[dim]{len(update_lines)} hooks updated[/dim]"
2324
+ )
2325
+ else:
2326
+ self.console.print(
2327
+ "[dim]✓ Pre-commit hooks already up to date[/dim]"
2328
+ )
2329
+ else:
2330
+ self.console.print(
2331
+ f"[yellow]⚠️ Pre-commit update failed: {result.stderr}[/yellow]"
2332
+ )
2333
+ except Exception as e:
2334
+ self.console.print(
2335
+ f"[yellow]⚠️ Error updating pre-commit hooks: {e}[/yellow]"
2336
+ )
2337
+
2338
+ def _check_and_update_crackerjack(self) -> None:
2339
+ try:
2340
+ import tomllib
2341
+ from pathlib import Path
2342
+
2343
+ pyproject_path = Path("pyproject.toml")
2344
+ if not pyproject_path.exists():
2345
+ return
2346
+ with pyproject_path.open("rb") as f:
2347
+ config = tomllib.load(f)
2348
+ dependencies = config.get("project", {}).get("dependencies", [])
2349
+ dev_dependencies = config.get("dependency-groups", {}).get("dev", [])
2350
+ has_crackerjack = any(
2351
+ dep.startswith("crackerjack") for dep in dependencies + dev_dependencies
2352
+ )
2353
+ if has_crackerjack:
2354
+ result = self.execute_command(
2355
+ ["uv", "sync", "--upgrade", "--upgrade-package", "crackerjack"],
2356
+ capture_output=True,
2357
+ text=True,
2358
+ )
2359
+ if result.returncode == 0:
2360
+ if "crackerjack" in result.stdout:
2361
+ self.console.print(
2362
+ "[green]✅ Crackerjack upgraded to latest version[/green]"
2363
+ )
2364
+ else:
2365
+ self.console.print(
2366
+ "[dim]✓ Crackerjack already up to date[/dim]"
2367
+ )
2368
+ else:
2369
+ self.console.print(
2370
+ f"[yellow]⚠️ Crackerjack update check failed: {result.stderr}[/yellow]"
2371
+ )
2372
+ except Exception as e:
2373
+ self.console.print(
2374
+ f"[yellow]⚠️ Error checking crackerjack updates: {e}[/yellow]"
2375
+ )
2376
+
2816
2377
  def _clean_project(self, options: t.Any) -> None:
2817
2378
  assert self.code_cleaner is not None
2818
2379
  if options.clean:
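
The automatic update path throttles `pre-commit autoupdate` with a timestamp marker file so it runs at most once a week. The pattern is simple enough to reuse elsewhere; a sketch under the same marker name, with the actual update command left as a placeholder comment:

    import time
    from pathlib import Path

    WEEK = 7 * 24 * 60 * 60
    marker = Path(".crackerjack-hooks-updated")

    def should_run() -> bool:
        if not marker.exists():
            return True
        try:
            return time.time() - float(marker.read_text().strip()) >= WEEK
        except (ValueError, OSError):
            return True  # unreadable marker: fall back to running the update

    if should_run():
        # run `uv run pre-commit autoupdate` here, then refresh the marker
        marker.write_text(str(time.time()))
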
@@ -3002,152 +2563,608 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3002
2563
  except (OSError, PermissionError):
3003
2564
  return "medium"
3004
2565
 
3005
- def _calculate_test_metrics(self, test_files: list[Path]) -> tuple[int, int]:
3006
- total_test_size = 0
3007
- slow_tests = 0
3008
- for test_file in test_files:
2566
+ def _calculate_test_metrics(self, test_files: list[Path]) -> tuple[int, int]:
2567
+ total_test_size = 0
2568
+ slow_tests = 0
2569
+ for test_file in test_files:
2570
+ try:
2571
+ size = test_file.stat().st_size
2572
+ total_test_size += size
2573
+ if size > 30_000 or "integration" in test_file.name.lower():
2574
+ slow_tests += 1
2575
+ except (OSError, PermissionError):
2576
+ continue
2577
+ return total_test_size, slow_tests
2578
+
2579
+ def _determine_test_complexity(
2580
+ self, test_count: int, avg_size: float, slow_ratio: float
2581
+ ) -> str:
2582
+ if test_count > 100 or avg_size > 25_000 or slow_ratio > 0.4:
2583
+ return "high"
2584
+ elif test_count > 50 or avg_size > 15_000 or slow_ratio > 0.2:
2585
+ return "medium"
2586
+ return "low"
2587
+
2588
+ def _analyze_test_workload(self) -> dict[str, t.Any]:
2589
+ try:
2590
+ test_files = self._get_cached_files_with_mtime("test_*.py")
2591
+ py_files = self._get_cached_files_with_mtime("*.py")
2592
+ total_test_size, slow_tests = self._calculate_test_metrics(test_files)
2593
+ avg_test_size = total_test_size / len(test_files) if test_files else 0
2594
+ slow_test_ratio = slow_tests / len(test_files) if test_files else 0
2595
+ complexity = self._determine_test_complexity(
2596
+ len(test_files), avg_test_size, slow_test_ratio
2597
+ )
2598
+ return {
2599
+ "total_files": len(py_files),
2600
+ "test_files": len(test_files),
2601
+ "total_test_size": total_test_size,
2602
+ "avg_test_size": avg_test_size,
2603
+ "slow_tests": slow_tests,
2604
+ "slow_test_ratio": slow_test_ratio,
2605
+ "complexity": complexity,
2606
+ }
2607
+ except (OSError, PermissionError):
2608
+ return {"complexity": "medium", "total_files": 0, "test_files": 0}
2609
+
2610
+ def _calculate_optimal_test_workers(self, workload: dict[str, t.Any]) -> int:
2611
+ import os
2612
+
2613
+ cpu_count = os.cpu_count() or 4
2614
+ if workload["complexity"] == "high":
2615
+ return min(cpu_count // 3, 2)
2616
+ elif workload["complexity"] == "medium":
2617
+ return min(cpu_count // 2, 4)
2618
+ return min(cpu_count, 8)
2619
+
2620
+ def _print_ai_agent_files(self, options: t.Any) -> None:
2621
+ if getattr(options, "ai_agent", False):
2622
+ self.console.print(
2623
+ "[bold bright_black]→ Structured test results: test-results.xml[/bold bright_black]"
2624
+ )
2625
+ self.console.print(
2626
+ "[bold bright_black]→ Coverage report: coverage.json[/bold bright_black]"
2627
+ )
2628
+ if options.benchmark or options.benchmark_regression:
2629
+ self.console.print(
2630
+ "[bold bright_black]→ Benchmark results: benchmark.json[/bold bright_black]"
2631
+ )
2632
+
2633
+ def _handle_test_failure(self, result: t.Any, options: t.Any) -> None:
2634
+ if result.stderr:
2635
+ self.console.print(result.stderr)
2636
+ self.console.print(
2637
+ "\n\n[bold bright_red]❌ Tests failed. Please fix errors.[/bold bright_red]\n"
2638
+ )
2639
+ self._print_ai_agent_files(options)
2640
+ raise SystemExit(1)
2641
+
2642
+ def _handle_test_success(self, options: t.Any) -> None:
2643
+ self.console.print(
2644
+ "\n\n[bold bright_green]🏆 Tests passed successfully![/bold bright_green]\n"
2645
+ )
2646
+ self._print_ai_agent_files(options)
2647
+
2648
+ def _run_tests(self, options: t.Any) -> None:
2649
+ if not options.test:
2650
+ return
2651
+ self.console.print("\n" + "-" * 80)
2652
+ self.console.print(
2653
+ "[bold bright_green]🧪 TESTING[/bold bright_green] [bold bright_white]Executing test suite[/bold bright_white]"
2654
+ )
2655
+ self.console.print("-" * 80 + "\n")
2656
+ test_command = self._prepare_pytest_command(options)
2657
+ result = self.execute_command(test_command, capture_output=True, text=True)
2658
+ if result.stdout:
2659
+ self.console.print(result.stdout)
2660
+ if result.returncode > 0:
2661
+ self._handle_test_failure(result, options)
2662
+ else:
2663
+ self._handle_test_success(options)
2664
+
2665
+ async def _run_tests_async(self, options: t.Any) -> None:
2666
+ if not options.test:
2667
+ return
2668
+ self.console.print("\n" + "-" * 80)
2669
+ self.console.print(
2670
+ "[bold bright_green]🧪 TESTING[/bold bright_green] [bold bright_white]Executing test suite (async optimized)[/bold bright_white]"
2671
+ )
2672
+ self.console.print("-" * 80 + "\n")
2673
+ test_command = self._prepare_pytest_command(options)
2674
+ result = await self.execute_command_async(test_command)
2675
+ if result.stdout:
2676
+ self.console.print(result.stdout)
2677
+ if result.returncode > 0:
2678
+ self._handle_test_failure(result, options)
2679
+ else:
2680
+ self._handle_test_success(options)
2681
+
2682
+ def _prompt_version_selection(self) -> str:
2683
+ from rich.prompt import Prompt
2684
+
2685
+ if self.options and getattr(self.options, "ai_agent", False):
2686
+ self.console.print(
2687
+ "[dim]AI agent mode: defaulting to patch version bump[/dim]"
2688
+ )
2689
+ return "patch"
2690
+ self.console.print(
2691
+ "\n[bold bright_yellow]📦 VERSION SELECTION[/bold bright_yellow]"
2692
+ )
2693
+ self.console.print("[dim]Select the type of version bump to perform:[/dim]\n")
2694
+ choices = {
2695
+ "1": ("patch", "Bug fixes and minor changes (0.1.0 → 0.1.1)"),
2696
+ "2": ("minor", "New features, backwards compatible (0.1.0 → 0.2.0)"),
2697
+ "3": ("major", "Breaking changes, major updates (0.1.0 → 1.0.0)"),
2698
+ }
2699
+ for key, (bump_type, description) in choices.items():
2700
+ self.console.print(
2701
+ f" [bold bright_cyan]{key}[/bold bright_cyan] {bump_type:<6} - {description}"
2702
+ )
2703
+ while True:
2704
+ choice = Prompt.ask(
2705
+ "\n[bold]Select version bump type",
2706
+ choices=list(choices.keys()),
2707
+ default="1",
2708
+ show_choices=False,
2709
+ )
2710
+ if choice in choices:
2711
+ selected_type = choices[choice][0]
2712
+ self.console.print(
2713
+ f"[green]✓ Selected: {selected_type} version bump[/green]"
2714
+ )
2715
+ return selected_type
2716
+ else:
2717
+ self.console.print(
2718
+ "[red]Invalid choice. Please select 1, 2, or 3.[/red]"
2719
+ )
2720
+
2721
+ def _bump_version(self, options: OptionsProtocol) -> None:
2722
+ if options.publish and str(options.publish) == "interactive":
2723
+ return self._handle_interactive_version_selection(options)
2724
+ for option in (options.publish, options.bump):
2725
+ if option:
2726
+ version_type = str(option)
2727
+ if self._has_version_been_bumped(version_type):
2728
+ self._display_version_already_bumped_message(version_type)
2729
+ return
2730
+ self._display_version_bump_message(option)
2731
+ if not self._confirm_version_bump_if_needed(option, version_type):
2732
+ return
2733
+ self.execute_command(["uv", "version", "--bump", option])
2734
+ self._mark_version_bumped(version_type)
2735
+ if not options.no_git_tags:
2736
+ self._create_git_tag()
2737
+ break
2738
+
2739
+ def _handle_interactive_version_selection(self, options: OptionsProtocol) -> None:
2740
+ selected_version = self._prompt_version_selection()
2741
+ from crackerjack.__main__ import BumpOption
2742
+
2743
+ options_dict = vars(options).copy()
2744
+ options_dict["publish"] = BumpOption(selected_version)
2745
+ from types import SimpleNamespace
2746
+
2747
+ temp_options = SimpleNamespace(**options_dict)
2748
+
2749
+ return self._bump_version(temp_options) # type: ignore[arg-type]
2750
+
2751
+ def _display_version_already_bumped_message(self, version_type: str) -> None:
2752
+ self.console.print("\n" + "-" * 80)
2753
+ self.console.print(
2754
+ f"[bold yellow]📦 VERSION[/bold yellow] [bold bright_white]Version already bumped ({version_type}), skipping to avoid duplicate bump[/bold bright_white]"
2755
+ )
2756
+ self.console.print("-" * 80 + "\n")
2757
+
2758
+ def _display_version_bump_message(self, option: t.Any) -> None:
2759
+ self.console.print("\n" + "-" * 80)
2760
+ self.console.print(
2761
+ f"[bold bright_magenta]📦 VERSION[/bold bright_magenta] [bold bright_white]Bumping {option} version[/bold bright_white]"
2762
+ )
2763
+ self.console.print("-" * 80 + "\n")
2764
+
2765
+ def _confirm_version_bump_if_needed(self, option: t.Any, version_type: str) -> bool:
2766
+ if version_type in ("minor", "major"):
2767
+ from rich.prompt import Confirm
2768
+
2769
+ if not Confirm.ask(
2770
+ f"Are you sure you want to bump the {option} version?",
2771
+ default=False,
2772
+ ):
2773
+ self.console.print(
2774
+ f"[bold yellow]⏭️ Skipping {option} version bump[/bold yellow]"
2775
+ )
2776
+ return False
2777
+ return True
2778
+
2779
+ def _validate_authentication_setup(self) -> None:
2780
+ import os
2781
+ import shutil
2782
+
2783
+ keyring_provider = self._get_keyring_provider()
2784
+ has_publish_token = bool(os.environ.get("UV_PUBLISH_TOKEN"))
2785
+ has_keyring = shutil.which("keyring") is not None
2786
+ self.console.print("[dim]🔐 Validating authentication setup...[/dim]")
2787
+ if has_publish_token:
2788
+ self._handle_publish_token_found()
2789
+ return
2790
+ if keyring_provider == "subprocess" and has_keyring:
2791
+ self._handle_keyring_validation()
2792
+ return
2793
+ if keyring_provider == "subprocess" and not has_keyring:
2794
+ self._handle_missing_keyring()
2795
+ if not keyring_provider:
2796
+ self._handle_no_keyring_provider()
2797
+
2798
+ def _handle_publish_token_found(self) -> None:
2799
+ self.console.print(
2800
+ "[dim] ✅ UV_PUBLISH_TOKEN environment variable found[/dim]"
2801
+ )
2802
+
2803
+ def _handle_keyring_validation(self) -> None:
2804
+ self.console.print(
2805
+ "[dim] ✅ Keyring provider configured and keyring executable found[/dim]"
2806
+ )
2807
+ try:
2808
+ result = self.execute_command(
2809
+ ["keyring", "get", "https://upload.pypi.org/legacy/", "__token__"],
2810
+ capture_output=True,
2811
+ text=True,
2812
+ )
2813
+ if result.returncode == 0:
2814
+ self.console.print("[dim] ✅ PyPI token found in keyring[/dim]")
2815
+ else:
2816
+ self.console.print(
2817
+ "[yellow] ⚠️ No PyPI token found in keyring - will prompt during publish[/yellow]"
2818
+ )
2819
+ except Exception:
2820
+ self.console.print(
2821
+ "[yellow] ⚠️ Could not check keyring - will attempt publish anyway[/yellow]"
2822
+ )
2823
+
2824
+ def _handle_missing_keyring(self) -> None:
2825
+ if not (self.options and getattr(self.options, "ai_agent", False)):
2826
+ self.console.print(
2827
+ "[yellow] ⚠️ Keyring provider set to 'subprocess' but keyring executable not found[/yellow]"
2828
+ )
2829
+ self.console.print(
2830
+ "[yellow] Install keyring: uv tool install keyring[/yellow]"
2831
+ )
2832
+
2833
+ def _handle_no_keyring_provider(self) -> None:
2834
+ if not (self.options and getattr(self.options, "ai_agent", False)):
2835
+ self.console.print(
2836
+ "[yellow] ⚠️ No keyring provider configured and no UV_PUBLISH_TOKEN set[/yellow]"
2837
+ )
2838
+
2839
+ def _get_keyring_provider(self) -> str | None:
2840
+ import os
2841
+ import tomllib
2842
+ from pathlib import Path
2843
+
2844
+ env_provider = os.environ.get("UV_KEYRING_PROVIDER")
2845
+ if env_provider:
2846
+ return env_provider
2847
+ for config_file in ("pyproject.toml", "uv.toml"):
2848
+ config_path = Path(config_file)
2849
+ if config_path.exists():
2850
+ try:
2851
+ with config_path.open("rb") as f:
2852
+ config = tomllib.load(f)
2853
+ return config.get("tool", {}).get("uv", {}).get("keyring-provider")
2854
+ except Exception:
2855
+ continue
2856
+
2857
+ return None
2858
+
2859
+ def _build_publish_command(self) -> list[str]:
2860
+ import os
2861
+
2862
+ cmd = ["uv", "publish"]
2863
+ publish_token = os.environ.get("UV_PUBLISH_TOKEN")
2864
+ if publish_token:
2865
+ cmd.extend(["--token", publish_token])
2866
+ keyring_provider = self._get_keyring_provider()
2867
+ if keyring_provider:
2868
+ cmd.extend(["--keyring-provider", keyring_provider])
2869
+
2870
+ return cmd
2871
+
2872
+ def _publish_with_retry(self) -> None:
2873
+ max_retries = 2
2874
+ for attempt in range(max_retries):
3009
2875
  try:
3010
- size = test_file.stat().st_size
3011
- total_test_size += size
3012
- if size > 30_000 or "integration" in test_file.name.lower():
3013
- slow_tests += 1
3014
- except (OSError, PermissionError):
3015
- continue
3016
- return total_test_size, slow_tests
2876
+ result = self._attempt_publish()
2877
+ if result.returncode == 0:
2878
+ self._verify_pypi_upload()
2879
+ return
2880
+ if not self._handle_publish_failure(result, attempt, max_retries):
2881
+ raise SystemExit(1)
2882
+ except SystemExit:
2883
+ if attempt < max_retries - 1:
2884
+ continue
2885
+ raise
3017
2886
 
3018
- def _determine_test_complexity(
3019
- self, test_count: int, avg_size: float, slow_ratio: float
3020
- ) -> str:
3021
- if test_count > 100 or avg_size > 25_000 or slow_ratio > 0.4:
3022
- return "high"
3023
- elif test_count > 50 or avg_size > 15_000 or slow_ratio > 0.2:
3024
- return "medium"
3025
- return "low"
2887
+ def _attempt_publish(self) -> "subprocess.CompletedProcess[str]":
2888
+ self._validate_authentication_setup()
2889
+ publish_cmd = self._build_publish_command()
2890
+ self.console.print("[dim]📤 Uploading package to PyPI...[/dim]")
2891
+ import subprocess
2892
+ import time
2893
+ from threading import Thread
3026
2894
 
3027
- def _analyze_test_workload(self) -> dict[str, t.Any]:
3028
- try:
3029
- test_files = self._get_cached_files_with_mtime("test_*.py")
3030
- py_files = self._get_cached_files_with_mtime("*.py")
3031
- total_test_size, slow_tests = self._calculate_test_metrics(test_files)
3032
- avg_test_size = total_test_size / len(test_files) if test_files else 0
3033
- slow_test_ratio = slow_tests / len(test_files) if test_files else 0
3034
- complexity = self._determine_test_complexity(
3035
- len(test_files), avg_test_size, slow_test_ratio
3036
- )
3037
- return {
3038
- "total_files": len(py_files),
3039
- "test_files": len(test_files),
3040
- "total_test_size": total_test_size,
3041
- "avg_test_size": avg_test_size,
3042
- "slow_tests": slow_tests,
3043
- "slow_test_ratio": slow_test_ratio,
3044
- "complexity": complexity,
3045
- }
3046
- except (OSError, PermissionError):
3047
- return {"complexity": "medium", "total_files": 0, "test_files": 0}
2895
+ from rich.live import Live
2896
+ from rich.spinner import Spinner
3048
2897
 
3049
- def _calculate_optimal_test_workers(self, workload: dict[str, t.Any]) -> int:
3050
- import os
2898
+ result: subprocess.CompletedProcess[str] | None = None
2899
+ start_time = time.time()
3051
2900
 
3052
- cpu_count = os.cpu_count() or 4
3053
- if workload["complexity"] == "high":
3054
- return min(cpu_count // 3, 2)
3055
- elif workload["complexity"] == "medium":
3056
- return min(cpu_count // 2, 4)
3057
- return min(cpu_count, 8)
2901
+ def run_publish() -> None:
2902
+ nonlocal result
2903
+ result = self.execute_command(publish_cmd, capture_output=True, text=True)
3058
2904
 
3059
- def _print_ai_agent_files(self, options: t.Any) -> None:
3060
- if getattr(options, "ai_agent", False):
2905
+ publish_thread = Thread(target=run_publish)
2906
+ publish_thread.start()
2907
+
2908
+ elapsed_time = 0
2909
+ while publish_thread.is_alive():
2910
+ elapsed_time = time.time() - start_time
2911
+
2912
+ if elapsed_time < 5:
2913
+ text = "[dim]📤 Uploading to PyPI...[/dim]"
2914
+ elif elapsed_time < 15:
2915
+ text = "[dim]📤 Uploading to PyPI... (this may take a moment)[/dim]"
2916
+ else:
2917
+ text = "[dim]📤 Uploading to PyPI... (large package or slow connection)[/dim]"
2918
+
2919
+ spinner = Spinner("dots", text=text)
2920
+ with Live(spinner, refresh_per_second=10, transient=True):
2921
+ time.sleep(0.5)
2922
+
2923
+ if not publish_thread.is_alive():
2924
+ break
2925
+
2926
+ publish_thread.join()
2927
+
2928
+ elapsed_time = time.time() - start_time
2929
+
2930
+ if result and result.returncode == 0:
3061
2931
  self.console.print(
3062
- "[bold bright_black] Structured test results: test-results.xml[/bold bright_black]"
2932
+ f"[green] Package uploaded successfully! ({elapsed_time:.1f}s)[/green]"
3063
2933
  )
3064
- self.console.print(
3065
- "[bold bright_black] Coverage report: coverage.json[/bold bright_black]"
2934
+ elif result and result.returncode != 0:
2935
+ self.console.print(f"[red] Upload failed after {elapsed_time:.1f}s[/red]")
2936
+ if result.stdout:
2937
+ self.console.print(f"[dim]stdout: {result.stdout}[/dim]")
2938
+ if result.stderr:
2939
+ self.console.print(f"[red]stderr: {result.stderr}[/red]")
2940
+
2941
+ if result is None:
2942
+ return subprocess.CompletedProcess(
2943
+ args=publish_cmd,
2944
+ returncode=1,
2945
+ stdout="",
2946
+ stderr="Thread execution failed",
3066
2947
  )
3067
- if options.benchmark or options.benchmark_regression:
3068
- self.console.print(
3069
- "[bold bright_black]→ Benchmark results: benchmark.json[/bold bright_black]"
3070
- )
3071
2948
 
3072
- def _handle_test_failure(self, result: t.Any, options: t.Any) -> None:
3073
- if result.stderr:
3074
- self.console.print(result.stderr)
2949
+ return result
2950
+
2951
+ def _verify_pypi_upload(self) -> None:
2952
+ if self.options and getattr(self.options, "ai_agent", False):
2953
+ return
2954
+ import time
2955
+
2956
+ package_name = self._get_package_name()
2957
+ current_version = self._get_current_version()
3075
2958
  self.console.print(
3076
- "\n\n[bold bright_red] Tests failed. Please fix errors.[/bold bright_red]\n"
2959
+ f"[dim]🔍 Verifying upload of {package_name} v{current_version}...[/dim]"
3077
2960
  )
3078
- self._print_ai_agent_files(options)
3079
- raise SystemExit(1)
2961
+ time.sleep(2)
2962
+ self._retry_pypi_verification(package_name, current_version)
3080
2963
 
3081
- def _handle_test_success(self, options: t.Any) -> None:
2964
+ def _retry_pypi_verification(self, package_name: str, current_version: str) -> None:
2965
+ import time
2966
+
2967
+ max_attempts = 3
2968
+ for attempt in range(max_attempts):
2969
+ try:
2970
+ if self._check_pypi_package_exists(package_name, current_version):
2971
+ self._show_pypi_success(package_name, current_version)
2972
+ return
2973
+ if attempt < max_attempts - 1:
2974
+ self._show_pypi_retry_message(attempt, max_attempts)
2975
+ time.sleep(5)
2976
+ continue
2977
+ else:
2978
+ self._show_pypi_not_visible(package_name, current_version)
2979
+ return
2980
+ except Exception as e:
2981
+ if attempt < max_attempts - 1:
2982
+ self._show_pypi_error_retry(attempt, max_attempts, e)
2983
+ time.sleep(5)
2984
+ continue
2985
+ else:
2986
+ self._show_pypi_final_error(package_name, current_version, e)
2987
+ return
2988
+
2989
+ def _check_pypi_package_exists(
2990
+ self, package_name: str, current_version: str
2991
+ ) -> bool:
2992
+ import json
2993
+ import urllib.error
2994
+ import urllib.request
2995
+
2996
+ url = f"https://pypi.org/pypi/{package_name}/{current_version}/json"
2997
+ try:
2998
+ with urllib.request.urlopen(url, timeout=10) as response: # nosec B310
2999
+ data = json.loads(response.read().decode())
3000
+ return data.get("info", {}).get("version") == current_version
3001
+ except urllib.error.HTTPError as e:
3002
+ if e.code == 404:
3003
+ return False
3004
+ raise
3005
+
3006
+ def _show_pypi_success(self, package_name: str, current_version: str) -> None:
3082
3007
  self.console.print(
3083
- "\n\n[bold bright_green]✅ Tests passed successfully![/bold bright_green]\n"
3008
+ f"[green]✅ Verified: {package_name} v{current_version} is available on PyPI![/green]"
3084
3009
  )
3085
- self._print_ai_agent_files(options)
3010
+ pypi_url = f"https://pypi.org/project/{package_name}/{current_version}/"
3011
+ self.console.print(f"[dim] 📦 Package URL: {pypi_url}[/dim]")
3086
3012
 
3087
- def _run_tests(self, options: t.Any) -> None:
3088
- if not options.test:
3089
- return
3090
- self.console.print("\n" + "-" * 80)
3013
+ def _show_pypi_retry_message(self, attempt: int, max_attempts: int) -> None:
3091
3014
  self.console.print(
3092
- "[bold bright_green]🧪 TESTING[/bold bright_green] [bold bright_white]Executing test suite[/bold bright_white]"
3015
+ f"[yellow] Package not yet available on PyPI (attempt {attempt + 1}/{max_attempts}), retrying...[/yellow]"
3093
3016
  )
3094
- self.console.print("-" * 80 + "\n")
3095
- test_command = self._prepare_pytest_command(options)
3096
- result = self.execute_command(test_command, capture_output=True, text=True)
3097
- if result.stdout:
3098
- self.console.print(result.stdout)
3099
- if result.returncode > 0:
3100
- self._handle_test_failure(result, options)
3101
- else:
3102
- self._handle_test_success(options)
3103
3017
 
3104
- async def _run_tests_async(self, options: t.Any) -> None:
3105
- if not options.test:
3106
- return
3107
- self.console.print("\n" + "-" * 80)
3018
+ def _show_pypi_not_visible(self, package_name: str, current_version: str) -> None:
3108
3019
  self.console.print(
3109
- "[bold bright_green]🧪 TESTING[/bold bright_green] [bold bright_white]Executing test suite (async optimized)[/bold bright_white]"
3020
+ "[yellow]⚠️ Package uploaded but not yet visible on PyPI (this is normal)[/yellow]"
3110
3021
  )
3111
- self.console.print("-" * 80 + "\n")
3112
- test_command = self._prepare_pytest_command(options)
3113
- result = await self.execute_command_async(test_command)
3114
- if result.stdout:
3115
- self.console.print(result.stdout)
3116
- if result.returncode > 0:
3117
- self._handle_test_failure(result, options)
3022
+ self.console.print(
3023
+ f"[dim] Check later at: https://pypi.org/project/{package_name}/{current_version}/[/dim]"
3024
+ )
3025
+
3026
+ def _show_pypi_error_retry(
3027
+ self, attempt: int, max_attempts: int, error: Exception
3028
+ ) -> None:
3029
+ self.console.print(
3030
+ f"[yellow]⏳ Error checking PyPI (attempt {attempt + 1}/{max_attempts}): {error}[/yellow]"
3031
+ )
3032
+
3033
+ def _show_pypi_final_error(
3034
+ self, package_name: str, current_version: str, error: Exception
3035
+ ) -> None:
3036
+ self.console.print(f"[yellow]⚠️ Could not verify PyPI upload: {error}[/yellow]")
3037
+ self.console.print(
3038
+ f"[dim] Check manually at: https://pypi.org/project/{package_name}/{current_version}/[/dim]"
3039
+ )
3040
+
3041
+ def _get_package_name(self) -> str:
3042
+ import tomllib
3043
+ from pathlib import Path
3044
+
3045
+ pyproject_path = Path("pyproject.toml")
3046
+ if pyproject_path.exists():
3047
+ with pyproject_path.open("rb") as f:
3048
+ data = tomllib.load(f)
3049
+ return data.get("project", {}).get("name", "unknown")
3050
+ return "unknown"
3051
+
3052
+ def _handle_publish_failure(
3053
+ self, result: "subprocess.CompletedProcess[str]", attempt: int, max_retries: int
3054
+ ) -> bool:
3055
+ if self._is_auth_error(result):
3056
+ return self._handle_auth_error(attempt, max_retries)
3118
3057
  else:
3119
- self._handle_test_success(options)
3058
+ self._handle_non_auth_error(result)
3059
+ return False
3120
3060
 
3121
- def _bump_version(self, options: OptionsProtocol) -> None:
3122
- for option in (options.publish, options.bump):
3123
- if option:
3124
- version_type = str(option)
3125
- if self._has_version_been_bumped(version_type):
3126
- self.console.print("\n" + "-" * 80)
3061
+ def _handle_auth_error(self, attempt: int, max_retries: int) -> bool:
3062
+ if attempt < max_retries - 1:
3063
+ self.console.print(
3064
+ f"[yellow]⚠️ Authentication failed (attempt {attempt + 1}/{max_retries})[/yellow]"
3065
+ )
3066
+ return self._prompt_for_token()
3067
+ self._display_authentication_help()
3068
+ return False
3069
+
3070
+ def _handle_non_auth_error(
3071
+ self, result: "subprocess.CompletedProcess[str]"
3072
+ ) -> None:
3073
+ self.console.print(result.stdout)
3074
+ self.console.print(result.stderr)
3075
+
3076
+ def _is_auth_error(self, result: "subprocess.CompletedProcess[str]") -> bool:
3077
+ error_text = (result.stdout + result.stderr).lower()
3078
+ auth_indicators = (
3079
+ "authentication",
3080
+ "unauthorized",
3081
+ "403",
3082
+ "401",
3083
+ "invalid credentials",
3084
+ "token",
3085
+ "password",
3086
+ "username",
3087
+ )
3088
+ return any(indicator in error_text for indicator in auth_indicators)
3089
+
3090
+ def _prompt_for_token(self) -> bool:
3091
+ import getpass
3092
+ import os
3093
+ import shutil
3094
+
3095
+ if self.options and getattr(self.options, "ai_agent", False):
3096
+ return False
3097
+ self.console.print("\n[bold yellow]🔐 PyPI Token Required[/bold yellow]")
3098
+ self.console.print(
3099
+ "[dim]Please enter your PyPI token (starts with 'pypi-'):[/dim]"
3100
+ )
3101
+ try:
3102
+ token = getpass.getpass("PyPI Token: ")
3103
+ if not token or not token.startswith("pypi-"):
3104
+ self.console.print(
3105
+ "[red]❌ Invalid token format. Token must start with 'pypi-'[/red]"
3106
+ )
3107
+ return False
3108
+ if shutil.which("keyring"):
3109
+ try:
3110
+ result = self.execute_command(
3111
+ [
3112
+ "keyring",
3113
+ "set",
3114
+ "https://upload.pypi.org/legacy/",
3115
+ "__token__",
3116
+ ],
3117
+ input=token,
3118
+ capture_output=True,
3119
+ text=True,
3120
+ )
3121
+ if result.returncode == 0:
3122
+ self.console.print("[green]✅ Token stored in keyring[/green]")
3123
+ else:
3124
+ os.environ["UV_PUBLISH_TOKEN"] = token
3125
+ self.console.print(
3126
+ "[yellow]⚠️ Keyring storage failed, using environment variable[/yellow]"
3127
+ )
3128
+ except Exception:
3129
+ os.environ["UV_PUBLISH_TOKEN"] = token
3127
3130
  self.console.print(
3128
- f"[bold yellow]📦 VERSION[/bold yellow] [bold bright_white]Version already bumped ({version_type}), skipping to avoid duplicate bump[/bold bright_white]"
3131
+ "[yellow]⚠️ Keyring storage failed, using environment variable[/yellow]"
3129
3132
  )
3130
- self.console.print("-" * 80 + "\n")
3131
- return
3132
- self.console.print("\n" + "-" * 80)
3133
+ else:
3134
+ os.environ["UV_PUBLISH_TOKEN"] = token
3133
3135
  self.console.print(
3134
- f"[bold bright_magenta]📦 VERSION[/bold bright_magenta] [bold bright_white]Bumping {option} version[/bold bright_white]"
3136
+ "[yellow]⚠️ Keyring not available, using environment variable[/yellow]"
3135
3137
  )
3136
- self.console.print("-" * 80 + "\n")
3137
- if version_type in ("minor", "major"):
3138
- from rich.prompt import Confirm
3139
3138
 
3140
- if not Confirm.ask(
3141
- f"Are you sure you want to bump the {option} version?",
3142
- default=False,
3143
- ):
3144
- self.console.print(
3145
- f"[bold yellow]⏭️ Skipping {option} version bump[/bold yellow]"
3146
- )
3147
- return
3148
- self.execute_command(["uv", "version", "--bump", option])
3149
- self._mark_version_bumped(version_type)
3150
- break
3139
+ return True
3140
+ except KeyboardInterrupt:
3141
+ self.console.print("\n[yellow]⚠️ Token entry cancelled[/yellow]")
3142
+ return False
3143
+ except Exception as e:
3144
+ self.console.print(f"[red] Error storing token: {e}[/red]")
3145
+ return False
3146
+
3147
+ def _display_authentication_help(self) -> None:
3148
+ self.console.print(
3149
+ "\n[bold bright_red]❌ Publish failed. Run crackerjack again to retry publishing without re-bumping version.[/bold bright_red]"
3150
+ )
3151
+ if not (self.options and getattr(self.options, "ai_agent", False)):
3152
+ self.console.print("\n[bold yellow]🔐 Authentication Help:[/bold yellow]")
3153
+ self.console.print(" [dim]To fix authentication issues, you can:[/dim]")
3154
+ self.console.print(
3155
+ " [dim]1. Set PyPI token: export UV_PUBLISH_TOKEN=pypi-your-token-here[/dim]"
3156
+ )
3157
+ self.console.print(
3158
+ " [dim]2. Install keyring: uv tool install keyring[/dim]"
3159
+ )
3160
+ self.console.print(
3161
+ " [dim]3. Store token in keyring: keyring set https://upload.pypi.org/legacy/ __token__[/dim]"
3162
+ )
3163
+ self.console.print(
3164
+ " [dim]4. Ensure keyring-provider is set in pyproject.toml:[/dim]"
3165
+ )
3166
+ self.console.print(" [dim] [tool.uv][/dim]")
3167
+ self.console.print(' [dim] keyring-provider = "subprocess"[/dim]')
3151
3168
 
3152
3169
  def _publish_project(self, options: OptionsProtocol) -> None:
3153
3170
  if options.publish:
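
Upload verification polls PyPI's JSON API for the exact name/version pair and retries a few times, since freshly uploaded releases can take several seconds to become visible. The request shape used by `_check_pypi_package_exists` can be exercised on its own:

    import json
    import urllib.error
    import urllib.request

    def on_pypi(package: str, version: str) -> bool:
        url = f"https://pypi.org/pypi/{package}/{version}/json"
        try:
            with urllib.request.urlopen(url, timeout=10) as resp:
                data = json.loads(resp.read().decode())
                return data.get("info", {}).get("version") == version
        except urllib.error.HTTPError as exc:
            if exc.code == 404:
                return False  # not indexed yet
            raise

    print(on_pypi("crackerjack", "0.30.3"))
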
@@ -3156,28 +3173,187 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3156
3173
  "[bold bright_cyan]🚀 PUBLISH[/bold bright_cyan] [bold bright_white]Building and publishing package[/bold bright_white]"
3157
3174
  )
3158
3175
  self.console.print("-" * 80 + "\n")
3159
- build = self.execute_command(
3160
- ["uv", "build"], capture_output=True, text=True
3161
- )
3162
- self.console.print(build.stdout)
3163
- if build.returncode > 0:
3164
- self.console.print(build.stderr)
3165
- self.console.print(
3166
- "[bold bright_red]❌ Build failed. Please fix errors.[/bold bright_red]"
3167
- )
3168
- raise SystemExit(1)
3169
- try:
3170
- self.execute_command(["uv", "publish"])
3171
- self._mark_publish_completed()
3172
- self._clear_state()
3173
- self.console.print(
3174
- "\n[bold bright_green]✅ Package published successfully![/bold bright_green]"
3176
+ if not options.skip_version_check:
3177
+ if not self._verify_version_consistency():
3178
+ self.console.print(
3179
+ "[bold red]❌ Publishing aborted due to version mismatch. Please ensure pyproject.toml version matches git tag.[/bold red]"
3180
+ )
3181
+ raise SystemExit(1)
3182
+ state = self._get_state()
3183
+ if not state.get("build_completed", False):
3184
+ build = self.execute_command(
3185
+ ["uv", "build"], capture_output=True, text=True
3175
3186
  )
3176
- except SystemExit:
3187
+ self.console.print(build.stdout)
3188
+ if build.returncode > 0:
3189
+ self.console.print(build.stderr)
3190
+ self.console.print(
3191
+ "[bold bright_red]❌ Build failed. Please fix errors.[/bold bright_red]"
3192
+ )
3193
+ raise SystemExit(1)
3194
+ state["build_completed"] = True
3195
+ self._save_state(state)
3196
+ else:
3177
3197
  self.console.print(
3178
- "\n[bold bright_red] Publish failed. Run crackerjack again to retry publishing without re-bumping version.[/bold bright_red]"
3198
+ "[dim]📦 Using existing build artifacts (retry mode)[/dim]"
3179
3199
  )
3180
- raise
3200
+ self._publish_with_retry()
3201
+ self._mark_publish_completed()
3202
+ self._clear_state()
3203
+ self.console.print(
3204
+ "\n[bold bright_green]🏆 Package published successfully![/bold bright_green]"
3205
+ )
3206
+
3207
+ def _analyze_git_changes(self) -> dict[str, t.Any]:
3208
+ diff_result = self._get_git_diff_output()
3209
+ changes = self._parse_git_diff_output(diff_result)
3210
+ changes["stats"] = self._get_git_stats()
3211
+ return changes
3212
+
3213
+ def _get_git_diff_output(self) -> t.Any:
3214
+ diff_cmd = ["git", "diff", "--cached", "--name-status"]
3215
+ diff_result = self.execute_command(diff_cmd, capture_output=True, text=True)
3216
+ if not diff_result.stdout and diff_result.returncode == 0:
3217
+ diff_cmd = ["git", "diff", "--name-status"]
3218
+ diff_result = self.execute_command(diff_cmd, capture_output=True, text=True)
3219
+ return diff_result
3220
+
3221
+ def _parse_git_diff_output(self, diff_result: t.Any) -> dict[str, t.Any]:
3222
+ changes = {
3223
+ "added": [],
3224
+ "modified": [],
3225
+ "deleted": [],
3226
+ "renamed": [],
3227
+ "total_changes": 0,
3228
+ }
3229
+ if diff_result.returncode == 0 and diff_result.stdout:
3230
+ self._process_diff_lines(diff_result.stdout, changes)
3231
+ return changes
3232
+
3233
+ def _process_diff_lines(self, stdout: str, changes: dict[str, t.Any]) -> None:
3234
+ for line in stdout.strip().split("\n"):
3235
+ if not line:
3236
+ continue
3237
+ self._process_single_diff_line(line, changes)
3238
+
3239
+ def _process_single_diff_line(self, line: str, changes: dict[str, t.Any]) -> None:
3240
+ parts = line.split("\t")
3241
+ if len(parts) >= 2:
3242
+ status, filename = parts[0], parts[1]
3243
+ self._categorize_file_change(status, filename, parts, changes)
3244
+ changes["total_changes"] += 1
3245
+
3246
+ def _categorize_file_change(
3247
+ self, status: str, filename: str, parts: list[str], changes: dict[str, t.Any]
3248
+ ) -> None:
3249
+ if status == "A":
3250
+ changes["added"].append(filename)
3251
+ elif status == "M":
3252
+ changes["modified"].append(filename)
3253
+ elif status == "D":
3254
+ changes["deleted"].append(filename)
3255
+ elif status.startswith("R"):
3256
+ if len(parts) >= 3:
3257
+ changes["renamed"].append((parts[1], parts[2]))
3258
+ else:
3259
+ changes["renamed"].append((filename, "unknown"))
3260
+
3261
+ def _get_git_stats(self) -> str:
3262
+ stat_cmd = ["git", "diff", "--cached", "--stat"]
3263
+ stat_result = self.execute_command(stat_cmd, capture_output=True, text=True)
3264
+ if not stat_result.stdout and stat_result.returncode == 0:
3265
+ stat_cmd = ["git", "diff", "--stat"]
3266
+ stat_result = self.execute_command(stat_cmd, capture_output=True, text=True)
3267
+ return stat_result.stdout if stat_result.returncode == 0 else ""
3268
+
3269
+ def _categorize_changes(self, changes: dict[str, t.Any]) -> dict[str, list[str]]:
3270
+ categories = {
3271
+ "docs": [],
3272
+ "tests": [],
3273
+ "config": [],
3274
+ "core": [],
3275
+ "ci": [],
3276
+ "deps": [],
3277
+ }
3278
+ file_patterns = {
3279
+ "docs": ["README.md", "CLAUDE.md", "RULES.md", "docs/", ".md"],
3280
+ "tests": ["test_", "_test.py", "tests/", "conftest.py"],
3281
+ "config": ["pyproject.toml", ".yaml", ".yml", ".json", ".gitignore"],
3282
+ "ci": [".github/", "ci/", ".pre-commit"],
3283
+ "deps": ["requirements", "pyproject.toml", "uv.lock"],
3284
+ }
3285
+ for file_list in ("added", "modified", "deleted"):
3286
+ for filename in changes.get(file_list, []):
3287
+ categorized = False
3288
+ for category, patterns in file_patterns.items():
3289
+ if any(pattern in filename for pattern in patterns):
3290
+ categories[category].append(filename)
3291
+ categorized = True
3292
+ break
3293
+ if not categorized:
3294
+ categories["core"].append(filename)
3295
+
3296
+ return categories
3297
+
3298
+ def _get_primary_changes(self, categories: dict[str, list[str]]) -> list[str]:
3299
+ primary_changes = []
3300
+ category_mapping = [
3301
+ ("core", "core functionality"),
3302
+ ("tests", "tests"),
3303
+ ("docs", "documentation"),
3304
+ ("config", "configuration"),
3305
+ ("deps", "dependencies"),
3306
+ ]
3307
+ for key, label in category_mapping:
3308
+ if categories[key]:
3309
+ primary_changes.append(label)
3310
+
3311
+ return primary_changes or ["project files"]
3312
+
3313
+ def _determine_primary_action(self, changes: dict[str, t.Any]) -> str:
3314
+ added_count = len(changes["added"])
3315
+ modified_count = len(changes["modified"])
3316
+ deleted_count = len(changes["deleted"])
3317
+ if added_count > modified_count + deleted_count:
3318
+ return "Add"
3319
+ elif deleted_count > modified_count + added_count:
3320
+ return "Remove"
3321
+ elif changes["renamed"]:
3322
+ return "Refactor"
3323
+ return "Update"
3324
+
3325
+ def _generate_body_lines(self, changes: dict[str, t.Any]) -> list[str]:
3326
+ body_lines = []
3327
+ change_types = [
3328
+ ("added", "Added"),
3329
+ ("modified", "Modified"),
3330
+ ("deleted", "Deleted"),
3331
+ ("renamed", "Renamed"),
3332
+ ]
3333
+ for change_type, label in change_types:
3334
+ items = changes.get(change_type, [])
3335
+ if items:
3336
+ count = len(items)
3337
+ body_lines.append(f"- {label} {count} file(s)")
3338
+ if change_type not in ("deleted", "renamed"):
3339
+ for file in items[:3]:
3340
+ body_lines.append(f" * {file}")
3341
+ if count > 3:
3342
+ body_lines.append(f" * ... and {count - 3} more")
3343
+
3344
+ return body_lines
3345
+
3346
+ def _generate_commit_message(self, changes: dict[str, t.Any]) -> str:
3347
+ if changes["total_changes"] == 0:
3348
+ return "Update project files"
3349
+ categories = self._categorize_changes(changes)
3350
+ primary_changes = self._get_primary_changes(categories)
3351
+ primary_action = self._determine_primary_action(changes)
3352
+ commit_subject = f"{primary_action} {' and '.join(primary_changes[:2])}"
3353
+ body_lines = self._generate_body_lines(changes)
3354
+ if body_lines:
3355
+ return f"{commit_subject}\n\n" + "\n".join(body_lines)
3356
+ return commit_subject
3181
3357
 
3182
3358
  def _commit_and_push(self, options: OptionsProtocol) -> None:
3183
3359
  if options.commit:
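
Commit message generation in the hunk above is a two-step heuristic: pick a verb from the add/modify/delete counts, then name the dominant change categories. A compressed sketch of that heuristic over a parsed `git diff --name-status` result, with the category patterns abbreviated from the fuller table above:

    def suggest_subject(changes: dict[str, list[str]]) -> str:
        added, modified, deleted = (len(changes.get(k, [])) for k in ("added", "modified", "deleted"))
        if added > modified + deleted:
            action = "Add"
        elif deleted > modified + added:
            action = "Remove"
        elif changes.get("renamed"):
            action = "Refactor"
        else:
            action = "Update"
        touched = changes.get("added", []) + changes.get("modified", []) + changes.get("deleted", [])
        areas = []
        if any("test" in f for f in touched):
            areas.append("tests")
        if any(f.endswith(".md") for f in touched):
            areas.append("documentation")
        if not areas:
            areas = ["core functionality"]
        return f"{action} {' and '.join(areas[:2])}"

    example = {
        "added": ["tests/test_publish.py", "tests/test_tags.py"],
        "modified": ["README.md"],
        "deleted": [],
    }
    print(suggest_subject(example))  # "Add tests and documentation"
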
@@ -3186,23 +3362,73 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3186
3362
  "[bold bright_white]📝 COMMIT[/bold bright_white] [bold bright_white]Saving changes to git[/bold bright_white]"
3187
3363
  )
3188
3364
  self.console.print("-" * 80 + "\n")
3189
- commit_msg = input("\nCommit message: ")
3365
+ if not options.skip_version_check:
3366
+ if not self._verify_version_consistency():
3367
+ self.console.print(
3368
+ "[bold red]❌ Commit aborted due to version mismatch. Please ensure pyproject.toml version matches git tag.[/bold red]"
3369
+ )
3370
+ raise SystemExit(1)
3371
+ changes = self._analyze_git_changes()
3372
+ if changes["total_changes"] > 0:
3373
+ self.console.print("[dim]🔍 Analyzing changes...[/dim]\n")
3374
+ if changes["stats"]:
3375
+ self.console.print(changes["stats"])
3376
+ suggested_msg = self._generate_commit_message(changes)
3377
+ self.console.print(
3378
+ "\n[bold cyan]📋 Suggested commit message:[/bold cyan]"
3379
+ )
3380
+ self.console.print(f"[cyan]{suggested_msg}[/cyan]\n")
3381
+ user_choice = (
3382
+ input("Use suggested message? [Y/n/e to edit]: ").strip().lower()
3383
+ )
3384
+ if user_choice in ("", "y"):
3385
+ commit_msg = suggested_msg
3386
+ elif user_choice == "e":
3387
+ import os
3388
+ import tempfile
3389
+
3390
+ with tempfile.NamedTemporaryFile(
3391
+ mode="w", suffix=".txt", delete=False
3392
+ ) as f:
3393
+ f.write(suggested_msg)
3394
+ temp_path = f.name
3395
+ editor = os.environ.get("EDITOR", "vi")
3396
+ self.execute_command([editor, temp_path])
3397
+ with open(temp_path) as f:
3398
+ commit_msg = f.read().strip()
3399
+ Path(temp_path).unlink()
3400
+ else:
3401
+ commit_msg = input("\nEnter custom commit message: ")
3402
+ else:
3403
+ commit_msg = input("\nCommit message: ")
3190
3404
  self.execute_command(
3191
3405
  ["git", "commit", "-m", commit_msg, "--no-verify", "--", "."]
3192
3406
  )
3193
3407
  self.execute_command(["git", "push", "origin", "main", "--no-verify"])
3408
+ self._push_git_tags()
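The "e" branch above round-trips the suggested message through the user's editor via a throwaway file. A minimal standalone equivalent, as a sketch (the function name is hypothetical):

    import os
    import subprocess
    import tempfile
    from pathlib import Path

    def edit_message(initial: str) -> str:
        # Seed a temp file with the suggested message, open $EDITOR (vi fallback),
        # then read back whatever was saved and remove the file.
        with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as f:
            f.write(initial)
            temp_path = f.name
        editor = os.environ.get("EDITOR", "vi")
        subprocess.run([editor, temp_path], check=False)
        edited = Path(temp_path).read_text().strip()
        Path(temp_path).unlink()
        return edited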
3194
3409
 
3195
3410
  def _update_precommit(self, options: OptionsProtocol) -> None:
3196
3411
  if options.update_precommit:
3412
+ self.console.print(
3413
+ "\n[bold yellow]⚠️ DEPRECATION WARNING[/bold yellow]: The --update-precommit (-u) flag is deprecated.\n"
3414
+ " Pre-commit hooks are now updated automatically on a weekly basis.\n"
3415
+ " This manual update will still work but is no longer needed.\n"
3416
+ )
3197
3417
  self.console.print("\n" + "-" * 80)
3198
3418
  self.console.print(
3199
3419
  "[bold bright_blue]🔄 UPDATE[/bold bright_blue] [bold bright_white]Updating pre-commit hooks[/bold bright_white]"
3200
3420
  )
3201
3421
  self.console.print("-" * 80 + "\n")
3202
3422
  if self.pkg_path.stem == "crackerjack":
3203
- update_cmd = ["uv", "run", "pre-commit", "autoupdate"]
3204
- if getattr(options, "ai_agent", False):
3205
- update_cmd.extend(["-c", ".pre-commit-config-ai.yaml"])
3423
+ config_path = self.project_manager._select_precommit_config()
3424
+ update_cmd = [
3425
+ "uv",
3426
+ "run",
3427
+ "pre-commit",
3428
+ "autoupdate",
3429
+ "-c",
3430
+ config_path,
3431
+ ]
3206
3432
  self.execute_command(update_cmd)
3207
3433
  else:
3208
3434
  self.project_manager.update_precommit_hooks()
@@ -3215,7 +3441,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3215
3441
  )
3216
3442
  self.console.print("-" * 80 + "\n")
3217
3443
  self.config_manager.copy_documentation_templates(
3218
- force_update=options.force_update_docs
3444
+ force_update=options.force_update_docs,
3445
+ compress_docs=options.compress_docs,
3219
3446
  )
3220
3447
 
3221
3448
  def execute_command(
@@ -3268,6 +3495,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3268
3495
  "[bold bright_magenta]🔍 COMPREHENSIVE QUALITY[/bold bright_magenta] [bold bright_white]Running all quality checks before publish/commit[/bold bright_white]"
3269
3496
  )
3270
3497
  self.console.print("-" * 80 + "\n")
3498
+ config_path = self.project_manager._select_precommit_config()
3271
3499
  cmd = [
3272
3500
  "uv",
3273
3501
  "run",
@@ -3276,7 +3504,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3276
3504
  "--all-files",
3277
3505
  "--hook-stage=manual",
3278
3506
  "-c",
3279
- ".pre-commit-config.yaml",
3507
+ config_path,
3280
3508
  ]
3281
3509
  result = self.execute_command(cmd)
3282
3510
  if result.returncode > 0:
@@ -3289,7 +3517,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3289
3517
  raise SystemExit(1)
3290
3518
  else:
3291
3519
  self.console.print(
3292
- "\n[bold bright_green] All comprehensive quality checks passed![/bold bright_green]"
3520
+ "\n[bold bright_green]🏆 All comprehensive quality checks passed![/bold bright_green]"
3293
3521
  )
3294
3522
 
3295
3523
  async def _run_comprehensive_quality_checks_async(
@@ -3314,6 +3542,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3314
3542
  )
3315
3543
  self.console.print("-" * 80 + "\n")
3316
3544
 
3545
+ config_path = self.project_manager._select_precommit_config()
3317
3546
  cmd = [
3318
3547
  "uv",
3319
3548
  "run",
@@ -3322,7 +3551,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3322
3551
  "--all-files",
3323
3552
  "--hook-stage=manual",
3324
3553
  "-c",
3325
- ".pre-commit-config.yaml",
3554
+ config_path,
3326
3555
  ]
3327
3556
 
3328
3557
  result = await self.execute_command_async(cmd)
@@ -3339,7 +3568,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3339
3568
  raise SystemExit(1)
3340
3569
  else:
3341
3570
  self.console.print(
3342
- "[bold bright_green] All comprehensive quality checks passed![/bold bright_green]"
3571
+ "[bold bright_green]🏆 All comprehensive quality checks passed![/bold bright_green]"
3343
3572
  )
3344
3573
 
3345
3574
  def _run_tracked_task(
@@ -3358,7 +3587,9 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3358
3587
 
3359
3588
  def _run_pre_commit_task(self, options: OptionsProtocol) -> None:
3360
3589
  if not options.skip_hooks:
3361
- if getattr(options, "ai_agent", False):
3590
+ if getattr(options, "comprehensive", False):
3591
+ self._run_comprehensive_hooks()
3592
+ elif getattr(options, "ai_agent", False):
3362
3593
  self.project_manager.run_pre_commit_with_analysis()
3363
3594
  else:
3364
3595
  self.project_manager.run_pre_commit()
@@ -3369,6 +3600,33 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3369
3600
  if self.session_tracker:
3370
3601
  self.session_tracker.skip_task("pre_commit", "Skipped by user request")
3371
3602
 
3603
+ def _run_comprehensive_hooks(self) -> None:
3604
+ self.console.print("\n" + "-" * 80)
3605
+ self.console.print(
3606
+ "[bold bright_cyan]🔍 HOOKS[/bold bright_cyan] [bold bright_white]Running comprehensive quality checks[/bold bright_white]"
3607
+ )
3608
+ self.console.print("-" * 80 + "\n")
3609
+ stages = ["pre-commit", "pre-push", "manual"]
3610
+ config_path = self.project_manager._select_precommit_config()
3611
+ for stage in stages:
3612
+ self.console.print(f"[dim]Running {stage} stage hooks...[/dim]")
3613
+ cmd = [
3614
+ "uv",
3615
+ "run",
3616
+ "pre-commit",
3617
+ "run",
3618
+ "--all-files",
3619
+ f"--hook-stage={stage}",
3620
+ "-c",
3621
+ config_path,
3622
+ ]
3623
+ result = self.execute_command(cmd)
3624
+ if result.returncode > 0:
3625
+ self.console.print(
3626
+ f"\n[bold red]❌ {stage} hooks failed. Please fix errors.[/bold red]\n"
3627
+ )
3628
+ raise SystemExit(1)
3629
+
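In effect, comprehensive mode runs pre-commit once per stage against whichever config _select_precommit_config() picks, stopping at the first failure. A condensed sketch of the same loop (the config path is supplied by the caller):

    import subprocess
    import sys

    def run_comprehensive_hooks(config_path: str) -> None:
        # One pre-commit pass per stage; bail out on the first failing stage.
        for stage in ("pre-commit", "pre-push", "manual"):
            cmd = [
                "uv", "run", "pre-commit", "run",
                "--all-files", f"--hook-stage={stage}",
                "-c", config_path,
            ]
            if subprocess.run(cmd, check=False).returncode != 0:
                sys.exit(1)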
3372
3630
  def _initialize_session_tracking(self, options: OptionsProtocol) -> None:
3373
3631
  if options.resume_from:
3374
3632
  try:
@@ -3429,6 +3687,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3429
3687
  options.test = True
3430
3688
  options.publish = options.all
3431
3689
  options.commit = True
3690
+ if options.comprehensive:
3691
+ options.test = True
3432
3692
  self._run_tracked_task(
3433
3693
  "setup", "Initialize project structure", self._setup_package
3434
3694
  )
@@ -3450,7 +3710,8 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3450
3710
  self._run_tracked_task(
3451
3711
  "clean_project", "Clean project code", lambda: self._clean_project(options)
3452
3712
  )
3453
- self.project_manager.options = options
3713
+ if self.project_manager is not None:
3714
+ self.project_manager.options = options
3454
3715
  if not options.skip_hooks:
3455
3716
  self._run_tracked_task(
3456
3717
  "pre_commit",
@@ -3480,7 +3741,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3480
3741
  )
3481
3742
  self.console.print("\n" + "-" * 80)
3482
3743
  self.console.print(
3483
- "[bold bright_green] CRACKERJACK COMPLETE[/bold bright_green] [bold bright_white]Workflow completed successfully![/bold bright_white]"
3744
+ "[bold bright_green]🏆 CRACKERJACK COMPLETE[/bold bright_green] [bold bright_white]Workflow completed successfully![/bold bright_white]"
3484
3745
  )
3485
3746
  self.console.print("-" * 80 + "\n")
3486
3747
 
@@ -3496,11 +3757,14 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3496
3757
  options.test = True
3497
3758
  options.publish = options.all
3498
3759
  options.commit = True
3760
+ if options.comprehensive:
3761
+ options.test = True
3499
3762
  self._setup_package()
3500
3763
  self._update_project(options)
3501
3764
  self._update_precommit(options)
3502
3765
  await self._clean_project_async(options)
3503
- self.project_manager.options = options
3766
+ if self.project_manager is not None:
3767
+ self.project_manager.options = options
3504
3768
  if not options.skip_hooks:
3505
3769
  if getattr(options, "ai_agent", False):
3506
3770
  await self.project_manager.run_pre_commit_with_analysis_async()
@@ -3517,7 +3781,7 @@ class Crackerjack(BaseModel, arbitrary_types_allowed=True):
3517
3781
  self._publish_project(options)
3518
3782
  self.console.print("\n" + "-" * 80)
3519
3783
  self.console.print(
3520
- "[bold bright_green] CRACKERJACK COMPLETE[/bold bright_green] [bold bright_white]Workflow completed successfully![/bold bright_white]"
3784
+ "[bold bright_green]🏆 CRACKERJACK COMPLETE[/bold bright_green] [bold bright_white]Workflow completed successfully![/bold bright_white]"
3521
3785
  )
3522
3786
  self.console.print("-" * 80 + "\n")
3523
3787