thailint 0.8.0-py3-none-any.whl → 0.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. src/cli.py +242 -0
  2. src/config.py +2 -3
  3. src/core/base.py +4 -0
  4. src/core/rule_discovery.py +143 -84
  5. src/core/violation_builder.py +75 -15
  6. src/linter_config/loader.py +43 -11
  7. src/linters/collection_pipeline/__init__.py +90 -0
  8. src/linters/collection_pipeline/config.py +63 -0
  9. src/linters/collection_pipeline/continue_analyzer.py +100 -0
  10. src/linters/collection_pipeline/detector.py +130 -0
  11. src/linters/collection_pipeline/linter.py +437 -0
  12. src/linters/collection_pipeline/suggestion_builder.py +63 -0
  13. src/linters/dry/block_filter.py +6 -8
  14. src/linters/dry/block_grouper.py +4 -0
  15. src/linters/dry/cache_query.py +4 -0
  16. src/linters/dry/python_analyzer.py +34 -18
  17. src/linters/dry/token_hasher.py +5 -1
  18. src/linters/dry/typescript_analyzer.py +61 -31
  19. src/linters/dry/violation_builder.py +4 -0
  20. src/linters/dry/violation_filter.py +4 -0
  21. src/linters/file_header/bash_parser.py +4 -0
  22. src/linters/file_header/linter.py +7 -11
  23. src/linters/file_placement/directory_matcher.py +4 -0
  24. src/linters/file_placement/linter.py +28 -8
  25. src/linters/file_placement/pattern_matcher.py +4 -0
  26. src/linters/file_placement/pattern_validator.py +4 -0
  27. src/linters/magic_numbers/context_analyzer.py +4 -0
  28. src/linters/magic_numbers/typescript_analyzer.py +4 -0
  29. src/linters/nesting/python_analyzer.py +4 -0
  30. src/linters/nesting/typescript_function_extractor.py +4 -0
  31. src/linters/print_statements/typescript_analyzer.py +4 -0
  32. src/linters/srp/class_analyzer.py +4 -0
  33. src/linters/srp/heuristics.py +4 -3
  34. src/linters/srp/linter.py +2 -3
  35. src/linters/srp/python_analyzer.py +55 -20
  36. src/linters/srp/typescript_metrics_calculator.py +83 -47
  37. src/linters/srp/violation_builder.py +4 -0
  38. src/linters/stateless_class/__init__.py +25 -0
  39. src/linters/stateless_class/config.py +58 -0
  40. src/linters/stateless_class/linter.py +355 -0
  41. src/linters/stateless_class/python_analyzer.py +299 -0
  42. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/METADATA +226 -3
  43. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/RECORD +46 -36
  44. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/WHEEL +0 -0
  45. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/entry_points.txt +0 -0
  46. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/licenses/LICENSE +0 -0
src/linter_config/loader.py
@@ -16,10 +16,10 @@ Overview: Loads linter configuration from .thailint.yaml or .thailint.json files
  Dependencies: PyYAML for YAML parsing with safe_load(), json (stdlib) for JSON parsing,
  pathlib for file path handling

- Exports: LinterConfigLoader class
+ Exports: load_config function, get_defaults function, LinterConfigLoader class (compat)

- Interfaces: load(config_path: Path) -> dict[str, Any] for loading config files,
- defaults property -> dict[str, Any] for default configuration structure
+ Interfaces: load_config(config_path: Path) -> dict[str, Any] for loading config files,
+ get_defaults() -> dict[str, Any] for default configuration structure

  Implementation: Extension-based format detection (.yaml/.yml vs .json), yaml.safe_load()
  for security, empty dict handling for null YAML, ValueError for unsupported formats
@@ -31,13 +31,51 @@ from typing import Any
  from src.core.config_parser import parse_config_file


+ def get_defaults() -> dict[str, Any]:
+     """Get default configuration.
+
+     Returns:
+         Default configuration with empty rules and ignore lists.
+     """
+     return {
+         "rules": {},
+         "ignore": [],
+     }
+
+
+ def load_config(config_path: Path) -> dict[str, Any]:
+     """Load configuration from file.
+
+     Args:
+         config_path: Path to YAML or JSON config file.
+
+     Returns:
+         Configuration dictionary.
+
+     Raises:
+         ConfigParseError: If file format is unsupported or parsing fails.
+     """
+     if not config_path.exists():
+         return get_defaults()
+
+     return parse_config_file(config_path)
+
+
+ # Legacy class wrapper for backward compatibility
  class LinterConfigLoader:
      """Load linter configuration from YAML or JSON files.

      Supports loading from .thailint.yaml, .thailint.json, or custom paths.
      Provides sensible defaults when config files don't exist.
+
+     Note: This class is a thin wrapper around module-level functions
+     for backward compatibility.
      """

+     def __init__(self) -> None:
+         """Initialize the loader."""
+         pass  # No state needed
+
      def load(self, config_path: Path) -> dict[str, Any]:
          """Load configuration from file.

@@ -50,10 +88,7 @@ class LinterConfigLoader:
          Raises:
              ConfigParseError: If file format is unsupported or parsing fails.
          """
-         if not config_path.exists():
-             return self.defaults
-
-         return parse_config_file(config_path)
+         return load_config(config_path)

      @property
      def defaults(self) -> dict[str, Any]:
@@ -62,7 +97,4 @@ class LinterConfigLoader:
          Returns:
              Default configuration with empty rules and ignore lists.
          """
-         return {
-             "rules": {},
-             "ignore": [],
-         }
+         return get_defaults()
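The loader change above moves the real work into module-level load_config() and get_defaults() functions and keeps LinterConfigLoader as a thin compatibility wrapper. A minimal usage sketch of both entry points, based only on the code in this hunk; the .thailint.yaml path is illustrative:

from pathlib import Path

from src.linter_config.loader import LinterConfigLoader, get_defaults, load_config

config_path = Path(".thailint.yaml")  # illustrative path

# New API: returns get_defaults() when the file does not exist,
# otherwise delegates to parse_config_file()
config = load_config(config_path)
assert set(get_defaults()) == {"rules", "ignore"}

# Legacy API: now forwards to the module-level functions
loader = LinterConfigLoader()
assert loader.load(config_path) == config
assert loader.defaults == get_defaults()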
src/linters/collection_pipeline/__init__.py (new file)
@@ -0,0 +1,90 @@
+ """
+ Purpose: Collection pipeline linter package initialization
+
+ Scope: Exports for collection-pipeline linter module
+
+ Overview: Initializes the collection-pipeline linter package and exposes the main rule class
+ for external use. Exports CollectionPipelineRule as the primary interface for the linter,
+ allowing the orchestrator to discover and instantiate the rule. Also exports configuration
+ and detector classes for advanced use cases. Provides a convenience lint() function for
+ direct usage without orchestrator setup. This module serves as the entry point for
+ the collection-pipeline linter functionality within the thai-lint framework.
+
+ Dependencies: CollectionPipelineRule, CollectionPipelineConfig, PipelinePatternDetector
+
+ Exports: CollectionPipelineRule (primary), CollectionPipelineConfig, PipelinePatternDetector, lint
+
+ Interfaces: Standard Python package initialization with __all__ for explicit exports
+
+ Implementation: Simple re-export pattern for package interface, convenience lint function
+ """
+
+ from pathlib import Path
+ from typing import Any
+
+ from .config import DEFAULT_MIN_CONTINUES, CollectionPipelineConfig
+ from .detector import PatternMatch, PipelinePatternDetector
+ from .linter import CollectionPipelineRule
+
+ __all__ = [
+     "CollectionPipelineRule",
+     "CollectionPipelineConfig",
+     "PipelinePatternDetector",
+     "PatternMatch",
+     "lint",
+ ]
+
+
+ def lint(
+     path: Path | str,
+     config: dict[str, Any] | None = None,
+     min_continues: int = DEFAULT_MIN_CONTINUES,
+ ) -> list:
+     """Lint a file or directory for collection pipeline violations.
+
+     Args:
+         path: Path to file or directory to lint
+         config: Configuration dict (optional, uses defaults if not provided)
+         min_continues: Minimum if/continue patterns to flag (default: 1)
+
+     Returns:
+         List of violations found
+
+     Example:
+         >>> from src.linters.collection_pipeline import lint
+         >>> violations = lint('src/my_module.py', min_continues=2)
+         >>> for v in violations:
+         ...     print(f"{v.file_path}:{v.line} - {v.message}")
+     """
+     path_obj = Path(path) if isinstance(path, str) else path
+     project_root = path_obj if path_obj.is_dir() else path_obj.parent
+
+     orchestrator = _setup_pipeline_orchestrator(project_root, config, min_continues)
+     violations = _execute_pipeline_lint(orchestrator, path_obj)
+
+     return [v for v in violations if "collection-pipeline" in v.rule_id]
+
+
+ def _setup_pipeline_orchestrator(
+     project_root: Path, config: dict[str, Any] | None, min_continues: int
+ ) -> Any:
+     """Set up orchestrator with collection-pipeline config."""
+     from src.orchestrator.core import Orchestrator
+
+     orchestrator = Orchestrator(project_root=project_root)
+
+     if config:
+         orchestrator.config["collection-pipeline"] = config
+     else:
+         orchestrator.config["collection-pipeline"] = {"min_continues": min_continues}
+
+     return orchestrator
+
+
+ def _execute_pipeline_lint(orchestrator: Any, path_obj: Path) -> list:
+     """Execute linting on file or directory."""
+     if path_obj.is_file():
+         return orchestrator.lint_file(path_obj)
+     if path_obj.is_dir():
+         return orchestrator.lint_directory(path_obj)
+     return []
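For callers that do not want to wire up the orchestrator themselves, the new lint() helper accepts a file or directory path plus either a config dict or a min_continues threshold. A short sketch using only the API shown above; the directory path and ignore pattern are illustrative:

from src.linters.collection_pipeline import lint

# Directory scan with an explicit config dict (values are illustrative)
violations = lint("src/", config={"min_continues": 2, "ignore": ["tests/*"]})
for v in violations:
    print(f"{v.file_path}:{v.line} - {v.message}")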
src/linters/collection_pipeline/config.py (new file)
@@ -0,0 +1,63 @@
+ """
+ Purpose: Configuration dataclass for collection-pipeline linter
+
+ Scope: Define configurable options for embedded filtering pattern detection
+
+ Overview: Provides CollectionPipelineConfig for customizing linter behavior including
+ minimum number of continue patterns to flag, enable/disable toggle, and ignore
+ patterns. Integrates with the orchestrator's configuration system to allow users
+ to customize collection-pipeline detection via .thailint.yaml configuration files.
+ Follows the same configuration pattern as other thai-lint linters.
+
+ Dependencies: dataclasses, typing
+
+ Exports: CollectionPipelineConfig dataclass, DEFAULT_MIN_CONTINUES constant
+
+ Interfaces: CollectionPipelineConfig.from_dict() class method for configuration loading
+
+ Implementation: Dataclass with sensible defaults and config loading from dictionary
+ """
+
+ from dataclasses import dataclass, field
+ from typing import Any
+
+ # Default threshold for minimum continue guards to flag
+ DEFAULT_MIN_CONTINUES = 1
+
+
+ @dataclass
+ class CollectionPipelineConfig:
+     """Configuration for collection-pipeline linter."""
+
+     enabled: bool = True
+     """Whether the linter is enabled."""
+
+     min_continues: int = DEFAULT_MIN_CONTINUES
+     """Minimum number of if/continue patterns required to flag a violation."""
+
+     ignore: list[str] = field(default_factory=list)
+     """File patterns to ignore."""
+
+     def __post_init__(self) -> None:
+         """Validate configuration values."""
+         if self.min_continues < 1:
+             raise ValueError(f"min_continues must be at least 1, got {self.min_continues}")
+
+     @classmethod
+     def from_dict(
+         cls, config: dict[str, Any], language: str | None = None
+     ) -> "CollectionPipelineConfig":
+         """Load configuration from dictionary.
+
+         Args:
+             config: Dictionary containing configuration values
+             language: Programming language (unused, for interface compatibility)
+
+         Returns:
+             CollectionPipelineConfig instance with values from dictionary
+         """
+         return cls(
+             enabled=config.get("enabled", True),
+             min_continues=config.get("min_continues", DEFAULT_MIN_CONTINUES),
+             ignore=config.get("ignore", []),
+         )
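This dataclass is what the linter builds from the collection-pipeline section of .thailint.yaml. A small sketch of from_dict() and the __post_init__ validation, based on the code above; the dict values are illustrative:

from src.linters.collection_pipeline.config import CollectionPipelineConfig

# Mimics a parsed "collection-pipeline" config section (illustrative values)
cfg = CollectionPipelineConfig.from_dict({"min_continues": 2, "ignore": ["tests/*"]})
assert cfg.enabled and cfg.min_continues == 2

# Thresholds below 1 are rejected at construction time
try:
    CollectionPipelineConfig(min_continues=0)
except ValueError as exc:
    print(exc)  # min_continues must be at least 1, got 0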
src/linters/collection_pipeline/continue_analyzer.py (new file)
@@ -0,0 +1,100 @@
+ """
+ Purpose: Analyze continue guard patterns in for loops
+
+ Scope: Extract and validate if/continue patterns from loop bodies
+
+ Overview: Provides helper functions for analyzing continue guard patterns in for loop
+ bodies. Handles extraction of sequential if/continue statements, validation of
+ simple continue-only patterns, and detection of side effects in conditions.
+ Separates pattern analysis logic from main detection for better maintainability.
+
+ Dependencies: ast module for Python AST processing
+
+ Exports: extract_continue_patterns, is_continue_only, has_side_effects, has_body_after_continues
+
+ Interfaces: Functions for analyzing continue patterns in AST structures
+
+ Implementation: AST-based pattern matching for continue guard identification
+ """
+
+ import ast
+
+
+ def extract_continue_patterns(body: list[ast.stmt]) -> list[ast.If]:
+     """Extract leading if statements that only contain continue.
+
+     Args:
+         body: List of statements in for loop body
+
+     Returns:
+         List of ast.If nodes that are continue guards
+     """
+     continues: list[ast.If] = []
+     for stmt in body:
+         if not isinstance(stmt, ast.If):
+             break
+         if not is_continue_only(stmt):
+             break
+         continues.append(stmt)
+     return continues
+
+
+ def is_continue_only(if_node: ast.If) -> bool:
+     """Check if an if statement only contains continue.
+
+     Args:
+         if_node: AST If node to check
+
+     Returns:
+         True if the if statement is a simple continue guard
+     """
+     if len(if_node.body) != 1:
+         return False
+     if not isinstance(if_node.body[0], ast.Continue):
+         return False
+     if if_node.orelse:
+         return False
+     return True
+
+
+ def has_side_effects(continues: list[ast.If]) -> bool:
+     """Check if any condition has side effects.
+
+     Args:
+         continues: List of continue guard if statements
+
+     Returns:
+         True if any condition has side effects (e.g., walrus operator)
+     """
+     for if_node in continues:
+         if _condition_has_side_effects(if_node.test):
+             return True
+     return False
+
+
+ def _condition_has_side_effects(node: ast.expr) -> bool:
+     """Check if expression has side effects.
+
+     Args:
+         node: AST expression node to check
+
+     Returns:
+         True if expression has side effects
+     """
+     for child in ast.walk(node):
+         if isinstance(child, ast.NamedExpr):
+             return True
+     return False
+
+
+ def has_body_after_continues(body: list[ast.stmt], num_continues: int) -> bool:
+     """Check if there are statements after continue guards.
+
+     Args:
+         body: List of statements in for loop body
+         num_continues: Number of continue guards detected
+
+     Returns:
+         True if there are statements after the continue guards
+     """
+     return len(body) > num_continues
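These helpers operate on the raw statement list of a for-loop body. A minimal sketch of how they classify a loop with two leading continue guards; the snippet and the items/process names are illustrative:

import ast

from src.linters.collection_pipeline import continue_analyzer

source = '''
for item in items:
    if item is None:
        continue
    if not item.active:
        continue
    process(item)
'''
loop = ast.parse(source).body[0]
assert isinstance(loop, ast.For)

guards = continue_analyzer.extract_continue_patterns(loop.body)
assert len(guards) == 2                                # both leading if/continue guards
assert not continue_analyzer.has_side_effects(guards)  # no walrus operators in the conditions
assert continue_analyzer.has_body_after_continues(loop.body, len(guards))  # process(item) remains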
src/linters/collection_pipeline/detector.py (new file)
@@ -0,0 +1,130 @@
+ """
+ Purpose: AST-based detection of collection pipeline anti-patterns
+
+ Scope: Pattern matching for for loops with embedded filtering via if/continue
+
+ Overview: Implements the core detection logic for identifying imperative loop patterns
+ that use if/continue for filtering instead of collection pipelines. Uses Python's
+ AST module to analyze code structure and identify refactoring opportunities. Detects
+ patterns like 'for x in iter: if not cond: continue; action(x)' and suggests
+ refactoring to generator expressions or filter(). Handles edge cases like walrus
+ operators (side effects), else branches, and empty loop bodies.
+
+ Dependencies: ast module, continue_analyzer, suggestion_builder
+
+ Exports: PipelinePatternDetector class, PatternMatch dataclass
+
+ Interfaces: PipelinePatternDetector.detect_patterns() -> list[PatternMatch]
+
+ Implementation: AST visitor pattern with delegated pattern matching and suggestion generation
+ """
+
+ import ast
+ from dataclasses import dataclass
+
+ from . import continue_analyzer, suggestion_builder
+
+
+ @dataclass
+ class PatternMatch:
+     """Represents a detected anti-pattern."""
+
+     line_number: int
+     """Line number where the for loop starts (1-indexed)."""
+
+     loop_var: str
+     """Name of the loop variable."""
+
+     iterable: str
+     """Source representation of the iterable."""
+
+     conditions: list[str]
+     """List of filter conditions (inverted from continue guards)."""
+
+     has_side_effects: bool
+     """Whether any condition has side effects."""
+
+     suggestion: str
+     """Refactoring suggestion as a code snippet."""
+
+
+ class PipelinePatternDetector(ast.NodeVisitor):
+     """Detects for loops with embedded filtering via if/continue patterns."""
+
+     def __init__(self, source_code: str) -> None:
+         """Initialize detector with source code.
+
+         Args:
+             source_code: Python source code to analyze
+         """
+         self.source_code = source_code
+         self.matches: list[PatternMatch] = []
+
+     def detect_patterns(self) -> list[PatternMatch]:
+         """Analyze source code and return detected patterns.
+
+         Returns:
+             List of PatternMatch objects for each detected anti-pattern
+         """
+         try:
+             tree = ast.parse(self.source_code)
+             self.visit(tree)
+         except SyntaxError:
+             pass  # Invalid Python, return empty list
+         return self.matches
+
+     def visit_For(self, node: ast.For) -> None:  # pylint: disable=invalid-name
+         """Visit for loop and check for filtering patterns.
+
+         Args:
+             node: AST For node to analyze
+         """
+         match = self._analyze_for_loop(node)
+         if match is not None:
+             self.matches.append(match)
+         self.generic_visit(node)
+
+     def _analyze_for_loop(self, node: ast.For) -> PatternMatch | None:
+         """Analyze a for loop for embedded filtering patterns.
+
+         Args:
+             node: AST For node to analyze
+
+         Returns:
+             PatternMatch if pattern detected, None otherwise
+         """
+         continues = continue_analyzer.extract_continue_patterns(node.body)
+         if not continues:
+             return None
+
+         if continue_analyzer.has_side_effects(continues):
+             return None
+
+         if not continue_analyzer.has_body_after_continues(node.body, len(continues)):
+             return None
+
+         return self._create_match(node, continues)
+
+     def _create_match(self, for_node: ast.For, continues: list[ast.If]) -> PatternMatch:
+         """Create a PatternMatch from detected pattern.
+
+         Args:
+             for_node: AST For node
+             continues: List of continue guard if statements
+
+         Returns:
+             PatternMatch object with detection information
+         """
+         loop_var = suggestion_builder.get_target_name(for_node.target)
+         iterable = ast.unparse(for_node.iter)
+         conditions = [suggestion_builder.invert_condition(c.test) for c in continues]
+         suggestion = suggestion_builder.build_suggestion(loop_var, iterable, conditions)
+
+         return PatternMatch(
+             line_number=for_node.lineno,
+             loop_var=loop_var,
+             iterable=iterable,
+             conditions=conditions,
+             has_side_effects=False,
+             suggestion=suggestion,
+         )
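Putting the pieces together, the detector walks every for loop, delegates guard extraction to continue_analyzer, and asks suggestion_builder (whose hunk is not shown above) for the rewrite text. A sketch of direct use on an illustrative anti-pattern; the users/send_email names are made up and the exact wording of suggestion depends on suggestion_builder:

from src.linters.collection_pipeline.detector import PipelinePatternDetector

# Illustrative loop that filters with if/continue instead of a pipeline
source = '''
for user in users:
    if not user.active:
        continue
    send_email(user)
'''

for match in PipelinePatternDetector(source).detect_patterns():
    print(match.line_number, match.loop_var, match.iterable, match.conditions)
    print(match.suggestion)  # e.g. a generator-expression or filter() rewrite of the loop header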