thailint 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. src/cli.py +242 -0
  2. src/config.py +2 -3
  3. src/core/base.py +4 -0
  4. src/core/rule_discovery.py +143 -84
  5. src/core/violation_builder.py +75 -15
  6. src/linter_config/loader.py +43 -11
  7. src/linters/collection_pipeline/__init__.py +90 -0
  8. src/linters/collection_pipeline/config.py +63 -0
  9. src/linters/collection_pipeline/continue_analyzer.py +100 -0
  10. src/linters/collection_pipeline/detector.py +130 -0
  11. src/linters/collection_pipeline/linter.py +437 -0
  12. src/linters/collection_pipeline/suggestion_builder.py +63 -0
  13. src/linters/dry/block_filter.py +6 -8
  14. src/linters/dry/block_grouper.py +4 -0
  15. src/linters/dry/cache_query.py +4 -0
  16. src/linters/dry/python_analyzer.py +34 -18
  17. src/linters/dry/token_hasher.py +5 -1
  18. src/linters/dry/typescript_analyzer.py +61 -31
  19. src/linters/dry/violation_builder.py +4 -0
  20. src/linters/dry/violation_filter.py +4 -0
  21. src/linters/file_header/bash_parser.py +4 -0
  22. src/linters/file_header/linter.py +7 -11
  23. src/linters/file_placement/directory_matcher.py +4 -0
  24. src/linters/file_placement/linter.py +28 -8
  25. src/linters/file_placement/pattern_matcher.py +4 -0
  26. src/linters/file_placement/pattern_validator.py +4 -0
  27. src/linters/magic_numbers/context_analyzer.py +4 -0
  28. src/linters/magic_numbers/typescript_analyzer.py +4 -0
  29. src/linters/nesting/python_analyzer.py +4 -0
  30. src/linters/nesting/typescript_function_extractor.py +4 -0
  31. src/linters/print_statements/typescript_analyzer.py +4 -0
  32. src/linters/srp/class_analyzer.py +4 -0
  33. src/linters/srp/heuristics.py +4 -3
  34. src/linters/srp/linter.py +2 -3
  35. src/linters/srp/python_analyzer.py +55 -20
  36. src/linters/srp/typescript_metrics_calculator.py +83 -47
  37. src/linters/srp/violation_builder.py +4 -0
  38. src/linters/stateless_class/__init__.py +25 -0
  39. src/linters/stateless_class/config.py +58 -0
  40. src/linters/stateless_class/linter.py +355 -0
  41. src/linters/stateless_class/python_analyzer.py +299 -0
  42. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/METADATA +226 -3
  43. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/RECORD +46 -36
  44. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/WHEEL +0 -0
  45. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/entry_points.txt +0 -0
  46. {thailint-0.8.0.dist-info → thailint-0.10.0.dist-info}/licenses/LICENSE +0 -0
src/linters/dry/typescript_analyzer.py CHANGED
@@ -30,7 +30,7 @@ SRP Exception: TypeScriptDuplicateAnalyzer has 20 methods and 324 lines (exceeds
     responsibility: accurately detecting duplicate TypeScript/JavaScript code while minimizing false positives.
 """
 
-from collections.abc import Generator
+from collections.abc import Generator, Iterable
 from pathlib import Path
 
 from src.analyzers.typescript_base import TREE_SITTER_AVAILABLE
@@ -84,16 +84,33 @@ class TypeScriptDuplicateAnalyzer(BaseTokenAnalyzer):  # thailint: ignore[srp.vi
         # Generate rolling hash windows
         windows = self._rolling_hash_with_tracking(lines_with_numbers, config.min_duplicate_lines)
 
-        blocks = []
-        for hash_val, start_line, end_line, snippet in windows:
-            # Filter interface/type definitions
-            if not self._should_include_block(content, start_line, end_line):
-                continue
+        # Filter out interface/type definitions and single statement patterns
+        valid_windows = (
+            (hash_val, start_line, end_line, snippet)
+            for hash_val, start_line, end_line, snippet in windows
+            if self._should_include_block(content, start_line, end_line)
+            and not self._is_single_statement_in_source(content, start_line, end_line)
+        )
+        return self._build_blocks(valid_windows, file_path, content)
+
+    def _build_blocks(
+        self,
+        windows: Iterable[tuple[int, int, int, str]],
+        file_path: Path,
+        content: str,
+    ) -> list[CodeBlock]:
+        """Build CodeBlock objects from valid windows, applying filters.
 
-            # Filter single statement patterns
-            if self._is_single_statement_in_source(content, start_line, end_line):
-                continue
+        Args:
+            windows: Iterable of (hash_val, start_line, end_line, snippet) tuples
+            file_path: Path to source file
+            content: File content
 
+        Returns:
+            List of CodeBlock instances that pass all filters
+        """
+        blocks = []
+        for hash_val, start_line, end_line, snippet in windows:
             block = CodeBlock(
                 file_path=file_path,
                 start_line=start_line,
@@ -101,13 +118,8 @@ class TypeScriptDuplicateAnalyzer(BaseTokenAnalyzer):  # thailint: ignore[srp.vi
                 snippet=snippet,
                 hash_value=hash_val,
             )
-
-            # Apply extensible filters (keyword arguments, imports, etc.)
-            if self._filter_registry.should_filter_block(block, content):
-                continue
-
-            blocks.append(block)
-
+            if not self._filter_registry.should_filter_block(block, content):
+                blocks.append(block)
         return blocks
 
     def _get_jsdoc_ranges_from_content(self, content: str) -> set[int]:
@@ -188,26 +200,44 @@ class TypeScriptDuplicateAnalyzer(BaseTokenAnalyzer):  # thailint: ignore[srp.vi
         lines_with_numbers = []
         in_multiline_import = False
 
-        for line_num, line in enumerate(content.split("\n"), start=1):
-            # Skip JSDoc comment lines
-            if line_num in jsdoc_lines:
-                continue
-
-            line = self._hasher._normalize_line(line)  # pylint: disable=protected-access
-            if not line:
-                continue
-
-            # Update multi-line import state and check if line should be skipped
-            in_multiline_import, should_skip = self._hasher._should_skip_import_line(  # pylint: disable=protected-access
+        # Skip JSDoc comment lines
+        non_jsdoc_lines = (
+            (line_num, line)
+            for line_num, line in enumerate(content.split("\n"), start=1)
+            if line_num not in jsdoc_lines
+        )
+        for line_num, line in non_jsdoc_lines:
+            in_multiline_import, normalized = self._normalize_and_filter_line(
                 line, in_multiline_import
            )
-            if should_skip:
-                continue
-
-            lines_with_numbers.append((line_num, line))
+            if normalized is not None:
+                lines_with_numbers.append((line_num, normalized))
 
         return lines_with_numbers
 
+    def _normalize_and_filter_line(
+        self, line: str, in_multiline_import: bool
+    ) -> tuple[bool, str | None]:
+        """Normalize line and check if it should be included.
+
+        Args:
+            line: Raw source line
+            in_multiline_import: Current multi-line import state
+
+        Returns:
+            Tuple of (new_import_state, normalized_line or None if should skip)
+        """
+        normalized = self._hasher._normalize_line(line)  # pylint: disable=protected-access
+        if not normalized:
+            return in_multiline_import, None
+
+        new_state, should_skip = self._hasher._should_skip_import_line(  # pylint: disable=protected-access
+            normalized, in_multiline_import
+        )
+        if should_skip:
+            return new_state, None
+        return new_state, normalized
+
     def _rolling_hash_with_tracking(
         self, lines_with_numbers: list[tuple[int, str]], window_size: int
     ) -> list[tuple[int, int, int, str]]:
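Every hunk above replaces loop-and-continue filtering with a generator expression feeding a dedicated consumer, the collection-pipeline style that the new src/linters/collection_pipeline linter added in this release appears to target. A minimal, self-contained sketch of the same refactor with invented names (not code from the package):

# Illustrative only: hypothetical window filtering, not thailint source.
from collections.abc import Iterable

Window = tuple[int, int, int, str]  # (hash_val, start_line, end_line, snippet)

def keep(window: Window) -> bool:
    """Stand-in for the real per-window predicates."""
    _, start, end, _ = window
    return end - start >= 2

def filter_windows_loop(windows: Iterable[Window]) -> list[Window]:
    """Before: imperative loop that rejects windows with early continue."""
    kept = []
    for window in windows:
        if not keep(window):
            continue
        kept.append(window)
    return kept

def filter_windows_pipeline(windows: Iterable[Window]) -> Iterable[Window]:
    """After: a lazy pipeline stage; the caller decides when to materialize."""
    return (window for window in windows if keep(window))

sample = [(1, 1, 5, "a"), (2, 3, 4, "b")]
assert filter_windows_loop(sample) == list(filter_windows_pipeline(sample))  # both keep only the first window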
src/linters/dry/violation_builder.py CHANGED
@@ -27,6 +27,10 @@ from .cache import CodeBlock
 class DRYViolationBuilder:
     """Builds violation messages for duplicate code."""
 
+    def __init__(self) -> None:
+        """Initialize the DRY violation builder."""
+        pass  # Stateless builder for duplicate code violations
+
     def build_violation(
         self, block: CodeBlock, all_duplicates: list[CodeBlock], rule_id: str
     ) -> Violation:
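The same four-line constructor is added to a dozen stateless helper classes below. Per the comment in the bash_parser.py hunk further down ("we need this for stateless-class"), the explicit __init__ appears to exist to document intent for the new stateless_class linter shipped in this release. A generic sketch of the pattern with a hypothetical class, not taken from the package:

# Hypothetical example of the pattern; not thailint source.
class PathNormalizer:
    """Stateless helper: every method derives its result from its arguments alone."""

    def __init__(self) -> None:
        """Initialize the normalizer (no state to set up)."""
        # Explicit constructor documents that the class is intentionally stateless.

    def normalize(self, path: str) -> str:
        """Collapse backslashes to forward slashes."""
        return path.replace("\\", "/")


assert PathNormalizer().normalize("a\\b") == "a/b"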
src/linters/dry/violation_filter.py CHANGED
@@ -25,6 +25,10 @@ DEFAULT_FALLBACK_LINE_COUNT = 5
 class ViolationFilter:
     """Filters overlapping violations."""
 
+    def __init__(self) -> None:
+        """Initialize the violation filter."""
+        pass  # Stateless filter for overlapping violations
+
     def filter_overlapping(self, sorted_violations: list[Violation]) -> list[Violation]:
         """Filter overlapping violations, keeping first occurrence.
 
src/linters/file_header/bash_parser.py CHANGED
@@ -24,6 +24,10 @@ from src.linters.file_header.base_parser import BaseHeaderParser
 class BashHeaderParser(BaseHeaderParser):
     """Extracts and parses Bash file headers from comment blocks."""
 
+    def __init__(self) -> None:
+        """Initialize the Bash header parser."""
+        pass  # BaseHeaderParser has no __init__, but we need this for stateless-class
+
     def extract_header(self, code: str) -> str | None:
         """Extract comment header from Bash script."""
         if not code or not code.strip():
src/linters/file_header/linter.py CHANGED
@@ -273,17 +273,13 @@ class FileHeaderRule(BaseLintRule):  # thailint: ignore[srp]
         file_content = context.file_content or ""
         lines = file_content.splitlines()
 
-        filtered = []
-        for v in violations:
-            if self._ignore_parser.should_ignore_violation(v, file_content):
-                continue
-
-            if self._has_line_level_ignore(lines, v):
-                continue
-
-            filtered.append(v)
-
-        return filtered
+        non_ignored = (
+            v
+            for v in violations
+            if not self._ignore_parser.should_ignore_violation(v, file_content)
+            and not self._has_line_level_ignore(lines, v)
+        )
+        return list(non_ignored)
 
     def _has_line_level_ignore(self, lines: list[str], violation: Violation) -> bool:
         """Check for thailint-ignore-line directive."""
src/linters/file_placement/directory_matcher.py CHANGED
@@ -23,6 +23,10 @@ from typing import Any
 class DirectoryMatcher:
     """Finds matching directory rules based on path prefixes."""
 
+    def __init__(self) -> None:
+        """Initialize the directory matcher."""
+        pass  # Stateless matcher for directory rules
+
     def find_matching_rule(
         self, path_str: str, directories: dict[str, Any]
     ) -> tuple[dict[str, Any] | None, str | None]:
src/linters/file_placement/linter.py CHANGED
@@ -124,20 +124,40 @@ class FilePlacementLinter:
         Returns:
             List of all violations found
         """
+        valid_files = self._get_valid_files(dir_path, recursive)
+        return self._lint_files(valid_files)
+
+    def _get_valid_files(self, dir_path: Path, recursive: bool) -> list[Path]:
+        """Get list of valid files to lint from directory.
+
+        Args:
+            dir_path: Directory to scan
+            recursive: Scan recursively
+
+        Returns:
+            List of file paths to lint
+        """
         from src.linter_config.ignore import IgnoreDirectiveParser
 
         ignore_parser = IgnoreDirectiveParser(self.project_root)
         pattern = "**/*" if recursive else "*"
 
-        violations = []
-        for file_path in dir_path.glob(pattern):
-            if not file_path.is_file():
-                continue
-            if ignore_parser.is_ignored(file_path):
-                continue
-            file_violations = self.lint_path(file_path)
-            violations.extend(file_violations)
+        return [
+            f for f in dir_path.glob(pattern) if f.is_file() and not ignore_parser.is_ignored(f)
+        ]
+
+    def _lint_files(self, file_paths: list[Path]) -> list[Violation]:
+        """Lint multiple files and collect violations.
+
+        Args:
+            file_paths: List of file paths to lint
 
+        Returns:
+            List of all violations found
+        """
+        violations = []
+        for file_path in file_paths:
+            violations.extend(self.lint_path(file_path))
         return violations
 
 
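The refactor above splits "choose the files" from "lint the files", so each half can be tested in isolation. A rough sketch of the decomposition with simplified signatures (illustrative, not the package's API beyond what the hunk shows):

# Illustrative decomposition; callables stand in for the ignore parser and per-file linter.
from collections.abc import Callable
from pathlib import Path

def collect_files(dir_path: Path, recursive: bool, is_ignored: Callable[[Path], bool]) -> list[Path]:
    """Select lintable files: glob, keep regular files, drop ignored paths."""
    pattern = "**/*" if recursive else "*"
    return [f for f in dir_path.glob(pattern) if f.is_file() and not is_ignored(f)]

def lint_files(file_paths: list[Path], lint_path: Callable[[Path], list[str]]) -> list[str]:
    """Run the per-file linter over an already-filtered list and merge the results."""
    violations: list[str] = []
    for file_path in file_paths:
        violations.extend(lint_path(file_path))
    return violations

# Usage: the directory walk and the linting step compose but no longer interleave.
found = lint_files(collect_files(Path("."), recursive=False, is_ignored=lambda p: False),
                   lint_path=lambda p: [])
assert found == []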
src/linters/file_placement/pattern_matcher.py CHANGED
@@ -23,6 +23,10 @@ import re
 class PatternMatcher:
     """Handles regex pattern matching for file paths."""
 
+    def __init__(self) -> None:
+        """Initialize the pattern matcher."""
+        pass  # Stateless matcher for regex patterns
+
     def match_deny_patterns(
         self, path_str: str, deny_patterns: list[dict[str, str]]
     ) -> tuple[bool, str | None]:
src/linters/file_placement/pattern_validator.py CHANGED
@@ -24,6 +24,10 @@ from typing import Any
 class PatternValidator:
     """Validates regex patterns in file placement configuration."""
 
+    def __init__(self) -> None:
+        """Initialize the pattern validator."""
+        pass  # Stateless validator for regex patterns
+
     def validate_config(self, config: dict[str, Any]) -> None:
         """Validate all regex patterns in configuration.
 
src/linters/magic_numbers/context_analyzer.py CHANGED
@@ -30,6 +30,10 @@ from pathlib import Path
 class ContextAnalyzer:  # thailint: ignore[srp]
     """Analyzes contexts to determine if numeric literals are acceptable."""
 
+    def __init__(self) -> None:
+        """Initialize the context analyzer."""
+        pass  # Stateless analyzer for context checking
+
     def is_acceptable_context(
         self,
         node: ast.Constant,
src/linters/magic_numbers/typescript_analyzer.py CHANGED
@@ -44,6 +44,10 @@ class TypeScriptMagicNumberAnalyzer(TypeScriptBaseAnalyzer):  # thailint: ignore
     of TypeScript magic number detection - all methods support this core purpose.
     """
 
+    def __init__(self) -> None:  # pylint: disable=useless-parent-delegation
+        """Initialize the TypeScript magic number analyzer."""
+        super().__init__()  # Sets self.tree_sitter_available from base class
+
     def find_numeric_literals(self, root_node: Node) -> list[tuple[Node, float | int, int]]:
         """Find all numeric literal nodes in TypeScript/JavaScript AST.
 
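Two flavors of the added constructors appear in this release: a bare pass body for standalone stateless classes, and super().__init__() delegation for subclasses of TypeScriptBaseAnalyzer, which, per the inline comments, sets self.tree_sitter_available. A condensed sketch of the distinction using hypothetical classes:

# Hypothetical base/subclass pair illustrating the two constructor styles; not thailint source.
class AnalyzerBase:
    """Stands in for TypeScriptBaseAnalyzer, which reportedly probes for tree-sitter here."""

    def __init__(self) -> None:
        self.tree_sitter_available = False


class StatelessHelper:
    """Standalone helper with no attributes."""

    def __init__(self) -> None:
        """The explicit constructor only documents statelessness."""


class DerivedAnalyzer(AnalyzerBase):
    """Subclass delegates so the base class can set up shared attributes."""

    def __init__(self) -> None:  # pylint: disable=useless-parent-delegation
        super().__init__()


assert DerivedAnalyzer().tree_sitter_available is False
assert not vars(StatelessHelper())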
src/linters/nesting/python_analyzer.py CHANGED
@@ -25,6 +25,10 @@ import ast
 class PythonNestingAnalyzer:
     """Calculates maximum nesting depth in Python functions."""
 
+    def __init__(self) -> None:
+        """Initialize the Python nesting analyzer."""
+        pass  # Stateless analyzer for nesting depth calculation
+
     def calculate_max_depth(
         self, func_node: ast.FunctionDef | ast.AsyncFunctionDef
     ) -> tuple[int, int]:
src/linters/nesting/typescript_function_extractor.py CHANGED
@@ -27,6 +27,10 @@ from src.analyzers.typescript_base import TypeScriptBaseAnalyzer
 class TypeScriptFunctionExtractor(TypeScriptBaseAnalyzer):
     """Extracts function information from TypeScript AST nodes."""
 
+    def __init__(self) -> None:  # pylint: disable=useless-parent-delegation
+        """Initialize the TypeScript function extractor."""
+        super().__init__()  # Sets self.tree_sitter_available from base class
+
     def collect_all_functions(self, root_node: Any) -> list[tuple[Any, str]]:
         """Collect all function nodes from TypeScript AST.
 
src/linters/print_statements/typescript_analyzer.py CHANGED
@@ -40,6 +40,10 @@ except ImportError:
 class TypeScriptPrintStatementAnalyzer(TypeScriptBaseAnalyzer):
     """Analyzes TypeScript/JavaScript code for console.* calls using Tree-sitter."""
 
+    def __init__(self) -> None:  # pylint: disable=useless-parent-delegation
+        """Initialize the TypeScript print statement analyzer."""
+        super().__init__()  # Sets self.tree_sitter_available from base class
+
     def find_console_calls(self, root_node: Node, methods: set[str]) -> list[tuple[Node, str, int]]:
         """Find all console.* calls matching the specified methods.
 
src/linters/srp/class_analyzer.py CHANGED
@@ -31,6 +31,10 @@ from .typescript_analyzer import TypeScriptSRPAnalyzer
 class ClassAnalyzer:
     """Coordinates class analysis for Python and TypeScript."""
 
+    def __init__(self) -> None:
+        """Initialize the class analyzer."""
+        pass  # Coordinates analysis between language-specific analyzers
+
     def analyze_python(
         self, context: BaseLintContext, config: SRPConfig
     ) -> list[dict[str, Any]] | list[Violation]:
src/linters/srp/heuristics.py CHANGED
@@ -33,9 +33,10 @@ def count_methods(class_node: ast.ClassDef) -> int:
         Number of methods in the class
     """
     methods = 0
-    for node in class_node.body:
-        if not isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
-            continue
+    func_nodes = (
+        n for n in class_node.body if isinstance(n, (ast.FunctionDef, ast.AsyncFunctionDef))
+    )
+    for node in func_nodes:
         # Don't count @property decorators as methods
         if not has_property_decorator(node):
             methods += 1
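Behavior is unchanged here: count_methods still skips @property-decorated functions, only the iteration style moved to a generator. A quick usage check, assuming the module is importable as src.linters.srp.heuristics (the import path the package itself uses):

# Usage sketch; expects 1 because the @property accessor is excluded from the method count.
import ast

from src.linters.srp.heuristics import count_methods  # import path as used inside the package

source = """
class Account:
    @property
    def balance(self):
        return self._balance

    def deposit(self, amount):
        self._balance += amount
"""

class_node = ast.parse(source).body[0]
assert isinstance(class_node, ast.ClassDef)
print(count_methods(class_node))  # expected: 1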
src/linters/srp/linter.py CHANGED
@@ -171,9 +171,8 @@ class SRPRule(MultiLanguageLintRule):
             List of violations
         """
         violations = []
-        for metrics in metrics_list:
-            if not isinstance(metrics, dict):
-                continue
+        valid_metrics = (m for m in metrics_list if isinstance(m, dict))
+        for metrics in valid_metrics:
             violation = self._create_violation_if_needed(metrics, config, context)
             if violation:
                 violations.append(violation)
src/linters/srp/python_analyzer.py CHANGED
@@ -1,7 +1,7 @@
 """
 Purpose: Python AST analyzer for detecting SRP violations in Python classes
 
-Scope: PythonSRPAnalyzer class for analyzing Python classes using AST
+Scope: Functions for analyzing Python classes using AST
 
 Overview: Implements Python-specific SRP analysis using the ast module to parse and analyze
     class definitions. Walks the AST to find all class definitions, then analyzes each class
@@ -13,7 +13,7 @@ Overview: Implements Python-specific SRP analysis using the ast module to parse
 
 Dependencies: ast module for Python AST parsing, typing for type hints, heuristics module
 
-Exports: PythonSRPAnalyzer class
+Exports: find_all_classes function, analyze_class function, PythonSRPAnalyzer class (compat)
 
 Interfaces: find_all_classes(tree), analyze_class(class_node, source, config)
 
@@ -27,8 +27,58 @@ from .config import SRPConfig
 from .heuristics import count_loc, count_methods, has_responsibility_keyword
 
 
+def find_all_classes(tree: ast.AST) -> list[ast.ClassDef]:
+    """Find all class definitions in AST.
+
+    Args:
+        tree: Root AST node to search
+
+    Returns:
+        List of all class definition nodes
+    """
+    classes = []
+    for node in ast.walk(tree):
+        if isinstance(node, ast.ClassDef):
+            classes.append(node)
+    return classes
+
+
+def analyze_class(class_node: ast.ClassDef, source: str, config: SRPConfig) -> dict[str, Any]:
+    """Analyze a class for SRP metrics.
+
+    Args:
+        class_node: AST node representing a class definition
+        source: Full source code of the file
+        config: SRP configuration with thresholds and keywords
+
+    Returns:
+        Dictionary with class metrics (name, method_count, loc, etc.)
+    """
+    method_count = count_methods(class_node)
+    loc = count_loc(class_node, source)
+    has_keyword = has_responsibility_keyword(class_node.name, config.keywords)
+
+    return {
+        "class_name": class_node.name,
+        "method_count": method_count,
+        "loc": loc,
+        "has_keyword": has_keyword,
+        "line": class_node.lineno,
+        "column": class_node.col_offset,
+    }
+
+
+# Legacy class wrapper for backward compatibility
 class PythonSRPAnalyzer:
-    """Analyzes Python classes for SRP violations."""
+    """Analyzes Python classes for SRP violations.
+
+    Note: This class is a thin wrapper around module-level functions
+    for backward compatibility.
+    """
+
+    def __init__(self) -> None:
+        """Initialize the analyzer."""
+        pass  # No state needed
 
     def find_all_classes(self, tree: ast.AST) -> list[ast.ClassDef]:
         """Find all class definitions in AST.
@@ -39,11 +89,7 @@ class PythonSRPAnalyzer:
         Returns:
             List of all class definition nodes
         """
-        classes = []
-        for node in ast.walk(tree):
-            if isinstance(node, ast.ClassDef):
-                classes.append(node)
-        return classes
+        return find_all_classes(tree)
 
     def analyze_class(
         self, class_node: ast.ClassDef, source: str, config: SRPConfig
@@ -58,15 +104,4 @@ class PythonSRPAnalyzer:
         Returns:
             Dictionary with class metrics (name, method_count, loc, etc.)
         """
-        method_count = count_methods(class_node)
-        loc = count_loc(class_node, source)
-        has_keyword = has_responsibility_keyword(class_node.name, config.keywords)
-
-        return {
-            "class_name": class_node.name,
-            "method_count": method_count,
-            "loc": loc,
-            "has_keyword": has_keyword,
-            "line": class_node.lineno,
-            "column": class_node.col_offset,
-        }
+        return analyze_class(class_node, source, config)
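The wrapper keeps old call sites working while new code can call the module-level functions directly. A usage sketch, assuming SRPConfig() is constructible with its defaults (not verified here):

# Usage sketch based on the functions shown above; SRPConfig() defaults are an assumption.
import ast

from src.linters.srp.config import SRPConfig
from src.linters.srp.python_analyzer import PythonSRPAnalyzer, analyze_class, find_all_classes

source = "class ReportManager:\n    def run(self):\n        return 1\n"
tree = ast.parse(source)
config = SRPConfig()

# New style: plain functions.
metrics = [analyze_class(cls, source, config) for cls in find_all_classes(tree)]

# Old style: the compatibility wrapper delegates to the same functions.
analyzer = PythonSRPAnalyzer()
legacy = [analyzer.analyze_class(cls, source, config) for cls in analyzer.find_all_classes(tree)]

assert metrics == legacy
print(metrics[0]["class_name"], metrics[0]["method_count"])  # ReportManager 1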
src/linters/srp/typescript_metrics_calculator.py CHANGED
@@ -10,7 +10,7 @@ Overview: Provides metrics calculation functionality for TypeScript classes in S
 
 Dependencies: typing
 
-Exports: TypeScriptMetricsCalculator
+Exports: count_methods function, count_loc function, TypeScriptMetricsCalculator class (compat)
 
 Interfaces: count_methods(class_node), count_loc(class_node, source)
 
@@ -20,8 +20,87 @@ Implementation: Tree-sitter node type matching, AST position arithmetic
 from typing import Any
 
 
+def count_methods(class_node: Any) -> int:
+    """Count number of methods in a TypeScript class.
+
+    Args:
+        class_node: Class declaration tree-sitter node
+
+    Returns:
+        Number of public methods (excludes constructor)
+    """
+    class_body = _get_class_body(class_node)
+    if not class_body:
+        return 0
+
+    method_count = 0
+    for child in class_body.children:
+        if _is_countable_method(child):
+            method_count += 1
+
+    return method_count
+
+
+def count_loc(class_node: Any, source: str) -> int:
+    """Count lines of code in a TypeScript class.
+
+    Args:
+        class_node: Class declaration tree-sitter node
+        source: Full source code string
+
+    Returns:
+        Number of lines in class definition
+    """
+    start_line = class_node.start_point[0]
+    end_line = class_node.end_point[0]
+    return end_line - start_line + 1
+
+
+def _get_class_body(class_node: Any) -> Any:
+    """Get the class_body node from a class declaration.
+
+    Args:
+        class_node: Class declaration node
+
+    Returns:
+        Class body node or None
+    """
+    for child in class_node.children:
+        if child.type == "class_body":
+            return child
+    return None
+
+
+def _is_countable_method(node: Any) -> bool:
+    """Check if node is a method that should be counted.
+
+    Args:
+        node: Tree-sitter node to check
+
+    Returns:
+        True if node is a countable method
+    """
+    if node.type != "method_definition":
+        return False
+
+    # Check if it's a constructor
+    return all(
+        not (child.type == "property_identifier" and child.text.decode() == "constructor")
+        for child in node.children
+    )
+
+
+# Legacy class wrapper for backward compatibility
 class TypeScriptMetricsCalculator:
-    """Calculates metrics for TypeScript classes."""
+    """Calculates metrics for TypeScript classes.
+
+    Note: This class is a thin wrapper around module-level functions
+    for backward compatibility.
+    """
+
+    def __init__(self) -> None:
+        """Initialize the metrics calculator."""
+        pass  # No state needed
 
     def count_methods(self, class_node: Any) -> int:
         """Count number of methods in a TypeScript class.
@@ -32,16 +111,7 @@ class TypeScriptMetricsCalculator:
         Returns:
             Number of public methods (excludes constructor)
         """
-        class_body = self._get_class_body(class_node)
-        if not class_body:
-            return 0
-
-        method_count = 0
-        for child in class_body.children:
-            if self._is_countable_method(child):
-                method_count += 1
-
-        return method_count
+        return count_methods(class_node)
 
     def count_loc(self, class_node: Any, source: str) -> int:
         """Count lines of code in a TypeScript class.
@@ -53,38 +123,4 @@ class TypeScriptMetricsCalculator:
         Returns:
             Number of lines in class definition
         """
-        start_line = class_node.start_point[0]
-        end_line = class_node.end_point[0]
-        return end_line - start_line + 1
-
-    def _get_class_body(self, class_node: Any) -> Any:
-        """Get the class_body node from a class declaration.
-
-        Args:
-            class_node: Class declaration node
-
-        Returns:
-            Class body node or None
-        """
-        for child in class_node.children:
-            if child.type == "class_body":
-                return child
-        return None
-
-    def _is_countable_method(self, node: Any) -> bool:
-        """Check if node is a method that should be counted.
-
-        Args:
-            node: Tree-sitter node to check
-
-        Returns:
-            True if node is a countable method
-        """
-        if node.type != "method_definition":
-            return False
-
-        # Check if it's a constructor
-        return all(
-            not (child.type == "property_identifier" and child.text.decode() == "constructor")
-            for child in node.children
-        )
+        return count_loc(class_node, source)
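Because the extracted functions take tree-sitter nodes as plain Any values, they only read a handful of attributes (.type, .children, .text, .start_point, .end_point), which makes them easy to exercise with stub nodes. A sketch with stand-in objects (illustrative; real callers pass tree-sitter nodes):

# Stub nodes mimic the attributes the diff's functions read; not how the linter builds its AST.
from dataclasses import dataclass, field
from typing import Any

from src.linters.srp.typescript_metrics_calculator import count_loc, count_methods


@dataclass
class StubNode:
    type: str
    text: bytes = b""
    children: list[Any] = field(default_factory=list)
    start_point: tuple[int, int] = (0, 0)
    end_point: tuple[int, int] = (0, 0)


ctor = StubNode("method_definition", children=[StubNode("property_identifier", b"constructor")])
save = StubNode("method_definition", children=[StubNode("property_identifier", b"save")])
body = StubNode("class_body", children=[ctor, save])
cls = StubNode("class_declaration", children=[body], start_point=(10, 0), end_point=(19, 1))

print(count_methods(cls))         # expected: 1 (the constructor is excluded)
print(count_loc(cls, source=""))  # expected: 10 (19 - 10 + 1)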
src/linters/srp/violation_builder.py CHANGED
@@ -29,6 +29,10 @@ from src.core.violation_builder import BaseViolationBuilder, ViolationInfo
 class ViolationBuilder(BaseViolationBuilder):
     """Builds SRP violations with messages and suggestions."""
 
+    def __init__(self) -> None:  # pylint: disable=useless-parent-delegation
+        """Initialize the violation builder."""
+        super().__init__()  # Inherits from BaseViolationBuilder
+
     def build_violation(
         self,
         metrics: dict[str, Any],