thailint-0.12.0-py3-none-any.whl → thailint-0.14.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (135)
  1. src/analyzers/__init__.py +4 -3
  2. src/analyzers/ast_utils.py +54 -0
  3. src/analyzers/typescript_base.py +4 -0
  4. src/cli/__init__.py +3 -0
  5. src/cli/config.py +12 -12
  6. src/cli/config_merge.py +241 -0
  7. src/cli/linters/__init__.py +9 -0
  8. src/cli/linters/code_patterns.py +107 -257
  9. src/cli/linters/code_smells.py +48 -165
  10. src/cli/linters/documentation.py +21 -95
  11. src/cli/linters/performance.py +274 -0
  12. src/cli/linters/shared.py +232 -6
  13. src/cli/linters/structure.py +26 -21
  14. src/cli/linters/structure_quality.py +28 -21
  15. src/cli_main.py +3 -0
  16. src/config.py +2 -1
  17. src/core/base.py +3 -2
  18. src/core/cli_utils.py +3 -1
  19. src/core/config_parser.py +5 -2
  20. src/core/constants.py +54 -0
  21. src/core/linter_utils.py +95 -6
  22. src/core/rule_discovery.py +5 -1
  23. src/core/violation_builder.py +3 -0
  24. src/linter_config/directive_markers.py +109 -0
  25. src/linter_config/ignore.py +225 -383
  26. src/linter_config/pattern_utils.py +65 -0
  27. src/linter_config/rule_matcher.py +89 -0
  28. src/linters/collection_pipeline/any_all_analyzer.py +281 -0
  29. src/linters/collection_pipeline/ast_utils.py +40 -0
  30. src/linters/collection_pipeline/config.py +12 -0
  31. src/linters/collection_pipeline/continue_analyzer.py +2 -8
  32. src/linters/collection_pipeline/detector.py +262 -32
  33. src/linters/collection_pipeline/filter_map_analyzer.py +402 -0
  34. src/linters/collection_pipeline/linter.py +18 -35
  35. src/linters/collection_pipeline/suggestion_builder.py +68 -1
  36. src/linters/dry/base_token_analyzer.py +16 -9
  37. src/linters/dry/block_filter.py +7 -4
  38. src/linters/dry/cache.py +7 -2
  39. src/linters/dry/config.py +7 -1
  40. src/linters/dry/constant_matcher.py +34 -25
  41. src/linters/dry/file_analyzer.py +4 -2
  42. src/linters/dry/inline_ignore.py +7 -16
  43. src/linters/dry/linter.py +48 -25
  44. src/linters/dry/python_analyzer.py +18 -10
  45. src/linters/dry/python_constant_extractor.py +51 -52
  46. src/linters/dry/single_statement_detector.py +14 -12
  47. src/linters/dry/token_hasher.py +115 -115
  48. src/linters/dry/typescript_analyzer.py +11 -6
  49. src/linters/dry/typescript_constant_extractor.py +4 -0
  50. src/linters/dry/typescript_statement_detector.py +208 -208
  51. src/linters/dry/typescript_value_extractor.py +3 -0
  52. src/linters/dry/violation_filter.py +1 -4
  53. src/linters/dry/violation_generator.py +1 -4
  54. src/linters/file_header/atemporal_detector.py +58 -40
  55. src/linters/file_header/base_parser.py +4 -0
  56. src/linters/file_header/bash_parser.py +4 -0
  57. src/linters/file_header/config.py +14 -0
  58. src/linters/file_header/field_validator.py +5 -8
  59. src/linters/file_header/linter.py +19 -12
  60. src/linters/file_header/markdown_parser.py +6 -0
  61. src/linters/file_placement/config_loader.py +3 -1
  62. src/linters/file_placement/linter.py +22 -8
  63. src/linters/file_placement/pattern_matcher.py +21 -4
  64. src/linters/file_placement/pattern_validator.py +21 -7
  65. src/linters/file_placement/rule_checker.py +2 -2
  66. src/linters/lazy_ignores/__init__.py +43 -0
  67. src/linters/lazy_ignores/config.py +66 -0
  68. src/linters/lazy_ignores/directive_utils.py +121 -0
  69. src/linters/lazy_ignores/header_parser.py +177 -0
  70. src/linters/lazy_ignores/linter.py +158 -0
  71. src/linters/lazy_ignores/matcher.py +135 -0
  72. src/linters/lazy_ignores/python_analyzer.py +205 -0
  73. src/linters/lazy_ignores/rule_id_utils.py +180 -0
  74. src/linters/lazy_ignores/skip_detector.py +298 -0
  75. src/linters/lazy_ignores/types.py +69 -0
  76. src/linters/lazy_ignores/typescript_analyzer.py +146 -0
  77. src/linters/lazy_ignores/violation_builder.py +131 -0
  78. src/linters/lbyl/__init__.py +29 -0
  79. src/linters/lbyl/config.py +63 -0
  80. src/linters/lbyl/pattern_detectors/__init__.py +25 -0
  81. src/linters/lbyl/pattern_detectors/base.py +46 -0
  82. src/linters/magic_numbers/context_analyzer.py +227 -229
  83. src/linters/magic_numbers/linter.py +20 -15
  84. src/linters/magic_numbers/python_analyzer.py +4 -16
  85. src/linters/magic_numbers/typescript_analyzer.py +9 -16
  86. src/linters/method_property/config.py +4 -1
  87. src/linters/method_property/linter.py +5 -10
  88. src/linters/method_property/python_analyzer.py +5 -4
  89. src/linters/method_property/violation_builder.py +3 -0
  90. src/linters/nesting/linter.py +11 -6
  91. src/linters/nesting/typescript_analyzer.py +6 -12
  92. src/linters/nesting/typescript_function_extractor.py +0 -4
  93. src/linters/nesting/violation_builder.py +1 -0
  94. src/linters/performance/__init__.py +91 -0
  95. src/linters/performance/config.py +43 -0
  96. src/linters/performance/constants.py +49 -0
  97. src/linters/performance/linter.py +149 -0
  98. src/linters/performance/python_analyzer.py +365 -0
  99. src/linters/performance/regex_analyzer.py +312 -0
  100. src/linters/performance/regex_linter.py +139 -0
  101. src/linters/performance/typescript_analyzer.py +236 -0
  102. src/linters/performance/violation_builder.py +160 -0
  103. src/linters/print_statements/linter.py +6 -4
  104. src/linters/print_statements/python_analyzer.py +85 -81
  105. src/linters/print_statements/typescript_analyzer.py +6 -15
  106. src/linters/srp/heuristics.py +4 -4
  107. src/linters/srp/linter.py +12 -12
  108. src/linters/srp/violation_builder.py +0 -4
  109. src/linters/stateless_class/linter.py +30 -36
  110. src/linters/stateless_class/python_analyzer.py +11 -20
  111. src/linters/stringly_typed/config.py +4 -5
  112. src/linters/stringly_typed/context_filter.py +410 -410
  113. src/linters/stringly_typed/function_call_violation_builder.py +93 -95
  114. src/linters/stringly_typed/linter.py +48 -16
  115. src/linters/stringly_typed/python/analyzer.py +5 -1
  116. src/linters/stringly_typed/python/call_tracker.py +8 -5
  117. src/linters/stringly_typed/python/comparison_tracker.py +10 -5
  118. src/linters/stringly_typed/python/condition_extractor.py +3 -0
  119. src/linters/stringly_typed/python/conditional_detector.py +4 -1
  120. src/linters/stringly_typed/python/match_analyzer.py +8 -2
  121. src/linters/stringly_typed/python/validation_detector.py +3 -0
  122. src/linters/stringly_typed/storage.py +14 -14
  123. src/linters/stringly_typed/typescript/call_tracker.py +9 -3
  124. src/linters/stringly_typed/typescript/comparison_tracker.py +9 -3
  125. src/linters/stringly_typed/violation_generator.py +288 -259
  126. src/orchestrator/core.py +13 -4
  127. src/templates/thailint_config_template.yaml +196 -0
  128. src/utils/project_root.py +3 -0
  129. thailint-0.14.0.dist-info/METADATA +185 -0
  130. thailint-0.14.0.dist-info/RECORD +199 -0
  131. thailint-0.12.0.dist-info/METADATA +0 -1667
  132. thailint-0.12.0.dist-info/RECORD +0 -164
  133. {thailint-0.12.0.dist-info → thailint-0.14.0.dist-info}/WHEEL +0 -0
  134. {thailint-0.12.0.dist-info → thailint-0.14.0.dist-info}/entry_points.txt +0 -0
  135. {thailint-0.12.0.dist-info → thailint-0.14.0.dist-info}/licenses/LICENSE +0 -0
@@ -10,246 +10,246 @@ Overview: Provides sophisticated single-statement pattern detection to filter fa

 Dependencies: tree-sitter for TypeScript AST parsing

-Exports: TypeScriptStatementDetector class
+Exports: is_single_statement, should_include_block functions

-Interfaces: TypeScriptStatementDetector.is_single_statement(content, start_line, end_line) -> bool
+Interfaces: is_single_statement(content, start_line, end_line) -> bool,
+    should_include_block(content, start_line, end_line) -> bool

 Implementation: Tree-sitter AST walking with pattern matching for TypeScript constructs

-SRP Exception: TypeScriptStatementDetector has 20 methods (exceeds max 8 methods)
-Justification: Complex tree-sitter AST analysis algorithm for single-statement pattern detection.
-Methods form tightly coupled algorithm pipeline: decorator detection, call expression analysis,
-declaration patterns, JSX element handling, class body field definitions, and interface filtering.
-Similar to parser or compiler pass architecture where algorithmic cohesion is critical. Splitting
-would fragment the algorithm logic and make maintenance harder by separating interdependent
-tree-sitter AST analysis steps. All methods contribute to single responsibility: accurately
-detecting single-statement patterns to prevent false positives in TypeScript duplicate detection.
+Suppressions:
+    - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
 """

 from collections.abc import Generator
+from typing import Any

 from src.analyzers.typescript_base import TREE_SITTER_AVAILABLE

 if TREE_SITTER_AVAILABLE:
     from tree_sitter import Node
 else:
-    Node = None  # type: ignore[assignment,misc]  # pylint: disable=invalid-name
+    Node = Any  # type: ignore[assignment,misc]


-class TypeScriptStatementDetector:  # thailint: ignore[srp.violation]
-    """Detects single-statement patterns in TypeScript/JavaScript for duplicate filtering.
+def is_single_statement(content: str, start_line: int, end_line: int) -> bool:
+    """Check if a line range is a single logical statement.

-    SRP suppression: Complex tree-sitter AST analysis algorithm requires 20 methods to implement
-    sophisticated single-statement detection. See file header for justification.
+    Args:
+        content: TypeScript source code
+        start_line: Starting line number (1-indexed)
+        end_line: Ending line number (1-indexed)
+
+    Returns:
+        True if this range represents a single logical statement/expression
     """
+    if not TREE_SITTER_AVAILABLE:
+        return False

-    def is_single_statement(self, content: str, start_line: int, end_line: int) -> bool:
-        """Check if a line range is a single logical statement.
+    from src.analyzers.typescript_base import TypeScriptBaseAnalyzer

-        Args:
-            content: TypeScript source code
-            start_line: Starting line number (1-indexed)
-            end_line: Ending line number (1-indexed)
+    analyzer = TypeScriptBaseAnalyzer()
+    root = analyzer.parse_typescript(content)
+    if not root:
+        return False

-        Returns:
-            True if this range represents a single logical statement/expression
-        """
-        if not TREE_SITTER_AVAILABLE:
-            return False
+    return _check_overlapping_nodes(root, start_line, end_line)

-        from src.analyzers.typescript_base import TypeScriptBaseAnalyzer

-        analyzer = TypeScriptBaseAnalyzer()
-        root = analyzer.parse_typescript(content)
-        if not root:
-            return False
+def should_include_block(content: str, start_line: int, end_line: int) -> bool:
+    """Check if block should be included (not overlapping interface definitions).

-        return self._check_overlapping_nodes(root, start_line, end_line)
+    Args:
+        content: File content
+        start_line: Block start line
+        end_line: Block end line

-    def _check_overlapping_nodes(self, root: Node, start_line: int, end_line: int) -> bool:
-        """Check if any AST node overlaps and matches single-statement pattern."""
-        ts_start = start_line - 1  # Convert to 0-indexed
-        ts_end = end_line - 1
+    Returns:
+        False if block overlaps interface definition, True otherwise
+    """
+    interface_ranges = _find_interface_ranges(content)
+    return not _overlaps_interface(start_line, end_line, interface_ranges)

-        for node in self._walk_nodes(root):
-            if self._node_overlaps_and_matches(node, ts_start, ts_end):
-                return True
-        return False

-    def _walk_nodes(self, node: Node) -> Generator[Node, None, None]:
-        """Generator to walk all nodes in tree."""
-        yield node
-        for child in node.children:
-            yield from self._walk_nodes(child)
-
-    def _node_overlaps_and_matches(self, node: Node, ts_start: int, ts_end: int) -> bool:
-        """Check if node overlaps with range and matches single-statement pattern."""
-        node_start = node.start_point[0]
-        node_end = node.end_point[0]
-
-        overlaps = not (node_end < ts_start or node_start > ts_end)
-        if not overlaps:
-            return False
-
-        return self._is_single_statement_pattern(node, ts_start, ts_end)
-
-    def _is_single_statement_pattern(self, node: Node, ts_start: int, ts_end: int) -> bool:
-        """Check if an AST node represents a single-statement pattern to filter."""
-        node_start = node.start_point[0]
-        node_end = node.end_point[0]
-        contains = (node_start <= ts_start) and (node_end >= ts_end)
-
-        matchers = [
-            self._matches_simple_container_pattern(node, contains),
-            self._matches_call_expression_pattern(node, ts_start, ts_end, contains),
-            self._matches_declaration_pattern(node, contains),
-            self._matches_jsx_pattern(node, contains),
-            self._matches_class_body_pattern(node, ts_start, ts_end),
-        ]
-        return any(matchers)
-
-    def _matches_simple_container_pattern(self, node: Node, contains: bool) -> bool:
-        """Check if node is a simple container pattern (decorator, object, etc.)."""
-        simple_types = (
-            "decorator",
-            "object",
-            "member_expression",
-            "as_expression",
-            "array_pattern",
-        )
-        return node.type in simple_types and contains
-
-    def _matches_call_expression_pattern(
-        self, node: Node, ts_start: int, ts_end: int, contains: bool
-    ) -> bool:
-        """Check if node is a call expression pattern."""
-        if node.type != "call_expression":
-            return False
-
-        node_start = node.start_point[0]
-        node_end = node.end_point[0]
-        is_multiline = node_start < node_end
-        if is_multiline and node_start <= ts_start <= node_end:
-            return True
+def _check_overlapping_nodes(root: Node, start_line: int, end_line: int) -> bool:
+    """Check if any AST node overlaps and matches single-statement pattern."""
+    ts_start = start_line - 1  # Convert to 0-indexed
+    ts_end = end_line - 1

-        return contains
+    return any(_node_overlaps_and_matches(node, ts_start, ts_end) for node in _walk_nodes(root))

-    def _matches_declaration_pattern(self, node: Node, contains: bool) -> bool:
-        """Check if node is a lexical declaration pattern."""
-        if node.type != "lexical_declaration" or not contains:
-            return False

-        if self._contains_function_body(node):
-            return False
+def _walk_nodes(node: Node) -> Generator[Node, None, None]:
+    """Generator to walk all nodes in tree."""
+    yield node
+    for child in node.children:
+        yield from _walk_nodes(child)
+
+
+def _node_overlaps_and_matches(node: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if node overlaps with range and matches single-statement pattern."""
+    node_start = node.start_point[0]
+    node_end = node.end_point[0]
+
+    overlaps = not (node_end < ts_start or node_start > ts_end)
+    if not overlaps:
+        return False
+
+    return _is_single_statement_pattern(node, ts_start, ts_end)
+
+
+def _is_single_statement_pattern(node: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if an AST node represents a single-statement pattern to filter."""
+    node_start = node.start_point[0]
+    node_end = node.end_point[0]
+    contains = (node_start <= ts_start) and (node_end >= ts_end)
+
+    matchers = [
+        _matches_simple_container_pattern(node, contains),
+        _matches_call_expression_pattern(node, ts_start, ts_end, contains),
+        _matches_declaration_pattern(node, contains),
+        _matches_jsx_pattern(node, contains),
+        _matches_class_body_pattern(node, ts_start, ts_end),
+    ]
+    return any(matchers)
+
+
+def _matches_simple_container_pattern(node: Node, contains: bool) -> bool:
+    """Check if node is a simple container pattern (decorator, object, etc.)."""
+    simple_types = (
+        "decorator",
+        "object",
+        "member_expression",
+        "as_expression",
+        "array_pattern",
+    )
+    return node.type in simple_types and contains
+
+
+def _matches_call_expression_pattern(
+    node: Node, ts_start: int, ts_end: int, contains: bool
+) -> bool:
+    """Check if node is a call expression pattern."""
+    if node.type != "call_expression":
+        return False

+    node_start = node.start_point[0]
+    node_end = node.end_point[0]
+    is_multiline = node_start < node_end
+    if is_multiline and node_start <= ts_start <= node_end:
         return True

-    def _matches_jsx_pattern(self, node: Node, contains: bool) -> bool:
-        """Check if node is a JSX element pattern."""
-        jsx_types = ("jsx_opening_element", "jsx_self_closing_element")
-        return node.type in jsx_types and contains
-
-    def _matches_class_body_pattern(self, node: Node, ts_start: int, ts_end: int) -> bool:
-        """Check if node is a class body field definition pattern."""
-        if node.type != "class_body":
-            return False
-
-        return self._is_in_class_field_area(node, ts_start, ts_end)
-
-    def _contains_function_body(self, node: Node) -> bool:
-        """Check if node contains an arrow function or function expression."""
-        for child in node.children:
-            if child.type in ("arrow_function", "function", "function_expression"):
-                return True
-            if self._contains_function_body(child):
-                return True
+    return contains
+
+
+def _matches_declaration_pattern(node: Node, contains: bool) -> bool:
+    """Check if node is a lexical declaration pattern."""
+    if node.type != "lexical_declaration" or not contains:
+        return False
+
+    if _contains_function_body(node):
         return False

-    def _find_first_method_line(self, class_body: Node) -> int | None:
-        """Find line number of first method in class body."""
-        for child in class_body.children:
-            if child.type in ("method_definition", "function_declaration"):
-                return child.start_point[0]
-        return None
-
-    def _is_in_class_field_area(self, class_body: Node, ts_start: int, ts_end: int) -> bool:
-        """Check if range is in class field definition area (before methods)."""
-        first_method_line = self._find_first_method_line(class_body)
-        class_start = class_body.start_point[0]
-        class_end = class_body.end_point[0]
-
-        if first_method_line is None:
-            return class_start <= ts_start and class_end >= ts_end
-
-        return class_start <= ts_start and ts_end < first_method_line
-
-    def should_include_block(self, content: str, start_line: int, end_line: int) -> bool:
-        """Check if block should be included (not overlapping interface definitions).
-
-        Args:
-            content: File content
-            start_line: Block start line
-            end_line: Block end line
-
-        Returns:
-            False if block overlaps interface definition, True otherwise
-        """
-        interface_ranges = self._find_interface_ranges(content)
-        return not self._overlaps_interface(start_line, end_line, interface_ranges)
-
-    def _find_interface_ranges(self, content: str) -> list[tuple[int, int]]:
-        """Find line ranges of interface/type definitions."""
-        ranges: list[tuple[int, int]] = []
-        lines = content.split("\n")
-        state = {"in_interface": False, "start_line": 0, "brace_count": 0}
-
-        for i, line in enumerate(lines, start=1):
-            stripped = line.strip()
-            self._process_line_for_interface(stripped, i, state, ranges)
-
-        return ranges
-
-    def _process_line_for_interface(
-        self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-    ) -> None:
-        """Process single line for interface detection."""
-        if self._is_interface_start(stripped):
-            self._handle_interface_start(stripped, line_num, state, ranges)
-            return
-
-        if state["in_interface"]:
-            self._handle_interface_continuation(stripped, line_num, state, ranges)
-
-    def _is_interface_start(self, stripped: str) -> bool:
-        """Check if line starts interface/type definition."""
-        return stripped.startswith(("interface ", "type ")) and "{" in stripped
-
-    def _handle_interface_start(
-        self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-    ) -> None:
-        """Handle start of interface definition."""
-        state["in_interface"] = True
-        state["start_line"] = line_num
-        state["brace_count"] = stripped.count("{") - stripped.count("}")
-
-        if state["brace_count"] == 0:
-            ranges.append((line_num, line_num))
-            state["in_interface"] = False
-
-    def _handle_interface_continuation(
-        self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-    ) -> None:
-        """Handle continuation of interface definition."""
-        state["brace_count"] += stripped.count("{") - stripped.count("}")
-        if state["brace_count"] == 0:
-            ranges.append((state["start_line"], line_num))
-            state["in_interface"] = False
-
-    def _overlaps_interface(
-        self, start: int, end: int, interface_ranges: list[tuple[int, int]]
-    ) -> bool:
-        """Check if block overlaps with any interface range."""
-        for if_start, if_end in interface_ranges:
-            if start <= if_end and end >= if_start:
-                return True
+    return True
+
+
+def _matches_jsx_pattern(node: Node, contains: bool) -> bool:
+    """Check if node is a JSX element pattern."""
+    jsx_types = ("jsx_opening_element", "jsx_self_closing_element")
+    return node.type in jsx_types and contains
+
+
+def _matches_class_body_pattern(node: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if node is a class body field definition pattern."""
+    if node.type != "class_body":
         return False
+
+    return _is_in_class_field_area(node, ts_start, ts_end)
+
+
+def _contains_function_body(node: Node) -> bool:
+    """Check if node contains an arrow function or function expression."""
+    for child in node.children:
+        if child.type in ("arrow_function", "function", "function_expression"):
+            return True
+        if _contains_function_body(child):
+            return True
+    return False
+
+
+def _find_first_method_line(class_body: Node) -> int | None:
+    """Find line number of first method in class body."""
+    for child in class_body.children:
+        if child.type in ("method_definition", "function_declaration"):
+            return child.start_point[0]
+    return None
+
+
+def _is_in_class_field_area(class_body: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if range is in class field definition area (before methods)."""
+    first_method_line = _find_first_method_line(class_body)
+    class_start = class_body.start_point[0]
+    class_end = class_body.end_point[0]
+
+    if first_method_line is None:
+        return class_start <= ts_start and class_end >= ts_end
+
+    return class_start <= ts_start and ts_end < first_method_line
+
+
+def _find_interface_ranges(content: str) -> list[tuple[int, int]]:
+    """Find line ranges of interface/type definitions."""
+    ranges: list[tuple[int, int]] = []
+    lines = content.split("\n")
+    state = {"in_interface": False, "start_line": 0, "brace_count": 0}
+
+    for i, line in enumerate(lines, start=1):
+        stripped = line.strip()
+        _process_line_for_interface(stripped, i, state, ranges)
+
+    return ranges
+
+
+def _process_line_for_interface(
+    stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+) -> None:
+    """Process single line for interface detection."""
+    if _is_interface_start(stripped):
+        _handle_interface_start(stripped, line_num, state, ranges)
+        return
+
+    if state["in_interface"]:
+        _handle_interface_continuation(stripped, line_num, state, ranges)
+
+
+def _is_interface_start(stripped: str) -> bool:
+    """Check if line starts interface/type definition."""
+    return stripped.startswith(("interface ", "type ")) and "{" in stripped
+
+
+def _handle_interface_start(
+    stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+) -> None:
+    """Handle start of interface definition."""
+    state["in_interface"] = True
+    state["start_line"] = line_num
+    state["brace_count"] = stripped.count("{") - stripped.count("}")
+
+    if state["brace_count"] == 0:
+        ranges.append((line_num, line_num))
+        state["in_interface"] = False
+
+
+def _handle_interface_continuation(
+    stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+) -> None:
+    """Handle continuation of interface definition."""
+    state["brace_count"] += stripped.count("{") - stripped.count("}")
+    if state["brace_count"] == 0:
+        ranges.append((state["start_line"], line_num))
+        state["in_interface"] = False
+
+
+def _overlaps_interface(start: int, end: int, interface_ranges: list[tuple[int, int]]) -> bool:
+    """Check if block overlaps with any interface range."""
+    return any(start <= if_end and end >= if_start for if_start, if_end in interface_ranges)
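The hunk above converts the DRY linter's TypeScript single-statement detector from a 20-method class into module-level functions. A rough usage sketch follows (the import path is inferred from the file list entry src/linters/dry/typescript_statement_detector.py; the TypeScript snippet and line numbers are invented, and both calls fall back to conservative defaults when tree-sitter is not installed):

# Hypothetical caller, not part of the diff
from src.linters.dry.typescript_statement_detector import (
    is_single_statement,
    should_include_block,
)

ts_source = "const x = makeThing({\n  a: 1,\n  b: 2,\n});"

# Lines 1-3 fall inside one multi-line call expression, so the range counts
# as a single logical statement and gets filtered from duplicate detection.
print(is_single_statement(ts_source, start_line=1, end_line=3))

# No interface/type definition overlaps lines 1-3, so the block is kept.
print(should_include_block(ts_source, start_line=1, end_line=3))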
@@ -14,6 +14,9 @@ Exports: TypeScriptValueExtractor class
 Interfaces: TypeScriptValueExtractor.get_value_string(node, content) -> str | None

 Implementation: Tree-sitter node traversal with type-specific string formatting
+
+Suppressions:
+    - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
 """

 from typing import Any
@@ -54,10 +54,7 @@ class ViolationFilter:
         Returns:
             True if violation overlaps with any kept violation
         """
-        for kept in kept_violations:
-            if self._overlaps(violation, kept):
-                return True
-        return False
+        return any(self._overlaps(violation, kept) for kept in kept_violations)

    def _overlaps(self, v1: Violation, v2: Violation) -> bool:
        """Check if two violations overlap.
@@ -128,10 +128,7 @@ class ViolationGenerator:
             True if file should be ignored
         """
         path_str = str(Path(file_path))
-        for pattern in ignore_patterns:
-            if pattern in path_str:
-                return True
-        return False
+        return any(pattern in path_str for pattern in ignore_patterns)

    def _filter_inline_ignored(
        self, violations: list[Violation], inline_ignore: InlineIgnoreParser
@@ -15,50 +15,68 @@ Exports: AtemporalDetector class with detect_violations method

 Interfaces: detect_violations(text) -> list[tuple[str, str, int]] returns pattern matches with line numbers

-Implementation: Regex-based pattern matching with predefined patterns organized by category
+Implementation: Regex-based pattern matching with pre-compiled patterns organized by category
+
+Suppressions:
+    - nesting: detect_violations iterates over pattern categories and their patterns.
+      Natural grouping by category requires nested loops.
 """

 import re
+from re import Pattern
+
+
+def _compile_patterns(patterns: list[tuple[str, str]]) -> list[tuple[Pattern[str], str]]:
+    """Compile regex patterns for efficient reuse."""
+    return [(re.compile(pattern, re.IGNORECASE), desc) for pattern, desc in patterns]


 class AtemporalDetector:
     """Detects temporal language patterns in text."""

-    # Date patterns
-    DATE_PATTERNS = [
-        (r"\d{4}-\d{2}-\d{2}", "ISO date format (YYYY-MM-DD)"),
-        (
-            r"(?:January|February|March|April|May|June|July|August|September|October|November|December)\s+\d{4}",
-            "Month Year format",
-        ),
-        (r"(?:Created|Updated|Modified):\s*\d{4}", "Date metadata"),
-    ]
-
-    # Temporal qualifiers
-    TEMPORAL_QUALIFIERS = [
-        (r"\bcurrently\b", 'temporal qualifier "currently"'),
-        (r"\bnow\b", 'temporal qualifier "now"'),
-        (r"\brecently\b", 'temporal qualifier "recently"'),
-        (r"\bsoon\b", 'temporal qualifier "soon"'),
-        (r"\bfor now\b", 'temporal qualifier "for now"'),
-    ]
-
-    # State change language
-    STATE_CHANGE = [
-        (r"\breplaces?\b", 'state change "replaces"'),
-        (r"\bmigrated from\b", 'state change "migrated from"'),
-        (r"\bformerly\b", 'state change "formerly"'),
-        (r"\bold implementation\b", 'state change "old"'),
-        (r"\bnew implementation\b", 'state change "new"'),
-    ]
-
-    # Future references
-    FUTURE_REFS = [
-        (r"\bwill be\b", 'future reference "will be"'),
-        (r"\bplanned\b", 'future reference "planned"'),
-        (r"\bto be added\b", 'future reference "to be added"'),
-        (r"\bcoming soon\b", 'future reference "coming soon"'),
-    ]
+    # Pre-compiled date patterns
+    DATE_PATTERNS = _compile_patterns(
+        [
+            (r"\d{4}-\d{2}-\d{2}", "ISO date format (YYYY-MM-DD)"),
+            (
+                r"(?:January|February|March|April|May|June|July|August|September|October|November|December)\s+\d{4}",
+                "Month Year format",
+            ),
+            (r"(?:Created|Updated|Modified):\s*\d{4}", "Date metadata"),
+        ]
+    )
+
+    # Pre-compiled temporal qualifiers
+    TEMPORAL_QUALIFIERS = _compile_patterns(
+        [
+            (r"\bcurrently\b", 'temporal qualifier "currently"'),
+            (r"\bnow\b", 'temporal qualifier "now"'),
+            (r"\brecently\b", 'temporal qualifier "recently"'),
+            (r"\bsoon\b", 'temporal qualifier "soon"'),
+            (r"\bfor now\b", 'temporal qualifier "for now"'),
+        ]
+    )
+
+    # Pre-compiled state change language
+    STATE_CHANGE = _compile_patterns(
+        [
+            (r"\breplaces?\b", 'state change "replaces"'),
+            (r"\bmigrated from\b", 'state change "migrated from"'),
+            (r"\bformerly\b", 'state change "formerly"'),
+            (r"\bold implementation\b", 'state change "old"'),
+            (r"\bnew implementation\b", 'state change "new"'),
+        ]
+    )
+
+    # Pre-compiled future references
+    FUTURE_REFS = _compile_patterns(
+        [
+            (r"\bwill be\b", 'future reference "will be"'),
+            (r"\bplanned\b", 'future reference "planned"'),
+            (r"\bto be added\b", 'future reference "to be added"'),
+            (r"\bcoming soon\b", 'future reference "coming soon"'),
+        ]
+    )

    def detect_violations(  # thailint: ignore[nesting]
        self, text: str
@@ -73,15 +91,15 @@ class AtemporalDetector:
         """
         violations = []

-        # Check all pattern categories
+        # Check all pattern categories (patterns are pre-compiled)
         all_patterns = (
             self.DATE_PATTERNS + self.TEMPORAL_QUALIFIERS + self.STATE_CHANGE + self.FUTURE_REFS
         )

         lines = text.split("\n")
         for line_num, line in enumerate(lines, start=1):
-            for pattern, description in all_patterns:
-                if re.search(pattern, line, re.IGNORECASE):
-                    violations.append((pattern, description, line_num))
+            for compiled_pattern, description in all_patterns:
+                if compiled_pattern.search(line):
+                    violations.append((compiled_pattern.pattern, description, line_num))

         return violations
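The two hunks above move the file-header linter's temporal-language patterns to regexes compiled once at class-definition time instead of on every search. A small illustrative call, assuming the import path from the file list entry src/linters/file_header/atemporal_detector.py and an invented sample text:

# Hypothetical caller, not part of the diff
from src.linters.file_header.atemporal_detector import AtemporalDetector

detector = AtemporalDetector()
sample = "Currently this replaces the old implementation.\nStable parsing API."

# Each violation is (regex source, category description, 1-indexed line number);
# matching stays case-insensitive because the patterns were compiled with re.IGNORECASE.
for pattern, description, line_num in detector.detect_violations(sample):
    print(line_num, description, pattern)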
@@ -16,6 +16,10 @@ Exports: BaseHeaderParser abstract base class
 Interfaces: extract_header(code) abstract method, parse_fields(header) -> dict[str, str] for field extraction

 Implementation: Template method pattern with shared field parsing and language-specific extraction
+
+Suppressions:
+    - nesting: parse_fields uses nested loops for multi-line field processing. Extracting
+      would fragment the field-building logic without improving clarity.
 """

 import re
@@ -16,6 +16,10 @@ Exports: BashHeaderParser class
 Interfaces: extract_header(code) -> str | None for comment extraction, parse_fields(header) inherited from base

 Implementation: Skips shebang and preamble, then extracts contiguous hash comment block
+
+Suppressions:
+    - nesting: _skip_preamble uses conditional loops for shebang/preamble detection.
+      Sequential line processing requires nested state checks.
 """

 from src.linters.file_header.base_parser import BaseHeaderParser
@@ -111,6 +111,20 @@ class FileHeaderConfig:
         defaults = cls()
         required_fields = config_dict.get("required_fields", {})

+        # Handle both list format (applies to all languages) and dict format (language-specific)
+        if isinstance(required_fields, list):
+            # Simple list format: apply same fields to all languages
+            return cls(
+                required_fields_python=required_fields,
+                required_fields_typescript=required_fields,
+                required_fields_bash=required_fields,
+                required_fields_markdown=required_fields,
+                required_fields_css=required_fields,
+                enforce_atemporal=config_dict.get("enforce_atemporal", True),
+                ignore=config_dict.get("ignore", defaults.ignore),
+            )
+
+        # Dict format: language-specific fields
         return cls(
             required_fields_python=required_fields.get("python", defaults.required_fields_python),
             required_fields_typescript=required_fields.get(
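The final hunk teaches FileHeaderConfig to accept required_fields either as a flat list (applied to every supported language) or as a per-language dict. A hedged sketch of the two accepted shapes; the classmethod name from_dict is an assumption, and only the list-versus-dict branching comes from the hunk above:

# Hypothetical configs, not part of the diff
# Flat list: the same required header fields apply to every supported language.
flat_config = {"required_fields": ["Purpose", "Scope"], "enforce_atemporal": True}

# Per-language dict: languages not listed keep their defaults.
per_language_config = {
    "required_fields": {"python": ["Purpose", "Scope", "Overview"], "bash": ["Purpose"]}
}

# Assuming the classmethod shown above is exposed as from_dict:
# config_a = FileHeaderConfig.from_dict(flat_config)
# config_b = FileHeaderConfig.from_dict(per_language_config)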