thailint 0.12.0__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. src/analyzers/__init__.py +4 -3
  2. src/analyzers/ast_utils.py +54 -0
  3. src/analyzers/typescript_base.py +4 -0
  4. src/cli/__init__.py +3 -0
  5. src/cli/config.py +12 -12
  6. src/cli/config_merge.py +241 -0
  7. src/cli/linters/__init__.py +3 -0
  8. src/cli/linters/code_patterns.py +113 -5
  9. src/cli/linters/code_smells.py +4 -0
  10. src/cli/linters/documentation.py +3 -0
  11. src/cli/linters/structure.py +3 -0
  12. src/cli/linters/structure_quality.py +3 -0
  13. src/cli_main.py +3 -0
  14. src/config.py +2 -1
  15. src/core/base.py +3 -2
  16. src/core/cli_utils.py +3 -1
  17. src/core/config_parser.py +5 -2
  18. src/core/constants.py +54 -0
  19. src/core/linter_utils.py +4 -0
  20. src/core/rule_discovery.py +5 -1
  21. src/core/violation_builder.py +3 -0
  22. src/linter_config/directive_markers.py +109 -0
  23. src/linter_config/ignore.py +225 -383
  24. src/linter_config/pattern_utils.py +65 -0
  25. src/linter_config/rule_matcher.py +89 -0
  26. src/linters/collection_pipeline/any_all_analyzer.py +281 -0
  27. src/linters/collection_pipeline/ast_utils.py +40 -0
  28. src/linters/collection_pipeline/config.py +12 -0
  29. src/linters/collection_pipeline/continue_analyzer.py +2 -8
  30. src/linters/collection_pipeline/detector.py +262 -32
  31. src/linters/collection_pipeline/filter_map_analyzer.py +402 -0
  32. src/linters/collection_pipeline/linter.py +18 -35
  33. src/linters/collection_pipeline/suggestion_builder.py +68 -1
  34. src/linters/dry/base_token_analyzer.py +16 -9
  35. src/linters/dry/block_filter.py +7 -4
  36. src/linters/dry/cache.py +7 -2
  37. src/linters/dry/config.py +7 -1
  38. src/linters/dry/constant_matcher.py +34 -25
  39. src/linters/dry/file_analyzer.py +4 -2
  40. src/linters/dry/inline_ignore.py +7 -16
  41. src/linters/dry/linter.py +48 -25
  42. src/linters/dry/python_analyzer.py +18 -10
  43. src/linters/dry/python_constant_extractor.py +51 -52
  44. src/linters/dry/single_statement_detector.py +14 -12
  45. src/linters/dry/token_hasher.py +115 -115
  46. src/linters/dry/typescript_analyzer.py +11 -6
  47. src/linters/dry/typescript_constant_extractor.py +4 -0
  48. src/linters/dry/typescript_statement_detector.py +208 -208
  49. src/linters/dry/typescript_value_extractor.py +3 -0
  50. src/linters/dry/violation_filter.py +1 -4
  51. src/linters/dry/violation_generator.py +1 -4
  52. src/linters/file_header/atemporal_detector.py +4 -0
  53. src/linters/file_header/base_parser.py +4 -0
  54. src/linters/file_header/bash_parser.py +4 -0
  55. src/linters/file_header/field_validator.py +5 -8
  56. src/linters/file_header/linter.py +19 -12
  57. src/linters/file_header/markdown_parser.py +6 -0
  58. src/linters/file_placement/config_loader.py +3 -1
  59. src/linters/file_placement/linter.py +22 -8
  60. src/linters/file_placement/pattern_matcher.py +21 -4
  61. src/linters/file_placement/pattern_validator.py +21 -7
  62. src/linters/file_placement/rule_checker.py +2 -2
  63. src/linters/lazy_ignores/__init__.py +43 -0
  64. src/linters/lazy_ignores/config.py +66 -0
  65. src/linters/lazy_ignores/directive_utils.py +121 -0
  66. src/linters/lazy_ignores/header_parser.py +177 -0
  67. src/linters/lazy_ignores/linter.py +158 -0
  68. src/linters/lazy_ignores/matcher.py +135 -0
  69. src/linters/lazy_ignores/python_analyzer.py +201 -0
  70. src/linters/lazy_ignores/rule_id_utils.py +180 -0
  71. src/linters/lazy_ignores/skip_detector.py +298 -0
  72. src/linters/lazy_ignores/types.py +67 -0
  73. src/linters/lazy_ignores/typescript_analyzer.py +146 -0
  74. src/linters/lazy_ignores/violation_builder.py +131 -0
  75. src/linters/lbyl/__init__.py +29 -0
  76. src/linters/lbyl/config.py +63 -0
  77. src/linters/lbyl/pattern_detectors/__init__.py +25 -0
  78. src/linters/lbyl/pattern_detectors/base.py +46 -0
  79. src/linters/magic_numbers/context_analyzer.py +227 -229
  80. src/linters/magic_numbers/linter.py +20 -15
  81. src/linters/magic_numbers/python_analyzer.py +4 -16
  82. src/linters/magic_numbers/typescript_analyzer.py +9 -16
  83. src/linters/method_property/config.py +4 -0
  84. src/linters/method_property/linter.py +5 -4
  85. src/linters/method_property/python_analyzer.py +5 -4
  86. src/linters/method_property/violation_builder.py +3 -0
  87. src/linters/nesting/typescript_analyzer.py +6 -12
  88. src/linters/nesting/typescript_function_extractor.py +0 -4
  89. src/linters/print_statements/linter.py +6 -4
  90. src/linters/print_statements/python_analyzer.py +85 -81
  91. src/linters/print_statements/typescript_analyzer.py +6 -15
  92. src/linters/srp/heuristics.py +4 -4
  93. src/linters/srp/linter.py +12 -12
  94. src/linters/srp/violation_builder.py +0 -4
  95. src/linters/stateless_class/linter.py +30 -36
  96. src/linters/stateless_class/python_analyzer.py +11 -20
  97. src/linters/stringly_typed/config.py +4 -5
  98. src/linters/stringly_typed/context_filter.py +410 -410
  99. src/linters/stringly_typed/function_call_violation_builder.py +93 -95
  100. src/linters/stringly_typed/linter.py +48 -16
  101. src/linters/stringly_typed/python/analyzer.py +5 -1
  102. src/linters/stringly_typed/python/call_tracker.py +8 -5
  103. src/linters/stringly_typed/python/comparison_tracker.py +10 -5
  104. src/linters/stringly_typed/python/condition_extractor.py +3 -0
  105. src/linters/stringly_typed/python/conditional_detector.py +4 -1
  106. src/linters/stringly_typed/python/match_analyzer.py +8 -2
  107. src/linters/stringly_typed/python/validation_detector.py +3 -0
  108. src/linters/stringly_typed/storage.py +14 -14
  109. src/linters/stringly_typed/typescript/call_tracker.py +9 -3
  110. src/linters/stringly_typed/typescript/comparison_tracker.py +9 -3
  111. src/linters/stringly_typed/violation_generator.py +288 -259
  112. src/orchestrator/core.py +13 -4
  113. src/templates/thailint_config_template.yaml +166 -0
  114. src/utils/project_root.py +3 -0
  115. thailint-0.13.0.dist-info/METADATA +184 -0
  116. thailint-0.13.0.dist-info/RECORD +189 -0
  117. thailint-0.12.0.dist-info/METADATA +0 -1667
  118. thailint-0.12.0.dist-info/RECORD +0 -164
  119. {thailint-0.12.0.dist-info → thailint-0.13.0.dist-info}/WHEEL +0 -0
  120. {thailint-0.12.0.dist-info → thailint-0.13.0.dist-info}/entry_points.txt +0 -0
  121. {thailint-0.12.0.dist-info → thailint-0.13.0.dist-info}/licenses/LICENSE +0 -0
src/linters/dry/typescript_statement_detector.py
@@ -10,246 +10,246 @@ Overview: Provides sophisticated single-statement pattern detection to filter fa

  Dependencies: tree-sitter for TypeScript AST parsing

- Exports: TypeScriptStatementDetector class
+ Exports: is_single_statement, should_include_block functions

- Interfaces: TypeScriptStatementDetector.is_single_statement(content, start_line, end_line) -> bool
+ Interfaces: is_single_statement(content, start_line, end_line) -> bool,
+ should_include_block(content, start_line, end_line) -> bool

  Implementation: Tree-sitter AST walking with pattern matching for TypeScript constructs

- SRP Exception: TypeScriptStatementDetector has 20 methods (exceeds max 8 methods)
- Justification: Complex tree-sitter AST analysis algorithm for single-statement pattern detection.
- Methods form tightly coupled algorithm pipeline: decorator detection, call expression analysis,
- declaration patterns, JSX element handling, class body field definitions, and interface filtering.
- Similar to parser or compiler pass architecture where algorithmic cohesion is critical. Splitting
- would fragment the algorithm logic and make maintenance harder by separating interdependent
- tree-sitter AST analysis steps. All methods contribute to single responsibility: accurately
- detecting single-statement patterns to prevent false positives in TypeScript duplicate detection.
+ Suppressions:
+ - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
  """

  from collections.abc import Generator
+ from typing import Any

  from src.analyzers.typescript_base import TREE_SITTER_AVAILABLE

  if TREE_SITTER_AVAILABLE:
      from tree_sitter import Node
  else:
-     Node = None  # type: ignore[assignment,misc]  # pylint: disable=invalid-name
+     Node = Any  # type: ignore[assignment,misc]


- class TypeScriptStatementDetector:  # thailint: ignore[srp.violation]
-     """Detects single-statement patterns in TypeScript/JavaScript for duplicate filtering.
+ def is_single_statement(content: str, start_line: int, end_line: int) -> bool:
+     """Check if a line range is a single logical statement.

-     SRP suppression: Complex tree-sitter AST analysis algorithm requires 20 methods to implement
-     sophisticated single-statement detection. See file header for justification.
+     Args:
+         content: TypeScript source code
+         start_line: Starting line number (1-indexed)
+         end_line: Ending line number (1-indexed)
+
+     Returns:
+         True if this range represents a single logical statement/expression
      """
+     if not TREE_SITTER_AVAILABLE:
+         return False

-     def is_single_statement(self, content: str, start_line: int, end_line: int) -> bool:
-         """Check if a line range is a single logical statement.
+     from src.analyzers.typescript_base import TypeScriptBaseAnalyzer

-         Args:
-             content: TypeScript source code
-             start_line: Starting line number (1-indexed)
-             end_line: Ending line number (1-indexed)
+     analyzer = TypeScriptBaseAnalyzer()
+     root = analyzer.parse_typescript(content)
+     if not root:
+         return False

-         Returns:
-             True if this range represents a single logical statement/expression
-         """
-         if not TREE_SITTER_AVAILABLE:
-             return False
+     return _check_overlapping_nodes(root, start_line, end_line)

-         from src.analyzers.typescript_base import TypeScriptBaseAnalyzer

-         analyzer = TypeScriptBaseAnalyzer()
-         root = analyzer.parse_typescript(content)
-         if not root:
-             return False
+ def should_include_block(content: str, start_line: int, end_line: int) -> bool:
+     """Check if block should be included (not overlapping interface definitions).

-         return self._check_overlapping_nodes(root, start_line, end_line)
+     Args:
+         content: File content
+         start_line: Block start line
+         end_line: Block end line

-     def _check_overlapping_nodes(self, root: Node, start_line: int, end_line: int) -> bool:
-         """Check if any AST node overlaps and matches single-statement pattern."""
-         ts_start = start_line - 1  # Convert to 0-indexed
-         ts_end = end_line - 1
+     Returns:
+         False if block overlaps interface definition, True otherwise
+     """
+     interface_ranges = _find_interface_ranges(content)
+     return not _overlaps_interface(start_line, end_line, interface_ranges)

-         for node in self._walk_nodes(root):
-             if self._node_overlaps_and_matches(node, ts_start, ts_end):
-                 return True
-         return False

-     def _walk_nodes(self, node: Node) -> Generator[Node, None, None]:
-         """Generator to walk all nodes in tree."""
-         yield node
-         for child in node.children:
-             yield from self._walk_nodes(child)
-
-     def _node_overlaps_and_matches(self, node: Node, ts_start: int, ts_end: int) -> bool:
-         """Check if node overlaps with range and matches single-statement pattern."""
-         node_start = node.start_point[0]
-         node_end = node.end_point[0]
-
-         overlaps = not (node_end < ts_start or node_start > ts_end)
-         if not overlaps:
-             return False
-
-         return self._is_single_statement_pattern(node, ts_start, ts_end)
-
-     def _is_single_statement_pattern(self, node: Node, ts_start: int, ts_end: int) -> bool:
-         """Check if an AST node represents a single-statement pattern to filter."""
-         node_start = node.start_point[0]
-         node_end = node.end_point[0]
-         contains = (node_start <= ts_start) and (node_end >= ts_end)
-
-         matchers = [
-             self._matches_simple_container_pattern(node, contains),
-             self._matches_call_expression_pattern(node, ts_start, ts_end, contains),
-             self._matches_declaration_pattern(node, contains),
-             self._matches_jsx_pattern(node, contains),
-             self._matches_class_body_pattern(node, ts_start, ts_end),
-         ]
-         return any(matchers)
-
-     def _matches_simple_container_pattern(self, node: Node, contains: bool) -> bool:
-         """Check if node is a simple container pattern (decorator, object, etc.)."""
-         simple_types = (
-             "decorator",
-             "object",
-             "member_expression",
-             "as_expression",
-             "array_pattern",
-         )
-         return node.type in simple_types and contains
-
-     def _matches_call_expression_pattern(
-         self, node: Node, ts_start: int, ts_end: int, contains: bool
-     ) -> bool:
-         """Check if node is a call expression pattern."""
-         if node.type != "call_expression":
-             return False
-
-         node_start = node.start_point[0]
-         node_end = node.end_point[0]
-         is_multiline = node_start < node_end
-         if is_multiline and node_start <= ts_start <= node_end:
-             return True
+ def _check_overlapping_nodes(root: Node, start_line: int, end_line: int) -> bool:
+     """Check if any AST node overlaps and matches single-statement pattern."""
+     ts_start = start_line - 1  # Convert to 0-indexed
+     ts_end = end_line - 1

-         return contains
+     return any(_node_overlaps_and_matches(node, ts_start, ts_end) for node in _walk_nodes(root))

-     def _matches_declaration_pattern(self, node: Node, contains: bool) -> bool:
-         """Check if node is a lexical declaration pattern."""
-         if node.type != "lexical_declaration" or not contains:
-             return False

-         if self._contains_function_body(node):
-             return False
+ def _walk_nodes(node: Node) -> Generator[Node, None, None]:
+     """Generator to walk all nodes in tree."""
+     yield node
+     for child in node.children:
+         yield from _walk_nodes(child)
+
+
+ def _node_overlaps_and_matches(node: Node, ts_start: int, ts_end: int) -> bool:
+     """Check if node overlaps with range and matches single-statement pattern."""
+     node_start = node.start_point[0]
+     node_end = node.end_point[0]
+
+     overlaps = not (node_end < ts_start or node_start > ts_end)
+     if not overlaps:
+         return False
+
+     return _is_single_statement_pattern(node, ts_start, ts_end)
+
+
+ def _is_single_statement_pattern(node: Node, ts_start: int, ts_end: int) -> bool:
+     """Check if an AST node represents a single-statement pattern to filter."""
+     node_start = node.start_point[0]
+     node_end = node.end_point[0]
+     contains = (node_start <= ts_start) and (node_end >= ts_end)
+
+     matchers = [
+         _matches_simple_container_pattern(node, contains),
+         _matches_call_expression_pattern(node, ts_start, ts_end, contains),
+         _matches_declaration_pattern(node, contains),
+         _matches_jsx_pattern(node, contains),
+         _matches_class_body_pattern(node, ts_start, ts_end),
+     ]
+     return any(matchers)
+
+
+ def _matches_simple_container_pattern(node: Node, contains: bool) -> bool:
+     """Check if node is a simple container pattern (decorator, object, etc.)."""
+     simple_types = (
+         "decorator",
+         "object",
+         "member_expression",
+         "as_expression",
+         "array_pattern",
+     )
+     return node.type in simple_types and contains
+
+
+ def _matches_call_expression_pattern(
+     node: Node, ts_start: int, ts_end: int, contains: bool
+ ) -> bool:
+     """Check if node is a call expression pattern."""
+     if node.type != "call_expression":
+         return False

+     node_start = node.start_point[0]
+     node_end = node.end_point[0]
+     is_multiline = node_start < node_end
+     if is_multiline and node_start <= ts_start <= node_end:
          return True

-     def _matches_jsx_pattern(self, node: Node, contains: bool) -> bool:
-         """Check if node is a JSX element pattern."""
-         jsx_types = ("jsx_opening_element", "jsx_self_closing_element")
-         return node.type in jsx_types and contains
-
-     def _matches_class_body_pattern(self, node: Node, ts_start: int, ts_end: int) -> bool:
-         """Check if node is a class body field definition pattern."""
-         if node.type != "class_body":
-             return False
-
-         return self._is_in_class_field_area(node, ts_start, ts_end)
-
-     def _contains_function_body(self, node: Node) -> bool:
-         """Check if node contains an arrow function or function expression."""
-         for child in node.children:
-             if child.type in ("arrow_function", "function", "function_expression"):
-                 return True
-             if self._contains_function_body(child):
-                 return True
+     return contains
+
+
+ def _matches_declaration_pattern(node: Node, contains: bool) -> bool:
+     """Check if node is a lexical declaration pattern."""
+     if node.type != "lexical_declaration" or not contains:
+         return False
+
+     if _contains_function_body(node):
          return False

-     def _find_first_method_line(self, class_body: Node) -> int | None:
-         """Find line number of first method in class body."""
-         for child in class_body.children:
-             if child.type in ("method_definition", "function_declaration"):
-                 return child.start_point[0]
-         return None
-
-     def _is_in_class_field_area(self, class_body: Node, ts_start: int, ts_end: int) -> bool:
-         """Check if range is in class field definition area (before methods)."""
-         first_method_line = self._find_first_method_line(class_body)
-         class_start = class_body.start_point[0]
-         class_end = class_body.end_point[0]
-
-         if first_method_line is None:
-             return class_start <= ts_start and class_end >= ts_end
-
-         return class_start <= ts_start and ts_end < first_method_line
-
-     def should_include_block(self, content: str, start_line: int, end_line: int) -> bool:
-         """Check if block should be included (not overlapping interface definitions).
-
-         Args:
-             content: File content
-             start_line: Block start line
-             end_line: Block end line
-
-         Returns:
-             False if block overlaps interface definition, True otherwise
-         """
-         interface_ranges = self._find_interface_ranges(content)
-         return not self._overlaps_interface(start_line, end_line, interface_ranges)
-
-     def _find_interface_ranges(self, content: str) -> list[tuple[int, int]]:
-         """Find line ranges of interface/type definitions."""
-         ranges: list[tuple[int, int]] = []
-         lines = content.split("\n")
-         state = {"in_interface": False, "start_line": 0, "brace_count": 0}
-
-         for i, line in enumerate(lines, start=1):
-             stripped = line.strip()
-             self._process_line_for_interface(stripped, i, state, ranges)
-
-         return ranges
-
-     def _process_line_for_interface(
-         self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-     ) -> None:
-         """Process single line for interface detection."""
-         if self._is_interface_start(stripped):
-             self._handle_interface_start(stripped, line_num, state, ranges)
-             return
-
-         if state["in_interface"]:
-             self._handle_interface_continuation(stripped, line_num, state, ranges)
-
-     def _is_interface_start(self, stripped: str) -> bool:
-         """Check if line starts interface/type definition."""
-         return stripped.startswith(("interface ", "type ")) and "{" in stripped
-
-     def _handle_interface_start(
-         self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-     ) -> None:
-         """Handle start of interface definition."""
-         state["in_interface"] = True
-         state["start_line"] = line_num
-         state["brace_count"] = stripped.count("{") - stripped.count("}")
-
-         if state["brace_count"] == 0:
-             ranges.append((line_num, line_num))
-             state["in_interface"] = False
-
-     def _handle_interface_continuation(
-         self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-     ) -> None:
-         """Handle continuation of interface definition."""
-         state["brace_count"] += stripped.count("{") - stripped.count("}")
-         if state["brace_count"] == 0:
-             ranges.append((state["start_line"], line_num))
-             state["in_interface"] = False
-
-     def _overlaps_interface(
-         self, start: int, end: int, interface_ranges: list[tuple[int, int]]
-     ) -> bool:
-         """Check if block overlaps with any interface range."""
-         for if_start, if_end in interface_ranges:
-             if start <= if_end and end >= if_start:
-                 return True
+     return True
+
+
+ def _matches_jsx_pattern(node: Node, contains: bool) -> bool:
+     """Check if node is a JSX element pattern."""
+     jsx_types = ("jsx_opening_element", "jsx_self_closing_element")
+     return node.type in jsx_types and contains
+
+
+ def _matches_class_body_pattern(node: Node, ts_start: int, ts_end: int) -> bool:
+     """Check if node is a class body field definition pattern."""
+     if node.type != "class_body":
          return False
+
+     return _is_in_class_field_area(node, ts_start, ts_end)
+
+
+ def _contains_function_body(node: Node) -> bool:
+     """Check if node contains an arrow function or function expression."""
+     for child in node.children:
+         if child.type in ("arrow_function", "function", "function_expression"):
+             return True
+         if _contains_function_body(child):
+             return True
+     return False
+
+
+ def _find_first_method_line(class_body: Node) -> int | None:
+     """Find line number of first method in class body."""
+     for child in class_body.children:
+         if child.type in ("method_definition", "function_declaration"):
+             return child.start_point[0]
+     return None
+
+
+ def _is_in_class_field_area(class_body: Node, ts_start: int, ts_end: int) -> bool:
+     """Check if range is in class field definition area (before methods)."""
+     first_method_line = _find_first_method_line(class_body)
+     class_start = class_body.start_point[0]
+     class_end = class_body.end_point[0]
+
+     if first_method_line is None:
+         return class_start <= ts_start and class_end >= ts_end
+
+     return class_start <= ts_start and ts_end < first_method_line
+
+
+ def _find_interface_ranges(content: str) -> list[tuple[int, int]]:
+     """Find line ranges of interface/type definitions."""
+     ranges: list[tuple[int, int]] = []
+     lines = content.split("\n")
+     state = {"in_interface": False, "start_line": 0, "brace_count": 0}
+
+     for i, line in enumerate(lines, start=1):
+         stripped = line.strip()
+         _process_line_for_interface(stripped, i, state, ranges)
+
+     return ranges
+
+
+ def _process_line_for_interface(
+     stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+ ) -> None:
+     """Process single line for interface detection."""
+     if _is_interface_start(stripped):
+         _handle_interface_start(stripped, line_num, state, ranges)
+         return
+
+     if state["in_interface"]:
+         _handle_interface_continuation(stripped, line_num, state, ranges)
+
+
+ def _is_interface_start(stripped: str) -> bool:
+     """Check if line starts interface/type definition."""
+     return stripped.startswith(("interface ", "type ")) and "{" in stripped
+
+
+ def _handle_interface_start(
+     stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+ ) -> None:
+     """Handle start of interface definition."""
+     state["in_interface"] = True
+     state["start_line"] = line_num
+     state["brace_count"] = stripped.count("{") - stripped.count("}")
+
+     if state["brace_count"] == 0:
+         ranges.append((line_num, line_num))
+         state["in_interface"] = False
+
+
+ def _handle_interface_continuation(
+     stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+ ) -> None:
+     """Handle continuation of interface definition."""
+     state["brace_count"] += stripped.count("{") - stripped.count("}")
+     if state["brace_count"] == 0:
+         ranges.append((state["start_line"], line_num))
+         state["in_interface"] = False
+
+
+ def _overlaps_interface(start: int, end: int, interface_ranges: list[tuple[int, int]]) -> bool:
+     """Check if block overlaps with any interface range."""
+     return any(start <= if_end and end >= if_start for if_start, if_end in interface_ranges)

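With 0.13.0 the detector above becomes plain module functions rather than a TypeScriptStatementDetector instance. A minimal caller sketch under stated assumptions: the wheel's src package is importable as listed above, and tree-sitter plus the TypeScript grammar are installed; the snippet and line numbers are illustrative, not taken from thailint itself.

# Hypothetical migration sketch; not part of the diff.
from src.linters.dry.typescript_statement_detector import (
    is_single_statement,
    should_include_block,
)

SNIPPET = """\
interface Props { name: string }
const greet = (name: string) => console.log(`hi ${name}`);
"""

# 0.12.0: detector = TypeScriptStatementDetector(); detector.is_single_statement(...)
# 0.13.0: call the functions directly, no instance needed.
print(is_single_statement(SNIPPET, 2, 2))   # expected True when tree-sitter is available, False without it
print(should_include_block(SNIPPET, 1, 1))  # False: lines 1-1 overlap the interface definition
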
src/linters/dry/typescript_value_extractor.py
@@ -14,6 +14,9 @@ Exports: TypeScriptValueExtractor class
  Interfaces: TypeScriptValueExtractor.get_value_string(node, content) -> str | None

  Implementation: Tree-sitter node traversal with type-specific string formatting
+
+ Suppressions:
+ - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
  """

  from typing import Any

src/linters/dry/violation_filter.py
@@ -54,10 +54,7 @@ class ViolationFilter:
          Returns:
              True if violation overlaps with any kept violation
          """
-         for kept in kept_violations:
-             if self._overlaps(violation, kept):
-                 return True
-         return False
+         return any(self._overlaps(violation, kept) for kept in kept_violations)

      def _overlaps(self, v1: Violation, v2: Violation) -> bool:
          """Check if two violations overlap.

src/linters/dry/violation_generator.py
@@ -128,10 +128,7 @@ class ViolationGenerator:
              True if file should be ignored
          """
          path_str = str(Path(file_path))
-         for pattern in ignore_patterns:
-             if pattern in path_str:
-                 return True
-         return False
+         return any(pattern in path_str for pattern in ignore_patterns)

      def _filter_inline_ignored(
          self, violations: list[Violation], inline_ignore: InlineIgnoreParser

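Both DRY hunks above (ViolationFilter and ViolationGenerator) apply the same rewrite: an explicit loop with an early return becomes a short-circuiting any() over a generator expression. A standalone illustration with made-up names and data, not thailint's API:

def is_ignored_loop(path_str: str, patterns: list[str]) -> bool:
    # 0.12.0 style: manual loop with early return
    for pattern in patterns:
        if pattern in path_str:
            return True
    return False

def is_ignored_any(path_str: str, patterns: list[str]) -> bool:
    # 0.13.0 style: any() short-circuits on the first match, same result
    return any(pattern in path_str for pattern in patterns)

assert is_ignored_loop("src/tests/test_x.py", ["tests/"]) == is_ignored_any("src/tests/test_x.py", ["tests/"])
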
src/linters/file_header/atemporal_detector.py
@@ -16,6 +16,10 @@ Exports: AtemporalDetector class with detect_violations method
  Interfaces: detect_violations(text) -> list[tuple[str, str, int]] returns pattern matches with line numbers

  Implementation: Regex-based pattern matching with predefined patterns organized by category
+
+ Suppressions:
+ - nesting: detect_violations iterates over pattern categories and their patterns.
+ Natural grouping by category requires nested loops.
  """

  import re

src/linters/file_header/base_parser.py
@@ -16,6 +16,10 @@ Exports: BaseHeaderParser abstract base class
  Interfaces: extract_header(code) abstract method, parse_fields(header) -> dict[str, str] for field extraction

  Implementation: Template method pattern with shared field parsing and language-specific extraction
+
+ Suppressions:
+ - nesting: parse_fields uses nested loops for multi-line field processing. Extracting
+ would fragment the field-building logic without improving clarity.
  """

  import re

src/linters/file_header/bash_parser.py
@@ -16,6 +16,10 @@ Exports: BashHeaderParser class
  Interfaces: extract_header(code) -> str | None for comment extraction, parse_fields(header) inherited from base

  Implementation: Skips shebang and preamble, then extracts contiguous hash comment block
+
+ Suppressions:
+ - nesting: _skip_preamble uses conditional loops for shebang/preamble detection.
+ Sequential line processing requires nested state checks.
  """

  from src.linters.file_header.base_parser import BaseHeaderParser

src/linters/file_header/field_validator.py
@@ -42,15 +42,12 @@ class FieldValidator:
          Returns:
              List of (field_name, error_message) tuples for missing/invalid fields
          """
-         violations = []
          required_fields = self._get_required_fields(language)
-
-         for field_name in required_fields:
-             error = self._check_field(fields, field_name)
-             if error:
-                 violations.append(error)
-
-         return violations
+         return [
+             error
+             for field_name in required_fields
+             if (error := self._check_field(fields, field_name))
+         ]

      def _check_field(self, fields: dict[str, str], field_name: str) -> tuple[str, str] | None:
          """Check a single field for presence and content."""

src/linters/file_header/linter.py
@@ -20,15 +20,22 @@ Interfaces: check(context) -> list[Violation] for rule validation, standard rule

  Implementation: Composition pattern with helper classes for parsing, validation,
  and violation building
+
+ Suppressions:
+ - type:ignore[type-var]: Protocol pattern with generic type matching
+ - srp: Rule class coordinates parsing, validation, and violation building for multiple
+ languages. Methods support single responsibility of file header validation.
  """

  from pathlib import Path
  from typing import Protocol

  from src.core.base import BaseLintContext, BaseLintRule
+ from src.core.constants import HEADER_SCAN_LINES, Language
  from src.core.linter_utils import load_linter_config
  from src.core.types import Violation
- from src.linter_config.ignore import get_ignore_parser
+ from src.linter_config.directive_markers import check_general_ignore, has_ignore_directive_marker
+ from src.linter_config.ignore import _check_specific_rule_ignore, get_ignore_parser

  from .atemporal_detector import AtemporalDetector
  from .bash_parser import BashHeaderParser

@@ -111,7 +118,7 @@ class FileHeaderRule(BaseLintRule):  # thailint: ignore[srp]
              return []

          # Markdown has special atemporal handling
-         if context.language == "markdown":
+         if context.language == Language.MARKDOWN:
              return self._check_markdown_header(parser, context, config)

          return self._check_header_with_parser(parser, context, config)

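The bare "markdown" literal is replaced by Language.MARKDOWN from the new src/core/constants.py, whose contents this diff does not show. A hedged sketch of how such a constant could keep the old string comparison working; the real definition may differ:

# Assumption: Language is illustrated here as a str-valued enum; thailint's actual
# definition lives in src/core/constants.py and is not shown in this diff.
from enum import Enum

class Language(str, Enum):
    MARKDOWN = "markdown"
    PYTHON = "python"
    TYPESCRIPT = "typescript"

assert Language.MARKDOWN == "markdown"  # str-enum compares equal to the plain string
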
@@ -158,20 +165,20 @@ class FileHeaderRule(BaseLintRule):  # thailint: ignore[srp]

          return self._has_custom_ignore_syntax(file_content)

-     def _has_standard_ignore(self, file_content: str) -> bool:  # thailint: ignore[nesting]
+     def _has_standard_ignore(self, file_content: str) -> bool:
          """Check standard ignore parser for file-level ignores."""
-         first_lines = file_content.splitlines()[:10]
-         for line in first_lines:
-             if self._ignore_parser._has_ignore_directive_marker(line):  # pylint: disable=protected-access
-                 if self._ignore_parser._check_specific_rule_ignore(line, self.rule_id):  # pylint: disable=protected-access
-                     return True
-                 if self._ignore_parser._check_general_ignore(line):  # pylint: disable=protected-access
-                     return True
-         return False
+         first_lines = file_content.splitlines()[:HEADER_SCAN_LINES]
+         return any(self._line_has_matching_ignore(line) for line in first_lines)
+
+     def _line_has_matching_ignore(self, line: str) -> bool:
+         """Check if line has matching ignore directive for this rule."""
+         if not has_ignore_directive_marker(line):
+             return False
+         return _check_specific_rule_ignore(line, self.rule_id) or check_general_ignore(line)

      def _has_custom_ignore_syntax(self, file_content: str) -> bool:
          """Check custom file-level ignore syntax."""
-         first_lines = file_content.splitlines()[:10]
+         first_lines = file_content.splitlines()[:HEADER_SCAN_LINES]
          return any(self._is_ignore_line(line) for line in first_lines)

      def _is_ignore_line(self, line: str) -> bool:

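The nested _ignore_parser calls are replaced by a small helper built on the module-level functions that 0.13.0 adds in src/linter_config/directive_markers.py. A hedged sketch of the same composition outside the class; the directive syntax in the closing comment is inferred from the "# thailint: ignore[...]" markers visible elsewhere in this diff:

# Sketch only: mirrors FileHeaderRule._line_has_matching_ignore as a free function.
from src.linter_config.directive_markers import check_general_ignore, has_ignore_directive_marker
from src.linter_config.ignore import _check_specific_rule_ignore

def line_has_matching_ignore(line: str, rule_id: str) -> bool:
    if not has_ignore_directive_marker(line):  # cheap marker scan first
        return False
    return _check_specific_rule_ignore(line, rule_id) or check_general_ignore(line)

# Assumed behaviour: a header comment like "# thailint: ignore[file-header]" would
# match rule_id "file-header", while an unrelated comment line would return False.
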
src/linters/file_header/markdown_parser.py
@@ -17,6 +17,12 @@ Interfaces: extract_header(code) -> str | None for frontmatter extraction,
  parse_fields(header) -> dict[str, str] for field parsing

  Implementation: YAML frontmatter extraction with PyYAML parsing and regex fallback for robustness
+
+ Suppressions:
+ - BLE001: Broad exception catch for YAML parsing fallback (any exception triggers regex fallback)
+ - srp: Class coordinates YAML extraction, parsing, and field validation for Markdown.
+ Method count exceeds limit due to complexity refactoring.
+ - nesting,dry: _parse_simple_yaml uses nested loops for YAML structure traversal.
  """

  import logging

src/linters/file_placement/config_loader.py
@@ -23,6 +23,8 @@ from typing import Any

  import yaml

+ from src.core.constants import CONFIG_EXTENSIONS
+

  class ConfigLoader:
      """Loads configuration files for file placement linter."""

@@ -79,7 +81,7 @@ class ConfigLoader:
              ValueError: If file format is unsupported
          """
          with config_path.open(encoding="utf-8") as f:
-             if config_path.suffix in [".yaml", ".yml"]:
+             if config_path.suffix in CONFIG_EXTENSIONS:
                  return yaml.safe_load(f) or {}
              if config_path.suffix == ".json":
                  return json.load(f)

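The inline [".yaml", ".yml"] literal moves to a shared CONFIG_EXTENSIONS constant in the new src/core/constants.py. A hedged sketch of the resulting loader logic; the constant's exact value is assumed from the removed literal:

# Assumption: CONFIG_EXTENSIONS covers the same suffixes the old inline list did.
import json
from pathlib import Path

import yaml

CONFIG_EXTENSIONS = (".yaml", ".yml")  # stand-in for src.core.constants.CONFIG_EXTENSIONS

def load_config(config_path: Path) -> dict:
    with config_path.open(encoding="utf-8") as f:
        if config_path.suffix in CONFIG_EXTENSIONS:
            return yaml.safe_load(f) or {}
        if config_path.suffix == ".json":
            return json.load(f)
    raise ValueError(f"Unsupported config format: {config_path.suffix}")
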