thailint-0.12.0-py3-none-any.whl → thailint-0.14.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- src/analyzers/__init__.py +4 -3
- src/analyzers/ast_utils.py +54 -0
- src/analyzers/typescript_base.py +4 -0
- src/cli/__init__.py +3 -0
- src/cli/config.py +12 -12
- src/cli/config_merge.py +241 -0
- src/cli/linters/__init__.py +9 -0
- src/cli/linters/code_patterns.py +107 -257
- src/cli/linters/code_smells.py +48 -165
- src/cli/linters/documentation.py +21 -95
- src/cli/linters/performance.py +274 -0
- src/cli/linters/shared.py +232 -6
- src/cli/linters/structure.py +26 -21
- src/cli/linters/structure_quality.py +28 -21
- src/cli_main.py +3 -0
- src/config.py +2 -1
- src/core/base.py +3 -2
- src/core/cli_utils.py +3 -1
- src/core/config_parser.py +5 -2
- src/core/constants.py +54 -0
- src/core/linter_utils.py +95 -6
- src/core/rule_discovery.py +5 -1
- src/core/violation_builder.py +3 -0
- src/linter_config/directive_markers.py +109 -0
- src/linter_config/ignore.py +225 -383
- src/linter_config/pattern_utils.py +65 -0
- src/linter_config/rule_matcher.py +89 -0
- src/linters/collection_pipeline/any_all_analyzer.py +281 -0
- src/linters/collection_pipeline/ast_utils.py +40 -0
- src/linters/collection_pipeline/config.py +12 -0
- src/linters/collection_pipeline/continue_analyzer.py +2 -8
- src/linters/collection_pipeline/detector.py +262 -32
- src/linters/collection_pipeline/filter_map_analyzer.py +402 -0
- src/linters/collection_pipeline/linter.py +18 -35
- src/linters/collection_pipeline/suggestion_builder.py +68 -1
- src/linters/dry/base_token_analyzer.py +16 -9
- src/linters/dry/block_filter.py +7 -4
- src/linters/dry/cache.py +7 -2
- src/linters/dry/config.py +7 -1
- src/linters/dry/constant_matcher.py +34 -25
- src/linters/dry/file_analyzer.py +4 -2
- src/linters/dry/inline_ignore.py +7 -16
- src/linters/dry/linter.py +48 -25
- src/linters/dry/python_analyzer.py +18 -10
- src/linters/dry/python_constant_extractor.py +51 -52
- src/linters/dry/single_statement_detector.py +14 -12
- src/linters/dry/token_hasher.py +115 -115
- src/linters/dry/typescript_analyzer.py +11 -6
- src/linters/dry/typescript_constant_extractor.py +4 -0
- src/linters/dry/typescript_statement_detector.py +208 -208
- src/linters/dry/typescript_value_extractor.py +3 -0
- src/linters/dry/violation_filter.py +1 -4
- src/linters/dry/violation_generator.py +1 -4
- src/linters/file_header/atemporal_detector.py +58 -40
- src/linters/file_header/base_parser.py +4 -0
- src/linters/file_header/bash_parser.py +4 -0
- src/linters/file_header/config.py +14 -0
- src/linters/file_header/field_validator.py +5 -8
- src/linters/file_header/linter.py +19 -12
- src/linters/file_header/markdown_parser.py +6 -0
- src/linters/file_placement/config_loader.py +3 -1
- src/linters/file_placement/linter.py +22 -8
- src/linters/file_placement/pattern_matcher.py +21 -4
- src/linters/file_placement/pattern_validator.py +21 -7
- src/linters/file_placement/rule_checker.py +2 -2
- src/linters/lazy_ignores/__init__.py +43 -0
- src/linters/lazy_ignores/config.py +66 -0
- src/linters/lazy_ignores/directive_utils.py +121 -0
- src/linters/lazy_ignores/header_parser.py +177 -0
- src/linters/lazy_ignores/linter.py +158 -0
- src/linters/lazy_ignores/matcher.py +135 -0
- src/linters/lazy_ignores/python_analyzer.py +205 -0
- src/linters/lazy_ignores/rule_id_utils.py +180 -0
- src/linters/lazy_ignores/skip_detector.py +298 -0
- src/linters/lazy_ignores/types.py +69 -0
- src/linters/lazy_ignores/typescript_analyzer.py +146 -0
- src/linters/lazy_ignores/violation_builder.py +131 -0
- src/linters/lbyl/__init__.py +29 -0
- src/linters/lbyl/config.py +63 -0
- src/linters/lbyl/pattern_detectors/__init__.py +25 -0
- src/linters/lbyl/pattern_detectors/base.py +46 -0
- src/linters/magic_numbers/context_analyzer.py +227 -229
- src/linters/magic_numbers/linter.py +20 -15
- src/linters/magic_numbers/python_analyzer.py +4 -16
- src/linters/magic_numbers/typescript_analyzer.py +9 -16
- src/linters/method_property/config.py +4 -1
- src/linters/method_property/linter.py +5 -10
- src/linters/method_property/python_analyzer.py +5 -4
- src/linters/method_property/violation_builder.py +3 -0
- src/linters/nesting/linter.py +11 -6
- src/linters/nesting/typescript_analyzer.py +6 -12
- src/linters/nesting/typescript_function_extractor.py +0 -4
- src/linters/nesting/violation_builder.py +1 -0
- src/linters/performance/__init__.py +91 -0
- src/linters/performance/config.py +43 -0
- src/linters/performance/constants.py +49 -0
- src/linters/performance/linter.py +149 -0
- src/linters/performance/python_analyzer.py +365 -0
- src/linters/performance/regex_analyzer.py +312 -0
- src/linters/performance/regex_linter.py +139 -0
- src/linters/performance/typescript_analyzer.py +236 -0
- src/linters/performance/violation_builder.py +160 -0
- src/linters/print_statements/linter.py +6 -4
- src/linters/print_statements/python_analyzer.py +85 -81
- src/linters/print_statements/typescript_analyzer.py +6 -15
- src/linters/srp/heuristics.py +4 -4
- src/linters/srp/linter.py +12 -12
- src/linters/srp/violation_builder.py +0 -4
- src/linters/stateless_class/linter.py +30 -36
- src/linters/stateless_class/python_analyzer.py +11 -20
- src/linters/stringly_typed/config.py +4 -5
- src/linters/stringly_typed/context_filter.py +410 -410
- src/linters/stringly_typed/function_call_violation_builder.py +93 -95
- src/linters/stringly_typed/linter.py +48 -16
- src/linters/stringly_typed/python/analyzer.py +5 -1
- src/linters/stringly_typed/python/call_tracker.py +8 -5
- src/linters/stringly_typed/python/comparison_tracker.py +10 -5
- src/linters/stringly_typed/python/condition_extractor.py +3 -0
- src/linters/stringly_typed/python/conditional_detector.py +4 -1
- src/linters/stringly_typed/python/match_analyzer.py +8 -2
- src/linters/stringly_typed/python/validation_detector.py +3 -0
- src/linters/stringly_typed/storage.py +14 -14
- src/linters/stringly_typed/typescript/call_tracker.py +9 -3
- src/linters/stringly_typed/typescript/comparison_tracker.py +9 -3
- src/linters/stringly_typed/violation_generator.py +288 -259
- src/orchestrator/core.py +13 -4
- src/templates/thailint_config_template.yaml +196 -0
- src/utils/project_root.py +3 -0
- thailint-0.14.0.dist-info/METADATA +185 -0
- thailint-0.14.0.dist-info/RECORD +199 -0
- thailint-0.12.0.dist-info/METADATA +0 -1667
- thailint-0.12.0.dist-info/RECORD +0 -164
- {thailint-0.12.0.dist-info → thailint-0.14.0.dist-info}/WHEEL +0 -0
- {thailint-0.12.0.dist-info → thailint-0.14.0.dist-info}/entry_points.txt +0 -0
- {thailint-0.12.0.dist-info → thailint-0.14.0.dist-info}/licenses/LICENSE +0 -0
```diff
@@ -10,246 +10,246 @@ Overview: Provides sophisticated single-statement pattern detection to filter fa

 Dependencies: tree-sitter for TypeScript AST parsing

-Exports:
+Exports: is_single_statement, should_include_block functions

-Interfaces:
+Interfaces: is_single_statement(content, start_line, end_line) -> bool,
+    should_include_block(content, start_line, end_line) -> bool

 Implementation: Tree-sitter AST walking with pattern matching for TypeScript constructs

-
-
-Methods form tightly coupled algorithm pipeline: decorator detection, call expression analysis,
-declaration patterns, JSX element handling, class body field definitions, and interface filtering.
-Similar to parser or compiler pass architecture where algorithmic cohesion is critical. Splitting
-would fragment the algorithm logic and make maintenance harder by separating interdependent
-tree-sitter AST analysis steps. All methods contribute to single responsibility: accurately
-detecting single-statement patterns to prevent false positives in TypeScript duplicate detection.
+Suppressions:
+    - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
 """

 from collections.abc import Generator
+from typing import Any

 from src.analyzers.typescript_base import TREE_SITTER_AVAILABLE

 if TREE_SITTER_AVAILABLE:
     from tree_sitter import Node
 else:
-    Node =
+    Node = Any  # type: ignore[assignment,misc]


-
-    """
+def is_single_statement(content: str, start_line: int, end_line: int) -> bool:
+    """Check if a line range is a single logical statement.

-
-
+    Args:
+        content: TypeScript source code
+        start_line: Starting line number (1-indexed)
+        end_line: Ending line number (1-indexed)
+
+    Returns:
+        True if this range represents a single logical statement/expression
     """
+    if not TREE_SITTER_AVAILABLE:
+        return False

-
-        """Check if a line range is a single logical statement.
+    from src.analyzers.typescript_base import TypeScriptBaseAnalyzer

-
-
-
-
+    analyzer = TypeScriptBaseAnalyzer()
+    root = analyzer.parse_typescript(content)
+    if not root:
+        return False

-
-            True if this range represents a single logical statement/expression
-        """
-        if not TREE_SITTER_AVAILABLE:
-            return False
+    return _check_overlapping_nodes(root, start_line, end_line)

-        from src.analyzers.typescript_base import TypeScriptBaseAnalyzer

-
-
-        if not root:
-            return False
+def should_include_block(content: str, start_line: int, end_line: int) -> bool:
+    """Check if block should be included (not overlapping interface definitions).

-
+    Args:
+        content: File content
+        start_line: Block start line
+        end_line: Block end line

-
-
-
-
+    Returns:
+        False if block overlaps interface definition, True otherwise
+    """
+    interface_ranges = _find_interface_ranges(content)
+    return not _overlaps_interface(start_line, end_line, interface_ranges)

-        for node in self._walk_nodes(root):
-            if self._node_overlaps_and_matches(node, ts_start, ts_end):
-                return True
-        return False

-
-
-
-
-            yield from self._walk_nodes(child)
-
-    def _node_overlaps_and_matches(self, node: Node, ts_start: int, ts_end: int) -> bool:
-        """Check if node overlaps with range and matches single-statement pattern."""
-        node_start = node.start_point[0]
-        node_end = node.end_point[0]
-
-        overlaps = not (node_end < ts_start or node_start > ts_end)
-        if not overlaps:
-            return False
-
-        return self._is_single_statement_pattern(node, ts_start, ts_end)
-
-    def _is_single_statement_pattern(self, node: Node, ts_start: int, ts_end: int) -> bool:
-        """Check if an AST node represents a single-statement pattern to filter."""
-        node_start = node.start_point[0]
-        node_end = node.end_point[0]
-        contains = (node_start <= ts_start) and (node_end >= ts_end)
-
-        matchers = [
-            self._matches_simple_container_pattern(node, contains),
-            self._matches_call_expression_pattern(node, ts_start, ts_end, contains),
-            self._matches_declaration_pattern(node, contains),
-            self._matches_jsx_pattern(node, contains),
-            self._matches_class_body_pattern(node, ts_start, ts_end),
-        ]
-        return any(matchers)
-
-    def _matches_simple_container_pattern(self, node: Node, contains: bool) -> bool:
-        """Check if node is a simple container pattern (decorator, object, etc.)."""
-        simple_types = (
-            "decorator",
-            "object",
-            "member_expression",
-            "as_expression",
-            "array_pattern",
-        )
-        return node.type in simple_types and contains
-
-    def _matches_call_expression_pattern(
-        self, node: Node, ts_start: int, ts_end: int, contains: bool
-    ) -> bool:
-        """Check if node is a call expression pattern."""
-        if node.type != "call_expression":
-            return False
-
-        node_start = node.start_point[0]
-        node_end = node.end_point[0]
-        is_multiline = node_start < node_end
-        if is_multiline and node_start <= ts_start <= node_end:
-            return True
+def _check_overlapping_nodes(root: Node, start_line: int, end_line: int) -> bool:
+    """Check if any AST node overlaps and matches single-statement pattern."""
+    ts_start = start_line - 1  # Convert to 0-indexed
+    ts_end = end_line - 1

-
+    return any(_node_overlaps_and_matches(node, ts_start, ts_end) for node in _walk_nodes(root))

-    def _matches_declaration_pattern(self, node: Node, contains: bool) -> bool:
-        """Check if node is a lexical declaration pattern."""
-        if node.type != "lexical_declaration" or not contains:
-            return False

-
-
+def _walk_nodes(node: Node) -> Generator[Node, None, None]:
+    """Generator to walk all nodes in tree."""
+    yield node
+    for child in node.children:
+        yield from _walk_nodes(child)
+
+
+def _node_overlaps_and_matches(node: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if node overlaps with range and matches single-statement pattern."""
+    node_start = node.start_point[0]
+    node_end = node.end_point[0]
+
+    overlaps = not (node_end < ts_start or node_start > ts_end)
+    if not overlaps:
+        return False
+
+    return _is_single_statement_pattern(node, ts_start, ts_end)
+
+
+def _is_single_statement_pattern(node: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if an AST node represents a single-statement pattern to filter."""
+    node_start = node.start_point[0]
+    node_end = node.end_point[0]
+    contains = (node_start <= ts_start) and (node_end >= ts_end)
+
+    matchers = [
+        _matches_simple_container_pattern(node, contains),
+        _matches_call_expression_pattern(node, ts_start, ts_end, contains),
+        _matches_declaration_pattern(node, contains),
+        _matches_jsx_pattern(node, contains),
+        _matches_class_body_pattern(node, ts_start, ts_end),
+    ]
+    return any(matchers)
+
+
+def _matches_simple_container_pattern(node: Node, contains: bool) -> bool:
+    """Check if node is a simple container pattern (decorator, object, etc.)."""
+    simple_types = (
+        "decorator",
+        "object",
+        "member_expression",
+        "as_expression",
+        "array_pattern",
+    )
+    return node.type in simple_types and contains
+
+
+def _matches_call_expression_pattern(
+    node: Node, ts_start: int, ts_end: int, contains: bool
+) -> bool:
+    """Check if node is a call expression pattern."""
+    if node.type != "call_expression":
+        return False

+    node_start = node.start_point[0]
+    node_end = node.end_point[0]
+    is_multiline = node_start < node_end
+    if is_multiline and node_start <= ts_start <= node_end:
         return True

-
-
-
-
-
-
-
-
-
-
-        return self._is_in_class_field_area(node, ts_start, ts_end)
-
-    def _contains_function_body(self, node: Node) -> bool:
-        """Check if node contains an arrow function or function expression."""
-        for child in node.children:
-            if child.type in ("arrow_function", "function", "function_expression"):
-                return True
-            if self._contains_function_body(child):
-                return True
+    return contains
+
+
+def _matches_declaration_pattern(node: Node, contains: bool) -> bool:
+    """Check if node is a lexical declaration pattern."""
+    if node.type != "lexical_declaration" or not contains:
+        return False
+
+    if _contains_function_body(node):
         return False

-
-
-
-
-
-
-
-
-
-
-
-
-
-        if first_method_line is None:
-            return class_start <= ts_start and class_end >= ts_end
-
-        return class_start <= ts_start and ts_end < first_method_line
-
-    def should_include_block(self, content: str, start_line: int, end_line: int) -> bool:
-        """Check if block should be included (not overlapping interface definitions).
-
-        Args:
-            content: File content
-            start_line: Block start line
-            end_line: Block end line
-
-        Returns:
-            False if block overlaps interface definition, True otherwise
-        """
-        interface_ranges = self._find_interface_ranges(content)
-        return not self._overlaps_interface(start_line, end_line, interface_ranges)
-
-    def _find_interface_ranges(self, content: str) -> list[tuple[int, int]]:
-        """Find line ranges of interface/type definitions."""
-        ranges: list[tuple[int, int]] = []
-        lines = content.split("\n")
-        state = {"in_interface": False, "start_line": 0, "brace_count": 0}
-
-        for i, line in enumerate(lines, start=1):
-            stripped = line.strip()
-            self._process_line_for_interface(stripped, i, state, ranges)
-
-        return ranges
-
-    def _process_line_for_interface(
-        self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-    ) -> None:
-        """Process single line for interface detection."""
-        if self._is_interface_start(stripped):
-            self._handle_interface_start(stripped, line_num, state, ranges)
-            return
-
-        if state["in_interface"]:
-            self._handle_interface_continuation(stripped, line_num, state, ranges)
-
-    def _is_interface_start(self, stripped: str) -> bool:
-        """Check if line starts interface/type definition."""
-        return stripped.startswith(("interface ", "type ")) and "{" in stripped
-
-    def _handle_interface_start(
-        self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-    ) -> None:
-        """Handle start of interface definition."""
-        state["in_interface"] = True
-        state["start_line"] = line_num
-        state["brace_count"] = stripped.count("{") - stripped.count("}")
-
-        if state["brace_count"] == 0:
-            ranges.append((line_num, line_num))
-            state["in_interface"] = False
-
-    def _handle_interface_continuation(
-        self, stripped: str, line_num: int, state: dict, ranges: list[tuple[int, int]]
-    ) -> None:
-        """Handle continuation of interface definition."""
-        state["brace_count"] += stripped.count("{") - stripped.count("}")
-        if state["brace_count"] == 0:
-            ranges.append((state["start_line"], line_num))
-            state["in_interface"] = False
-
-    def _overlaps_interface(
-        self, start: int, end: int, interface_ranges: list[tuple[int, int]]
-    ) -> bool:
-        """Check if block overlaps with any interface range."""
-        for if_start, if_end in interface_ranges:
-            if start <= if_end and end >= if_start:
-                return True
+    return True
+
+
+def _matches_jsx_pattern(node: Node, contains: bool) -> bool:
+    """Check if node is a JSX element pattern."""
+    jsx_types = ("jsx_opening_element", "jsx_self_closing_element")
+    return node.type in jsx_types and contains
+
+
+def _matches_class_body_pattern(node: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if node is a class body field definition pattern."""
+    if node.type != "class_body":
         return False
+
+    return _is_in_class_field_area(node, ts_start, ts_end)
+
+
+def _contains_function_body(node: Node) -> bool:
+    """Check if node contains an arrow function or function expression."""
+    for child in node.children:
+        if child.type in ("arrow_function", "function", "function_expression"):
+            return True
+        if _contains_function_body(child):
+            return True
+    return False
+
+
+def _find_first_method_line(class_body: Node) -> int | None:
+    """Find line number of first method in class body."""
+    for child in class_body.children:
+        if child.type in ("method_definition", "function_declaration"):
+            return child.start_point[0]
+    return None
+
+
+def _is_in_class_field_area(class_body: Node, ts_start: int, ts_end: int) -> bool:
+    """Check if range is in class field definition area (before methods)."""
+    first_method_line = _find_first_method_line(class_body)
+    class_start = class_body.start_point[0]
+    class_end = class_body.end_point[0]
+
+    if first_method_line is None:
+        return class_start <= ts_start and class_end >= ts_end
+
+    return class_start <= ts_start and ts_end < first_method_line
+
+
+def _find_interface_ranges(content: str) -> list[tuple[int, int]]:
+    """Find line ranges of interface/type definitions."""
+    ranges: list[tuple[int, int]] = []
+    lines = content.split("\n")
+    state = {"in_interface": False, "start_line": 0, "brace_count": 0}
+
+    for i, line in enumerate(lines, start=1):
+        stripped = line.strip()
+        _process_line_for_interface(stripped, i, state, ranges)
+
+    return ranges
+
+
+def _process_line_for_interface(
+    stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+) -> None:
+    """Process single line for interface detection."""
+    if _is_interface_start(stripped):
+        _handle_interface_start(stripped, line_num, state, ranges)
+        return
+
+    if state["in_interface"]:
+        _handle_interface_continuation(stripped, line_num, state, ranges)
+
+
+def _is_interface_start(stripped: str) -> bool:
+    """Check if line starts interface/type definition."""
+    return stripped.startswith(("interface ", "type ")) and "{" in stripped
+
+
+def _handle_interface_start(
+    stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+) -> None:
+    """Handle start of interface definition."""
+    state["in_interface"] = True
+    state["start_line"] = line_num
+    state["brace_count"] = stripped.count("{") - stripped.count("}")
+
+    if state["brace_count"] == 0:
+        ranges.append((line_num, line_num))
+        state["in_interface"] = False
+
+
+def _handle_interface_continuation(
+    stripped: str, line_num: int, state: dict[str, Any], ranges: list[tuple[int, int]]
+) -> None:
+    """Handle continuation of interface definition."""
+    state["brace_count"] += stripped.count("{") - stripped.count("}")
+    if state["brace_count"] == 0:
+        ranges.append((state["start_line"], line_num))
+        state["in_interface"] = False
+
+
+def _overlaps_interface(start: int, end: int, interface_ranges: list[tuple[int, int]]) -> bool:
+    """Check if block overlaps with any interface range."""
+    return any(start <= if_end and end >= if_start for if_start, if_end in interface_ranges)
```
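The hunk above appears to correspond to src/linters/dry/typescript_statement_detector.py from the file list: the class-based detector is flattened into module-level is_single_statement and should_include_block functions. A minimal usage sketch of the new interface follows; the TypeScript snippet is an invented example, and it assumes the tree-sitter TypeScript grammar is installed so TREE_SITTER_AVAILABLE is True.

```python
# Illustrative sketch only; assumes tree-sitter and its TypeScript grammar
# are installed so TREE_SITTER_AVAILABLE is True in typescript_base.
from src.linters.dry.typescript_statement_detector import (
    is_single_statement,
    should_include_block,
)

source = """const config = {
    retries: 3,
    timeout: 1000,
};
"""

# A DRY match covering lines 1-4 spans a single lexical declaration, so the
# detector should treat it as a single-statement false positive.
print(is_single_statement(source, 1, 4))

# No interface/type definition overlaps lines 1-4, so the block is kept.
print(should_include_block(source, 1, 4))
```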
```diff
@@ -14,6 +14,9 @@ Exports: TypeScriptValueExtractor class
 Interfaces: TypeScriptValueExtractor.get_value_string(node, content) -> str | None

 Implementation: Tree-sitter node traversal with type-specific string formatting
+
+Suppressions:
+    - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
 """

 from typing import Any
```
```diff
@@ -54,10 +54,7 @@ class ViolationFilter:
         Returns:
             True if violation overlaps with any kept violation
         """
-        for kept in kept_violations:
-            if self._overlaps(violation, kept):
-                return True
-        return False
+        return any(self._overlaps(violation, kept) for kept in kept_violations)

     def _overlaps(self, v1: Violation, v2: Violation) -> bool:
         """Check if two violations overlap.
```
```diff
@@ -128,10 +128,7 @@ class ViolationGenerator:
             True if file should be ignored
         """
         path_str = str(Path(file_path))
-        for pattern in ignore_patterns:
-            if pattern in path_str:
-                return True
-        return False
+        return any(pattern in path_str for pattern in ignore_patterns)

     def _filter_inline_ignored(
         self, violations: list[Violation], inline_ignore: InlineIgnoreParser
```
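Both of the preceding hunks apply the same mechanical refactor: an explicit accumulate-and-return loop collapses into a single any() over a generator expression, which short-circuits on the first match exactly like the original early return. A self-contained sketch of the equivalence, using hypothetical standalone functions rather than thailint code:

```python
def is_ignored_loop(path_str: str, patterns: list[str]) -> bool:
    # Old shape: explicit loop with an early return on the first hit.
    for pattern in patterns:
        if pattern in path_str:
            return True
    return False


def is_ignored_any(path_str: str, patterns: list[str]) -> bool:
    # New shape: any() over a generator expression, same short-circuit behavior.
    return any(pattern in path_str for pattern in patterns)


assert is_ignored_loop("src/tests/test_cache.py", ["tests/", ".venv/"]) == is_ignored_any(
    "src/tests/test_cache.py", ["tests/", ".venv/"]
)
```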
```diff
@@ -15,50 +15,68 @@ Exports: AtemporalDetector class with detect_violations method

 Interfaces: detect_violations(text) -> list[tuple[str, str, int]] returns pattern matches with line numbers

-Implementation: Regex-based pattern matching with
+Implementation: Regex-based pattern matching with pre-compiled patterns organized by category
+
+Suppressions:
+    - nesting: detect_violations iterates over pattern categories and their patterns.
+      Natural grouping by category requires nested loops.
 """

 import re
+from re import Pattern
+
+
+def _compile_patterns(patterns: list[tuple[str, str]]) -> list[tuple[Pattern[str], str]]:
+    """Compile regex patterns for efficient reuse."""
+    return [(re.compile(pattern, re.IGNORECASE), desc) for pattern, desc in patterns]


 class AtemporalDetector:
     """Detects temporal language patterns in text."""

-    #
-    DATE_PATTERNS =
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # Pre-compiled date patterns
+    DATE_PATTERNS = _compile_patterns(
+        [
+            (r"\d{4}-\d{2}-\d{2}", "ISO date format (YYYY-MM-DD)"),
+            (
+                r"(?:January|February|March|April|May|June|July|August|September|October|November|December)\s+\d{4}",
+                "Month Year format",
+            ),
+            (r"(?:Created|Updated|Modified):\s*\d{4}", "Date metadata"),
+        ]
+    )
+
+    # Pre-compiled temporal qualifiers
+    TEMPORAL_QUALIFIERS = _compile_patterns(
+        [
+            (r"\bcurrently\b", 'temporal qualifier "currently"'),
+            (r"\bnow\b", 'temporal qualifier "now"'),
+            (r"\brecently\b", 'temporal qualifier "recently"'),
+            (r"\bsoon\b", 'temporal qualifier "soon"'),
+            (r"\bfor now\b", 'temporal qualifier "for now"'),
+        ]
+    )
+
+    # Pre-compiled state change language
+    STATE_CHANGE = _compile_patterns(
+        [
+            (r"\breplaces?\b", 'state change "replaces"'),
+            (r"\bmigrated from\b", 'state change "migrated from"'),
+            (r"\bformerly\b", 'state change "formerly"'),
+            (r"\bold implementation\b", 'state change "old"'),
+            (r"\bnew implementation\b", 'state change "new"'),
+        ]
+    )
+
+    # Pre-compiled future references
+    FUTURE_REFS = _compile_patterns(
+        [
+            (r"\bwill be\b", 'future reference "will be"'),
+            (r"\bplanned\b", 'future reference "planned"'),
+            (r"\bto be added\b", 'future reference "to be added"'),
+            (r"\bcoming soon\b", 'future reference "coming soon"'),
+        ]
+    )

     def detect_violations(  # thailint: ignore[nesting]
         self, text: str
@@ -73,15 +91,15 @@ class AtemporalDetector:
         """
         violations = []

-        # Check all pattern categories
+        # Check all pattern categories (patterns are pre-compiled)
         all_patterns = (
             self.DATE_PATTERNS + self.TEMPORAL_QUALIFIERS + self.STATE_CHANGE + self.FUTURE_REFS
         )

         lines = text.split("\n")
         for line_num, line in enumerate(lines, start=1):
-            for
-                if
-                    violations.append((pattern, description, line_num))
+            for compiled_pattern, description in all_patterns:
+                if compiled_pattern.search(line):
+                    violations.append((compiled_pattern.pattern, description, line_num))

         return violations
```
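The two hunks above appear to correspond to src/linters/file_header/atemporal_detector.py from the file list, swapping plain pattern tuples for class-level pre-compiled Pattern objects. A minimal sketch of how the public detect_violations interface behaves after the change; the header text is an invented example:

```python
# Illustrative sketch; module path taken from the file list above.
from src.linters.file_header.atemporal_detector import AtemporalDetector

header = """Purpose: Cache layer for token hashing.
Currently uses an in-memory dict; persistence will be added soon.
Created: 2024-01-15
"""

detector = AtemporalDetector()
for pattern, description, line_num in detector.detect_violations(header):
    # Each match is (regex source, human-readable description, 1-indexed line).
    # Line 2 should flag "currently", "will be", and "soon"; line 3 should flag
    # the ISO date and the "Created:" metadata.
    print(f"line {line_num}: {description} ({pattern})")
```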
```diff
@@ -16,6 +16,10 @@ Exports: BaseHeaderParser abstract base class
 Interfaces: extract_header(code) abstract method, parse_fields(header) -> dict[str, str] for field extraction

 Implementation: Template method pattern with shared field parsing and language-specific extraction
+
+Suppressions:
+    - nesting: parse_fields uses nested loops for multi-line field processing. Extracting
+      would fragment the field-building logic without improving clarity.
 """

 import re
```
```diff
@@ -16,6 +16,10 @@ Exports: BashHeaderParser class
 Interfaces: extract_header(code) -> str | None for comment extraction, parse_fields(header) inherited from base

 Implementation: Skips shebang and preamble, then extracts contiguous hash comment block
+
+Suppressions:
+    - nesting: _skip_preamble uses conditional loops for shebang/preamble detection.
+      Sequential line processing requires nested state checks.
 """

 from src.linters.file_header.base_parser import BaseHeaderParser
```
```diff
@@ -111,6 +111,20 @@ class FileHeaderConfig:
         defaults = cls()
         required_fields = config_dict.get("required_fields", {})

+        # Handle both list format (applies to all languages) and dict format (language-specific)
+        if isinstance(required_fields, list):
+            # Simple list format: apply same fields to all languages
+            return cls(
+                required_fields_python=required_fields,
+                required_fields_typescript=required_fields,
+                required_fields_bash=required_fields,
+                required_fields_markdown=required_fields,
+                required_fields_css=required_fields,
+                enforce_atemporal=config_dict.get("enforce_atemporal", True),
+                ignore=config_dict.get("ignore", defaults.ignore),
+            )
+
+        # Dict format: language-specific fields
         return cls(
             required_fields_python=required_fields.get("python", defaults.required_fields_python),
             required_fields_typescript=required_fields.get(
```
|