thailint 0.4.4__py3-none-any.whl → 0.4.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- src/linters/dry/python_analyzer.py +148 -39
- src/linters/dry/token_hasher.py +63 -9
- src/linters/dry/typescript_analyzer.py +7 -5
- src/linters/file_header/__init__.py +24 -0
- src/linters/file_header/atemporal_detector.py +87 -0
- src/linters/file_header/config.py +66 -0
- src/linters/file_header/field_validator.py +69 -0
- src/linters/file_header/linter.py +313 -0
- src/linters/file_header/python_parser.py +86 -0
- src/linters/file_header/violation_builder.py +78 -0
- {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/METADATA +31 -30
- {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/RECORD +15 -8
- {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/WHEEL +0 -0
- {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/entry_points.txt +0 -0
- {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/licenses/LICENSE +0 -0
src/linters/dry/python_analyzer.py
CHANGED

@@ -65,8 +65,12 @@ class PythonDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.violat
         # Performance optimization: Cache parsed AST to avoid re-parsing for each hash window
         self._cached_ast: ast.Module | None = None
         self._cached_content: str | None = None
+        # Performance optimization: Line-to-node index for O(1) lookups instead of O(n) ast.walk()
+        self._line_to_nodes: dict[int, list[ast.AST]] | None = None

-    def analyze(
+    def analyze(  # thailint: ignore[nesting.excessive-depth]
+        self, file_path: Path, content: str, config: DRYConfig
+    ) -> list[CodeBlock]:
         """Analyze Python file for duplicate code blocks, excluding docstrings.

         Args:
@@ -81,6 +85,9 @@ class PythonDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.violat
         self._cached_ast = self._parse_content_safe(content)
         self._cached_content = content

+        # Performance optimization: Build line-to-node index for O(1) lookups
+        self._line_to_nodes = self._build_line_to_node_index(self._cached_ast)
+
         try:
             # Get docstring line ranges
             docstring_ranges = self._get_docstring_ranges_from_content(content)
@@ -89,34 +96,58 @@ class PythonDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.violat
             lines_with_numbers = self._tokenize_with_line_numbers(content, docstring_ranges)

             # Generate rolling hash windows
-            windows = self._rolling_hash_with_tracking(
-
-
-            for hash_val, start_line, end_line, snippet in windows:
-                # Skip blocks that are single logical statements
-                # Check the original source code, not the normalized snippet
-                if self._is_single_statement_in_source(content, start_line, end_line):
-                    continue
-
-                block = CodeBlock(
-                    file_path=file_path,
-                    start_line=start_line,
-                    end_line=end_line,
-                    snippet=snippet,
-                    hash_value=hash_val,
-                )
-
-                # Apply extensible filters (keyword arguments, imports, etc.)
-                if self._filter_registry.should_filter_block(block, content):
-                    continue
-
-                blocks.append(block)
+            windows = self._rolling_hash_with_tracking(
+                lines_with_numbers, config.min_duplicate_lines
+            )

-            return
+            return self._filter_valid_blocks(windows, file_path, content)
         finally:
             # Clear cache after analysis to avoid memory leaks
             self._cached_ast = None
             self._cached_content = None
+            self._line_to_nodes = None
+
+    def _filter_valid_blocks(
+        self,
+        windows: list[tuple[int, int, int, str]],
+        file_path: Path,
+        content: str,
+    ) -> list[CodeBlock]:
+        """Filter hash windows and create valid CodeBlock instances."""
+        blocks = []
+        for hash_val, start_line, end_line, snippet in windows:
+            block = self._create_block_if_valid(
+                file_path, content, hash_val, start_line, end_line, snippet
+            )
+            if block:
+                blocks.append(block)
+        return blocks
+
+    def _create_block_if_valid(  # pylint: disable=too-many-arguments,too-many-positional-arguments
+        self,
+        file_path: Path,
+        content: str,
+        hash_val: int,
+        start_line: int,
+        end_line: int,
+        snippet: str,
+    ) -> CodeBlock | None:
+        """Create CodeBlock if it passes all validation checks."""
+        if self._is_single_statement_in_source(content, start_line, end_line):
+            return None
+
+        block = CodeBlock(
+            file_path=file_path,
+            start_line=start_line,
+            end_line=end_line,
+            snippet=snippet,
+            hash_value=hash_val,
+        )
+
+        if self._filter_registry.should_filter_block(block, content):
+            return None
+
+        return block

     def _get_docstring_ranges_from_content(self, content: str) -> set[int]:
         """Extract line numbers that are part of docstrings.
@@ -184,20 +215,21 @@ class PythonDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.violat
             List of (original_line_number, normalized_code) tuples
         """
         lines_with_numbers = []
+        in_multiline_import = False

         for line_num, line in enumerate(content.split("\n"), start=1):
-            # Skip docstring lines
             if line_num in docstring_lines:
                 continue

-
-            line = self._hasher._strip_comments(line)  # pylint: disable=protected-access
-            line = " ".join(line.split())
-
+            line = self._hasher._normalize_line(line)  # pylint: disable=protected-access
             if not line:
                 continue

-            if
+            # Update multi-line import state and check if line should be skipped
+            in_multiline_import, should_skip = self._hasher._should_skip_import_line(  # pylint: disable=protected-access
+                line, in_multiline_import
+            )
+            if should_skip:
                 continue

             lines_with_numbers.append((line_num, line))
@@ -243,6 +275,7 @@ class PythonDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.violat
         to avoid re-parsing the entire file for each hash window check.
         """
         # Use cached AST if available and content matches
+        tree: ast.Module | None
         if self._cached_ast is not None and content == self._cached_content:
             tree = self._cached_ast
         else:
@@ -261,23 +294,99 @@ class PythonDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.violat
         except SyntaxError:
             return None

-
-
+    @staticmethod
+    def _build_line_to_node_index(tree: ast.Module | None) -> dict[int, list[ast.AST]] | None:
+        """Build an index mapping each line number to overlapping AST nodes.
+
+        Performance optimization: This allows O(1) lookups instead of O(n) ast.walk() calls.
+        For a file with 5,144 nodes and 673 hash windows, this reduces 3.46M node operations
+        to just ~3,365 relevant node checks (99.9% reduction).

-
+        Args:
+            tree: Parsed AST tree (None if parsing failed)
+
+        Returns:
+            Dictionary mapping line numbers to list of AST nodes overlapping that line,
+            or None if tree is None
         """
+        if tree is None:
+            return None
+
+        line_to_nodes: dict[int, list[ast.AST]] = {}
         for node in ast.walk(tree):
-
-
-
-
-            continue  # No overlap, skip expensive pattern matching
+            if PythonDuplicateAnalyzer._node_has_line_info(node):
+                PythonDuplicateAnalyzer._add_node_to_index(node, line_to_nodes)
+
+        return line_to_nodes

-
+    @staticmethod
+    def _node_has_line_info(node: ast.AST) -> bool:
+        """Check if node has valid line number information."""
+        if not hasattr(node, "lineno") or not hasattr(node, "end_lineno"):
+            return False
+        return node.lineno is not None and node.end_lineno is not None
+
+    @staticmethod
+    def _add_node_to_index(node: ast.AST, line_to_nodes: dict[int, list[ast.AST]]) -> None:
+        """Add node to all lines it overlaps in the index."""
+        for line_num in range(node.lineno, node.end_lineno + 1):  # type: ignore[attr-defined]
+            if line_num not in line_to_nodes:
+                line_to_nodes[line_num] = []
+            line_to_nodes[line_num].append(node)
+
+    def _check_overlapping_nodes(self, tree: ast.Module, start_line: int, end_line: int) -> bool:
+        """Check if any AST node overlaps and matches single-statement pattern.
+
+        Performance optimization: Use line-to-node index for O(1) lookups instead of O(n) ast.walk().
+        """
+        if self._line_to_nodes is not None:
+            return self._check_nodes_via_index(start_line, end_line)
+        return self._check_nodes_via_walk(tree, start_line, end_line)
+
+    def _check_nodes_via_index(self, start_line: int, end_line: int) -> bool:
+        """Check nodes using line-to-node index for O(1) lookups."""
+        candidates = self._collect_candidate_nodes_from_index(start_line, end_line)
+        return self._any_node_matches_pattern(candidates, start_line, end_line)
+
+    def _collect_candidate_nodes_from_index(self, start_line: int, end_line: int) -> set[ast.AST]:
+        """Collect unique nodes that overlap with the line range from index."""
+        candidate_nodes: set[ast.AST] = set()
+        for line_num in range(start_line, end_line + 1):
+            if self._line_to_nodes and line_num in self._line_to_nodes:
+                candidate_nodes.update(self._line_to_nodes[line_num])
+        return candidate_nodes
+
+    def _any_node_matches_pattern(
+        self, nodes: set[ast.AST], start_line: int, end_line: int
+    ) -> bool:
+        """Check if any node matches single-statement pattern."""
+        for node in nodes:
             if self._is_single_statement_pattern(node, start_line, end_line):
                 return True
         return False

+    def _check_nodes_via_walk(self, tree: ast.Module, start_line: int, end_line: int) -> bool:
+        """Check nodes using ast.walk() fallback for tests or standalone calls."""
+        for node in ast.walk(tree):
+            if self._node_matches_via_walk(node, start_line, end_line):
+                return True
+        return False
+
+    def _node_matches_via_walk(self, node: ast.AST, start_line: int, end_line: int) -> bool:
+        """Check if a single node overlaps and matches pattern."""
+        if not self._node_overlaps_range(node, start_line, end_line):
+            return False
+        return self._is_single_statement_pattern(node, start_line, end_line)
+
+    @staticmethod
+    def _node_overlaps_range(node: ast.AST, start_line: int, end_line: int) -> bool:
+        """Check if node overlaps with the given line range."""
+        if not hasattr(node, "lineno") or not hasattr(node, "end_lineno"):
+            return False
+        node_end = node.end_lineno
+        node_start = node.lineno
+        return not (node_end < start_line or node_start > end_line)
+
     def _node_overlaps_and_matches(self, node: ast.AST, start_line: int, end_line: int) -> bool:
         """Check if node overlaps with range and matches single-statement pattern."""
         if not hasattr(node, "lineno") or not hasattr(node, "end_lineno"):
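For orientation, here is a minimal standalone sketch of the line-to-node indexing idea the hunks above add to PythonDuplicateAnalyzer. It uses only the standard library and toy source text, and does not reuse thailint's internal APIs; it only illustrates why one pass of `ast.walk()` plus a dict lookup per line replaces repeated full-tree walks.

```python
# Sketch only: build the index once, then answer "which nodes overlap lines 2-3?"
# with dictionary lookups instead of walking the whole AST per window.
import ast

source = "x = 1\nfor i in range(3):\n    print(i)\n"
tree = ast.parse(source)

line_to_nodes: dict[int, list[ast.AST]] = {}
for node in ast.walk(tree):
    lineno = getattr(node, "lineno", None)
    end_lineno = getattr(node, "end_lineno", None)
    if lineno is not None and end_lineno is not None:
        for line_num in range(lineno, end_lineno + 1):
            line_to_nodes.setdefault(line_num, []).append(node)

# Candidate nodes for a window spanning lines 2-3: one dict lookup per line.
candidates = {n for ln in range(2, 4) for n in line_to_nodes.get(ln, [])}
print(sorted(type(n).__name__ for n in candidates))
```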
src/linters/dry/token_hasher.py
CHANGED
@@ -33,26 +33,80 @@ class TokenHasher:
         List of normalized code lines (non-empty, comments removed, imports filtered)
         """
         lines = []
+        in_multiline_import = False

         for line in code.split("\n"):
-
-            line = self._strip_comments(line)
-
-            # Normalize whitespace (collapse to single space)
-            line = " ".join(line.split())
-
-            # Skip empty lines
+            line = self._normalize_line(line)
             if not line:
                 continue

-            #
-
+            # Update multi-line import state and check if line should be skipped
+            in_multiline_import, should_skip = self._should_skip_import_line(
+                line, in_multiline_import
+            )
+            if should_skip:
                 continue

             lines.append(line)

         return lines

+    def _normalize_line(self, line: str) -> str:
+        """Normalize a line by removing comments and excess whitespace.
+
+        Args:
+            line: Raw source code line
+
+        Returns:
+            Normalized line (empty string if line has no content)
+        """
+        line = self._strip_comments(line)
+        return " ".join(line.split())
+
+    def _should_skip_import_line(self, line: str, in_multiline_import: bool) -> tuple[bool, bool]:
+        """Determine if an import line should be skipped.
+
+        Args:
+            line: Normalized code line
+            in_multiline_import: Whether we're currently inside a multi-line import
+
+        Returns:
+            Tuple of (new_in_multiline_import_state, should_skip_line)
+        """
+        if self._is_multiline_import_start(line):
+            return True, True
+
+        if in_multiline_import:
+            return self._handle_multiline_import_continuation(line)
+
+        if self._is_import_statement(line):
+            return False, True
+
+        return False, False
+
+    def _is_multiline_import_start(self, line: str) -> bool:
+        """Check if line starts a multi-line import statement.
+
+        Args:
+            line: Normalized code line
+
+        Returns:
+            True if line starts a multi-line import (has opening paren but no closing)
+        """
+        return self._is_import_statement(line) and "(" in line and ")" not in line
+
+    def _handle_multiline_import_continuation(self, line: str) -> tuple[bool, bool]:
+        """Handle a line that's part of a multi-line import.
+
+        Args:
+            line: Normalized code line inside a multi-line import
+
+        Returns:
+            Tuple of (still_in_import, should_skip)
+        """
+        closes_import = ")" in line
+        return not closes_import, True
+

     def _strip_comments(self, line: str) -> str:
         """Remove comments from line (Python # and // style).
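A hedged sketch of the multi-line import state machine that `_should_skip_import_line` introduces above. The returned tuple is (new_in_multiline_import_state, should_skip_line); the import-detection test used here (`startswith`) is an assumption, since the body of `_is_import_statement` is not part of this diff.

```python
# Standalone rewrite of the state transitions, for illustration only.
def should_skip_import_line(line: str, in_multiline: bool) -> tuple[bool, bool]:
    is_import = line.startswith(("import ", "from "))  # assumption: real check lives in _is_import_statement
    if is_import and "(" in line and ")" not in line:
        return True, True               # start of a multi-line import: skip and stay inside
    if in_multiline:
        return ")" not in line, True    # keep skipping until the closing paren
    if is_import:
        return False, True              # single-line import: skip
    return False, False                 # regular code: keep

state = False
for raw in ["from os import (", "path,", ")", "x = 1"]:
    state, skip = should_skip_import_line(raw, state)
    print(raw, "->", "skip" if skip else "keep")
# The three import lines are skipped; "x = 1" is kept.
```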
src/linters/dry/typescript_analyzer.py
CHANGED

@@ -186,20 +186,22 @@ class TypeScriptDuplicateAnalyzer(BaseTokenAnalyzer): # thailint: ignore[srp.vi
             List of (original_line_number, normalized_code) tuples
         """
         lines_with_numbers = []
+        in_multiline_import = False

         for line_num, line in enumerate(content.split("\n"), start=1):
             # Skip JSDoc comment lines
             if line_num in jsdoc_lines:
                 continue

-
-            line = self._hasher._strip_comments(line)  # pylint: disable=protected-access
-            line = " ".join(line.split())
-
+            line = self._hasher._normalize_line(line)  # pylint: disable=protected-access
             if not line:
                 continue

-            if
+            # Update multi-line import state and check if line should be skipped
+            in_multiline_import, should_skip = self._hasher._should_skip_import_line(  # pylint: disable=protected-access
+                line, in_multiline_import
+            )
+            if should_skip:
                 continue

             lines_with_numbers.append((line_num, line))
src/linters/file_header/__init__.py
ADDED

@@ -0,0 +1,24 @@
+"""
+File: src/linters/file_header/__init__.py
+Purpose: File header linter module initialization
+Exports: FileHeaderRule
+Depends: linter.FileHeaderRule
+Implements: Module-level exports for clean API
+Related: linter.py for main rule implementation
+
+Overview:
+    Initializes the file header linter module providing multi-language file header
+    validation with mandatory field checking, atemporal language detection, and configuration
+    support. Main entry point for file header linting functionality.
+
+Usage:
+    from src.linters.file_header import FileHeaderRule
+    rule = FileHeaderRule()
+    violations = rule.check(context)
+
+Notes: Follows standard Python module initialization pattern with __all__ export control
+"""
+
+from .linter import FileHeaderRule
+
+__all__ = ["FileHeaderRule"]
src/linters/file_header/atemporal_detector.py
ADDED

@@ -0,0 +1,87 @@
+"""
+File: src/linters/file_header/atemporal_detector.py
+Purpose: Detects temporal language patterns in file headers
+Exports: AtemporalDetector class
+Depends: re module for regex matching
+Implements: Regex-based pattern matching with configurable patterns
+Related: linter.py for detector usage, violation_builder.py for violation creation
+
+Overview:
+    Implements pattern-based detection of temporal language that violates atemporal
+    documentation requirements. Detects dates, temporal qualifiers, state change language,
+    and future references using regex patterns. Provides violation details for each pattern match.
+
+Usage:
+    detector = AtemporalDetector()
+    violations = detector.detect_violations(header_text)
+
+Notes: Four pattern categories - dates, temporal qualifiers, state changes, future references
+"""
+
+import re
+
+
+class AtemporalDetector:
+    """Detects temporal language patterns in text."""
+
+    # Date patterns
+    DATE_PATTERNS = [
+        (r"\d{4}-\d{2}-\d{2}", "ISO date format (YYYY-MM-DD)"),
+        (
+            r"(?:January|February|March|April|May|June|July|August|September|October|November|December)\s+\d{4}",
+            "Month Year format",
+        ),
+        (r"(?:Created|Updated|Modified):\s*\d{4}", "Date metadata"),
+    ]
+
+    # Temporal qualifiers
+    TEMPORAL_QUALIFIERS = [
+        (r"\bcurrently\b", 'temporal qualifier "currently"'),
+        (r"\bnow\b", 'temporal qualifier "now"'),
+        (r"\brecently\b", 'temporal qualifier "recently"'),
+        (r"\bsoon\b", 'temporal qualifier "soon"'),
+        (r"\bfor now\b", 'temporal qualifier "for now"'),
+    ]
+
+    # State change language
+    STATE_CHANGE = [
+        (r"\breplaces?\b", 'state change "replaces"'),
+        (r"\bmigrated from\b", 'state change "migrated from"'),
+        (r"\bformerly\b", 'state change "formerly"'),
+        (r"\bold implementation\b", 'state change "old"'),
+        (r"\bnew implementation\b", 'state change "new"'),
+    ]
+
+    # Future references
+    FUTURE_REFS = [
+        (r"\bwill be\b", 'future reference "will be"'),
+        (r"\bplanned\b", 'future reference "planned"'),
+        (r"\bto be added\b", 'future reference "to be added"'),
+        (r"\bcoming soon\b", 'future reference "coming soon"'),
+    ]
+
+    def detect_violations(  # thailint: ignore[nesting]
+        self, text: str
+    ) -> list[tuple[str, str, int]]:
+        """Detect all temporal language violations in text.
+
+        Args:
+            text: Text to check for temporal language
+
+        Returns:
+            List of (pattern, description, line_number) tuples for each violation
+        """
+        violations = []
+
+        # Check all pattern categories
+        all_patterns = (
+            self.DATE_PATTERNS + self.TEMPORAL_QUALIFIERS + self.STATE_CHANGE + self.FUTURE_REFS
+        )
+
+        lines = text.split("\n")
+        for line_num, line in enumerate(lines, start=1):
+            for pattern, description in all_patterns:
+                if re.search(pattern, line, re.IGNORECASE):
+                    violations.append((pattern, description, line_num))
+
+        return violations
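Assuming the module layout shown in this diff, the detector can be exercised directly; each hit is a (pattern, description, line_number) tuple.

```python
# Quick check of the detector on a header containing temporal language.
from src.linters.file_header.atemporal_detector import AtemporalDetector

header = (
    "Purpose: Parses configs\n"
    "Overview: Currently replaces the old implementation, updated 2024-01-15."
)
for pattern, description, line_num in AtemporalDetector().detect_violations(header):
    print(line_num, description)
# Hits land on line 2: 'temporal qualifier "currently"', 'state change "replaces"',
# 'state change "old"', and the ISO date pattern.
```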
src/linters/file_header/config.py
ADDED

@@ -0,0 +1,66 @@
+"""
+File: src/linters/file_header/config.py
+Purpose: Configuration model for file header linter
+Exports: FileHeaderConfig dataclass
+Depends: dataclasses, pathlib
+Implements: Configuration with validation and defaults
+Related: linter.py for configuration usage
+
+Overview:
+    Defines configuration structure for file header linter including required fields
+    per language, ignore patterns, and validation options. Provides defaults matching
+    ai-doc-standard.md requirements and supports loading from .thailint.yaml configuration.
+
+Usage:
+    config = FileHeaderConfig()
+    config = FileHeaderConfig.from_dict(config_dict, "python")
+
+Notes: Dataclass with validation and language-specific defaults
+"""
+
+from dataclasses import dataclass, field
+
+
+@dataclass
+class FileHeaderConfig:
+    """Configuration for file header linting."""
+
+    # Required fields by language
+    required_fields_python: list[str] = field(
+        default_factory=lambda: [
+            "Purpose",
+            "Scope",
+            "Overview",
+            "Dependencies",
+            "Exports",
+            "Interfaces",
+            "Implementation",
+        ]
+    )
+
+    # Enforce atemporal language checking
+    enforce_atemporal: bool = True
+
+    # Patterns to ignore (file paths)
+    ignore: list[str] = field(
+        default_factory=lambda: ["test/**", "**/migrations/**", "**/__init__.py"]
+    )
+
+    @classmethod
+    def from_dict(cls, config_dict: dict, language: str) -> "FileHeaderConfig":
+        """Create config from dictionary.
+
+        Args:
+            config_dict: Dictionary of configuration values
+            language: Programming language for language-specific config
+
+        Returns:
+            FileHeaderConfig instance with values from dictionary
+        """
+        return cls(
+            required_fields_python=config_dict.get("required_fields", {}).get(
+                "python", cls().required_fields_python
+            ),
+            enforce_atemporal=config_dict.get("enforce_atemporal", True),
+            ignore=config_dict.get("ignore", cls().ignore),
+        )
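A small sketch of loading the new configuration from a plain dict, mirroring the `from_dict()` method above; the dict shape is inferred from that method rather than from published documentation.

```python
# Builds a FileHeaderConfig from a dict resembling a parsed .thailint.yaml section.
from src.linters.file_header.config import FileHeaderConfig

raw = {
    "required_fields": {"python": ["Purpose", "Overview"]},
    "enforce_atemporal": False,
    "ignore": ["test/**"],
}
config = FileHeaderConfig.from_dict(raw, "python")
print(config.required_fields_python)  # ['Purpose', 'Overview']
print(config.enforce_atemporal)       # False
```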
src/linters/file_header/field_validator.py
ADDED

@@ -0,0 +1,69 @@
+"""
+File: src/linters/file_header/field_validator.py
+Purpose: Validates mandatory fields in file headers
+Exports: FieldValidator class
+Depends: FileHeaderConfig for field requirements
+Implements: Configuration-driven validation with field presence checking
+Related: linter.py for validator usage, config.py for configuration
+
+Overview:
+    Validates presence and quality of mandatory header fields. Checks that all
+    required fields are present, non-empty, and meet minimum content requirements.
+    Supports language-specific required fields and provides detailed violation messages.
+
+Usage:
+    validator = FieldValidator(config)
+    violations = validator.validate_fields(fields, "python")
+
+Notes: Language-specific field requirements defined in config
+"""
+
+from .config import FileHeaderConfig
+
+
+class FieldValidator:
+    """Validates mandatory fields in headers."""
+
+    def __init__(self, config: FileHeaderConfig):
+        """Initialize validator with configuration.
+
+        Args:
+            config: File header configuration with required fields
+        """
+        self.config = config
+
+    def validate_fields(  # thailint: ignore[nesting]
+        self, fields: dict[str, str], language: str
+    ) -> list[tuple[str, str]]:
+        """Validate all required fields are present.
+
+        Args:
+            fields: Dictionary of parsed header fields
+            language: File language (python, typescript, etc.)
+
+        Returns:
+            List of (field_name, error_message) tuples for missing/invalid fields
+        """
+        violations = []
+        required_fields = self._get_required_fields(language)
+
+        for field_name in required_fields:
+            if field_name not in fields:
+                violations.append((field_name, f"Missing mandatory field: {field_name}"))
+            elif not fields[field_name] or len(fields[field_name].strip()) == 0:
+                violations.append((field_name, f"Empty mandatory field: {field_name}"))
+
+        return violations
+
+    def _get_required_fields(self, language: str) -> list[str]:
+        """Get required fields for language.
+
+        Args:
+            language: Programming language
+
+        Returns:
+            List of required field names for the language
+        """
+        if language == "python":
+            return self.config.required_fields_python
+        return []  # Other languages in PR5
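A minimal sketch of the validator's contract as defined above: it returns (field_name, error_message) pairs for fields that are missing or empty, using the default Python field list from FileHeaderConfig.

```python
# Exercises FieldValidator with a deliberately incomplete header field mapping.
from src.linters.file_header.config import FileHeaderConfig
from src.linters.file_header.field_validator import FieldValidator

validator = FieldValidator(FileHeaderConfig())
fields = {"Purpose": "Parses configs", "Scope": ""}  # other mandatory fields absent
for name, message in validator.validate_fields(fields, "python"):
    print(message)
# Prints "Empty mandatory field: Scope" plus "Missing mandatory field: ..." for the rest.
```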
src/linters/file_header/linter.py
ADDED

@@ -0,0 +1,313 @@
+"""
+File: src/linters/file_header/linter.py
+Purpose: Main file header linter rule implementation
+Exports: FileHeaderRule class
+Depends: BaseLintRule, PythonHeaderParser, FieldValidator, AtemporalDetector, ViolationBuilder
+Implements: Composition pattern with helper classes, AST-based Python parsing
+Related: config.py for configuration, python_parser.py for extraction
+
+Overview:
+    Orchestrates file header validation for Python files using focused helper classes.
+    Coordinates docstring extraction, field validation, atemporal language detection, and
+    violation building. Supports configuration from .thailint.yaml and ignore directives.
+    Validates headers against mandatory field requirements and atemporal language standards.
+
+Usage:
+    rule = FileHeaderRule()
+    violations = rule.check(context)
+
+Notes: Follows composition pattern from magic_numbers linter for maintainability
+"""
+
+from pathlib import Path
+
+from src.core.base import BaseLintContext, BaseLintRule
+from src.core.linter_utils import load_linter_config
+from src.core.types import Violation
+from src.linter_config.ignore import IgnoreDirectiveParser
+
+from .atemporal_detector import AtemporalDetector
+from .config import FileHeaderConfig
+from .field_validator import FieldValidator
+from .python_parser import PythonHeaderParser
+from .violation_builder import ViolationBuilder
+
+
+class FileHeaderRule(BaseLintRule):  # thailint: ignore[srp]
+    """Validates file headers for mandatory fields and atemporal language.
+
+    Method count (17) exceeds SRP guideline (8) because proper A-grade complexity
+    refactoring requires extracting helper methods. Class maintains single responsibility
+    of file header validation - all methods support this core purpose through composition
+    pattern with focused helper classes (parser, validator, detector, builder).
+    """
+
+    def __init__(self) -> None:
+        """Initialize the file header rule."""
+        self._violation_builder = ViolationBuilder(self.rule_id)
+        self._ignore_parser = IgnoreDirectiveParser()
+
+    @property
+    def rule_id(self) -> str:
+        """Unique identifier for this rule.
+
+        Returns:
+            Rule identifier string
+        """
+        return "file-header.validation"
+
+    @property
+    def rule_name(self) -> str:
+        """Human-readable name for this rule.
+
+        Returns:
+            Rule name string
+        """
+        return "File Header Validation"
+
+    @property
+    def description(self) -> str:
+        """Description of what this rule checks.
+
+        Returns:
+            Rule description string
+        """
+        return "Validates file headers for mandatory fields and atemporal language"
+
+    def check(self, context: BaseLintContext) -> list[Violation]:
+        """Check file header for violations.
+
+        Args:
+            context: Lint context with file information
+
+        Returns:
+            List of violations found in file header
+        """
+        # Only Python for now (PR3), multi-language in PR5
+        if context.language != "python":
+            return []
+
+        # Check for file-level ignore directives first
+        if self._has_file_ignore(context):
+            return []
+
+        # Load configuration
+        config = self._load_config(context)
+
+        # Check if file should be ignored by pattern
+        if self._should_ignore_file(context, config):
+            return []
+
+        # Extract and validate header
+        return self._check_python_header(context, config)
+
+    def _has_file_ignore(self, context: BaseLintContext) -> bool:
+        """Check if file has file-level ignore directive.
+
+        Args:
+            context: Lint context
+
+        Returns:
+            True if file has ignore-file directive
+        """
+        file_content = context.file_content or ""
+
+        if self._has_standard_ignore(file_content):
+            return True
+
+        return self._has_custom_ignore_syntax(file_content)
+
+    def _has_standard_ignore(self, file_content: str) -> bool:  # thailint: ignore[nesting]
+        """Check standard ignore parser for file-level ignores."""
+        # Check first 10 lines for standard ignore directives
+        first_lines = file_content.splitlines()[:10]
+        for line in first_lines:
+            if self._ignore_parser._has_ignore_directive_marker(line):  # pylint: disable=protected-access
+                if self._ignore_parser._check_specific_rule_ignore(line, self.rule_id):  # pylint: disable=protected-access
+                    return True
+                if self._ignore_parser._check_general_ignore(line):  # pylint: disable=protected-access
+                    return True
+        return False
+
+    def _has_custom_ignore_syntax(self, file_content: str) -> bool:
+        """Check custom file-level ignore syntax."""
+        first_lines = file_content.splitlines()[:10]
+        return any(self._is_ignore_line(line) for line in first_lines)
+
+    def _is_ignore_line(self, line: str) -> bool:
+        """Check if line contains ignore directive."""
+        line_lower = line.lower()
+        return "# thailint-ignore-file:" in line_lower or "# thailint-ignore" in line_lower
+
+    def _load_config(self, context: BaseLintContext) -> FileHeaderConfig:
+        """Load configuration from context.
+
+        Args:
+            context: Lint context
+
+        Returns:
+            FileHeaderConfig with loaded or default values
+        """
+        # Try production config first
+        if hasattr(context, "metadata") and isinstance(context.metadata, dict):
+            if "file_header" in context.metadata:
+                return load_linter_config(context, "file_header", FileHeaderConfig)  # type: ignore[type-var]
+
+        # Use defaults
+        return FileHeaderConfig()
+
+    def _should_ignore_file(self, context: BaseLintContext, config: FileHeaderConfig) -> bool:
+        """Check if file matches ignore patterns.
+
+        Args:
+            context: Lint context
+            config: File header configuration
+
+        Returns:
+            True if file should be ignored
+        """
+        if not context.file_path:
+            return False
+
+        file_path = Path(context.file_path)
+        return any(self._matches_ignore_pattern(file_path, p) for p in config.ignore)
+
+    def _matches_ignore_pattern(self, file_path: Path, pattern: str) -> bool:
+        """Check if file path matches a single ignore pattern."""
+        if file_path.match(pattern):
+            return True
+
+        if self._matches_directory_pattern(file_path, pattern):
+            return True
+
+        if self._matches_file_pattern(file_path, pattern):
+            return True
+
+        return pattern in str(file_path)
+
+    def _matches_directory_pattern(self, file_path: Path, pattern: str) -> bool:
+        """Match directory patterns like **/migrations/**."""
+        if pattern.startswith("**/") and pattern.endswith("/**"):
+            dir_name = pattern[3:-3]
+            return dir_name in file_path.parts
+        return False
+
+    def _matches_file_pattern(self, file_path: Path, pattern: str) -> bool:
+        """Match file patterns like **/__init__.py."""
+        if pattern.startswith("**/"):
+            filename_pattern = pattern[3:]
+            path_str = str(file_path)
+            return file_path.name == filename_pattern or path_str.endswith(filename_pattern)
+        return False
+
+    def _check_python_header(
+        self, context: BaseLintContext, config: FileHeaderConfig
+    ) -> list[Violation]:
+        """Check Python file header.
+
+        Args:
+            context: Lint context
+            config: Configuration
+
+        Returns:
+            List of violations filtered through ignore directives
+        """
+        parser = PythonHeaderParser()
+        header = parser.extract_header(context.file_content or "")
+
+        if not header:
+            return self._build_missing_header_violations(context)
+
+        fields = parser.parse_fields(header)
+        violations = self._validate_header_fields(fields, context, config)
+        violations.extend(self._check_atemporal_violations(header, context, config))
+
+        return self._filter_ignored_violations(violations, context)
+
+    def _build_missing_header_violations(self, context: BaseLintContext) -> list[Violation]:
+        """Build violations for missing header."""
+        return [
+            self._violation_builder.build_missing_field(
+                "docstring", str(context.file_path or ""), 1
+            )
+        ]
+
+    def _validate_header_fields(
+        self, fields: dict[str, str], context: BaseLintContext, config: FileHeaderConfig
+    ) -> list[Violation]:
+        """Validate mandatory header fields."""
+        violations = []
+        field_validator = FieldValidator(config)
+        field_violations = field_validator.validate_fields(fields, context.language)
+
+        for field_name, _error_message in field_violations:
+            violations.append(
+                self._violation_builder.build_missing_field(
+                    field_name, str(context.file_path or ""), 1
+                )
+            )
+        return violations
+
+    def _check_atemporal_violations(
+        self, header: str, context: BaseLintContext, config: FileHeaderConfig
+    ) -> list[Violation]:
+        """Check for atemporal language violations."""
+        if not config.enforce_atemporal:
+            return []
+
+        violations = []
+        atemporal_detector = AtemporalDetector()
+        atemporal_violations = atemporal_detector.detect_violations(header)
+
+        for pattern, description, line_num in atemporal_violations:
+            violations.append(
+                self._violation_builder.build_atemporal_violation(
+                    pattern, description, str(context.file_path or ""), line_num
+                )
+            )
+        return violations
+
+    def _filter_ignored_violations(
+        self, violations: list[Violation], context: BaseLintContext
+    ) -> list[Violation]:
+        """Filter out violations that should be ignored.
+
+        Args:
+            violations: List of violations to filter
+            context: Lint context with file content
+
+        Returns:
+            Filtered list of violations
+        """
+        file_content = context.file_content or ""
+        lines = file_content.splitlines()
+
+        filtered = []
+        for v in violations:
+            # Check standard ignore directives
+            if self._ignore_parser.should_ignore_violation(v, file_content):
+                continue
+
+            # Check custom line-level ignore syntax: # thailint-ignore-line:
+            if self._has_line_level_ignore(lines, v):
+                continue
+
+            filtered.append(v)
+
+        return filtered
+
+    def _has_line_level_ignore(self, lines: list[str], violation: Violation) -> bool:
+        """Check for thailint-ignore-line directive.
+
+        Args:
+            lines: File content split into lines
+            violation: Violation to check
+
+        Returns:
+            True if line has ignore directive
+        """
+        if violation.line <= 0 or violation.line > len(lines):
+            return False
+
+        line_content = lines[violation.line - 1]  # Convert to 0-indexed
+        return "# thailint-ignore-line:" in line_content.lower()
src/linters/file_header/python_parser.py
ADDED

@@ -0,0 +1,86 @@
+"""
+File: src/linters/file_header/python_parser.py
+Purpose: Python docstring extraction and parsing for file headers
+Exports: PythonHeaderParser class
+Depends: Python ast module
+Implements: AST-based docstring extraction with field parsing
+Related: linter.py for parser usage, field_validator.py for field validation
+
+Overview:
+    Extracts module-level docstrings from Python files using AST parsing.
+    Parses structured header fields from docstring content and handles both
+    well-formed and malformed headers. Provides field extraction and validation
+    support for FileHeaderRule.
+
+Usage:
+    parser = PythonHeaderParser()
+    header = parser.extract_header(code)
+    fields = parser.parse_fields(header)
+
+Notes: Uses ast.get_docstring() for reliable module-level docstring extraction
+"""
+
+import ast
+
+
+class PythonHeaderParser:
+    """Extracts and parses Python file headers from docstrings."""
+
+    def extract_header(self, code: str) -> str | None:
+        """Extract module-level docstring from Python code.
+
+        Args:
+            code: Python source code
+
+        Returns:
+            Module docstring or None if not found or parse error
+        """
+        try:
+            tree = ast.parse(code)
+            return ast.get_docstring(tree)
+        except SyntaxError:
+            return None
+
+    def parse_fields(self, header: str) -> dict[str, str]:  # thailint: ignore[nesting]
+        """Parse structured fields from header text.
+
+        Args:
+            header: Header docstring text
+
+        Returns:
+            Dictionary mapping field_name -> field_value
+        """
+        fields: dict[str, str] = {}
+        current_field: str | None = None
+        current_value: list[str] = []
+
+        for line in header.split("\n"):
+            if self._is_new_field_line(line):
+                current_field = self._save_and_start_new_field(
+                    fields, current_field, current_value, line
+                )
+                current_value = [line.split(":", 1)[1].strip()]
+            elif current_field:
+                current_value.append(line.strip())
+
+        self._save_current_field(fields, current_field, current_value)
+        return fields
+
+    def _is_new_field_line(self, line: str) -> bool:
+        """Check if line starts a new field."""
+        return ":" in line and not line.startswith(" ")
+
+    def _save_and_start_new_field(
+        self, fields: dict[str, str], current_field: str | None, current_value: list[str], line: str
+    ) -> str:
+        """Save current field and start new one."""
+        if current_field:
+            fields[current_field] = "\n".join(current_value).strip()
+        return line.split(":", 1)[0].strip()
+
+    def _save_current_field(
+        self, fields: dict[str, str], current_field: str | None, current_value: list[str]
+    ) -> None:
+        """Save the last field."""
+        if current_field:
+            fields[current_field] = "\n".join(current_value).strip()
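A short sketch of the parser's behavior on a small header, using the classes added in this diff; field values continue across indented lines until the next `Name:` line.

```python
# Demonstrates extract_header() + parse_fields() on a toy module docstring.
from src.linters.file_header.python_parser import PythonHeaderParser

code = '''"""
Purpose: Example module
Overview: Demonstrates header parsing
    across a continuation line.
"""
'''
parser = PythonHeaderParser()
header = parser.extract_header(code)
fields = parser.parse_fields(header or "")
print(fields["Purpose"])   # Example module
print(fields["Overview"])  # two lines joined with a newline
```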
src/linters/file_header/violation_builder.py
ADDED

@@ -0,0 +1,78 @@
+"""
+File: src/linters/file_header/violation_builder.py
+Purpose: Builds violation messages for file header linter
+Exports: ViolationBuilder class
+Depends: Violation type from core
+Implements: Message templates with context-specific details
+Related: linter.py for builder usage, atemporal_detector.py for temporal violations
+
+Overview:
+    Creates formatted violation messages for file header validation failures.
+    Handles missing fields, atemporal language, and other header issues with clear,
+    actionable messages. Provides consistent violation format across all validation types.
+
+Usage:
+    builder = ViolationBuilder("file-header.validation")
+    violation = builder.build_missing_field("Purpose", "test.py", 1)
+
+Notes: Follows standard violation format with rule_id, message, location, severity, suggestion
+"""
+
+from src.core.types import Severity, Violation
+
+
+class ViolationBuilder:
+    """Builds violation messages for file header issues."""
+
+    def __init__(self, rule_id: str):
+        """Initialize with rule ID.
+
+        Args:
+            rule_id: Rule identifier for violations
+        """
+        self.rule_id = rule_id
+
+    def build_missing_field(self, field_name: str, file_path: str, line: int = 1) -> Violation:
+        """Build violation for missing mandatory field.
+
+        Args:
+            field_name: Name of missing field
+            file_path: Path to file
+            line: Line number (default 1 for header)
+
+        Returns:
+            Violation object describing missing field
+        """
+        return Violation(
+            rule_id=self.rule_id,
+            message=f"Missing mandatory field: {field_name}",
+            file_path=file_path,
+            line=line,
+            column=1,
+            severity=Severity.ERROR,
+            suggestion=f"Add '{field_name}:' field to file header",
+        )
+
+    def build_atemporal_violation(
+        self, pattern: str, description: str, file_path: str, line: int
+    ) -> Violation:
+        """Build violation for temporal language.
+
+        Args:
+            pattern: Matched regex pattern
+            description: Description of temporal language
+            file_path: Path to file
+            line: Line number of violation
+
+        Returns:
+            Violation object describing temporal language issue
+        """
+        return Violation(
+            rule_id=self.rule_id,
+            message=f"Temporal language detected: {description}",
+            file_path=file_path,
+            line=line,
+            column=1,
+            severity=Severity.ERROR,
+            suggestion="Use present-tense factual descriptions without temporal references",
+        )
{thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: thailint
-Version: 0.4.4
+Version: 0.4.6
 Summary: The AI Linter - Enterprise-grade linting and governance for AI-generated code across multiple languages
 License: MIT
 License-File: LICENSE
@@ -28,7 +28,7 @@ Requires-Dist: pyprojroot (>=0.3.0,<0.4.0)
 Requires-Dist: pyyaml (>=6.0,<7.0)
 Requires-Dist: tree-sitter (>=0.25.2,<0.26.0)
 Requires-Dist: tree-sitter-typescript (>=0.23.2,<0.24.0)
-Project-URL: Documentation, https://
+Project-URL: Documentation, https://thai-lint.readthedocs.io/
 Project-URL: Homepage, https://github.com/be-wise-be-kind/thai-lint
 Project-URL: Repository, https://github.com/be-wise-be-kind/thai-lint
 Description-Content-Type: text/markdown
@@ -37,19 +37,20 @@ Description-Content-Type: text/markdown

 [](https://opensource.org/licenses/MIT)
 [](https://www.python.org/downloads/)
-[](tests/)
 [](htmlcov/)
+[](https://thai-lint.readthedocs.io/en/latest/?badge=latest)

 The AI Linter - Enterprise-ready linting and governance for AI-generated code across multiple languages.

 ## Documentation

 **New to thailint?** Start here:
-- **[Quick Start Guide](
-- **[Configuration Reference](
-- **[Troubleshooting Guide](
+- **[Quick Start Guide](https://thai-lint.readthedocs.io/en/latest/quick-start/)** - Get running in 5 minutes
+- **[Configuration Reference](https://thai-lint.readthedocs.io/en/latest/configuration/)** - Complete config options for all linters
+- **[Troubleshooting Guide](https://thai-lint.readthedocs.io/en/latest/troubleshooting/)** - Common issues and solutions

-**Full Documentation:** Browse the **[
+**Full Documentation:** Browse the **[documentation site](https://thai-lint.readthedocs.io/)** for comprehensive guides covering installation, all linters, configuration patterns, and integration examples.

 ## Overview

@@ -66,7 +67,7 @@ We're not trying to replace the wonderful existing linters like Pylint, ESLint,

 thailint complements your existing linting stack by catching the patterns AI tools repeatedly miss.

-**Complete documentation available
+**Complete documentation available at [thai-lint.readthedocs.io](https://thai-lint.readthedocs.io/)** covering installation, configuration, all linters, and troubleshooting.

 ## Features

@@ -164,7 +165,7 @@ thailint dry --config .thailint.yaml src/
 thailint dry --format json src/
 ```

-**New to thailint?** See the **[Quick Start Guide](
+**New to thailint?** See the **[Quick Start Guide](https://thai-lint.readthedocs.io/en/latest/quick-start/)** for a complete walkthrough including config generation, understanding output, and next steps.

 ### Library Mode

@@ -356,9 +357,9 @@ magic-numbers:
 }
 ```

-See [Configuration Guide](
+See [Configuration Guide](https://thai-lint.readthedocs.io/en/latest/configuration/) for complete reference.

-**Need help with ignores?** See **[How to Ignore Violations](
+**Need help with ignores?** See **[How to Ignore Violations](https://thai-lint.readthedocs.io/en/latest/how-to-ignore-violations/)** for complete guide to all ignore levels (line, method, class, file, repository).

 ## Nesting Depth Linter

@@ -444,7 +445,7 @@ Common patterns to reduce nesting:
 - **TypeScript**: Full support (if/for/while/try/switch)
 - **JavaScript**: Supported via TypeScript parser

-See [Nesting Linter Guide](
+See [Nesting Linter Guide](https://thai-lint.readthedocs.io/en/latest/nesting-linter/) for comprehensive documentation and refactoring patterns.

 ## Single Responsibility Principle (SRP) Linter

@@ -561,7 +562,7 @@ Common patterns to fix SRP violations (discovered during dogfooding):
 - **After**: Extract Class pattern applied - 5 focused classes (ConfigLoader, PatternValidator, RuleChecker, PathResolver, FilePlacementLinter)
 - **Result**: Each class ≤8 methods, ≤150 LOC, single responsibility

-See [SRP Linter Guide](
+See [SRP Linter Guide](https://thai-lint.readthedocs.io/en/latest/srp-linter/) for comprehensive documentation and refactoring patterns.

 ## DRY Linter (Don't Repeat Yourself)

@@ -714,7 +715,7 @@ Built-in filters automatically exclude common non-duplication patterns:
 3. **Extract Utility Module**: Move helper functions to shared utilities
 4. **Template Method**: Use function parameters for variations

-See [DRY Linter Guide](
+See [DRY Linter Guide](https://thai-lint.readthedocs.io/en/latest/dry-linter/) for comprehensive documentation, storage modes, and refactoring patterns.

 ## Magic Numbers Linter

@@ -866,7 +867,7 @@ def get_ports():  # thailint: ignore[magic-numbers] - Standard ports
 # thailint: ignore-file[magic-numbers]
 ```

-See **[How to Ignore Violations](
+See **[How to Ignore Violations](https://thai-lint.readthedocs.io/en/latest/how-to-ignore-violations/)** and **[Magic Numbers Linter Guide](https://thai-lint.readthedocs.io/en/latest/magic-numbers-linter/)** for complete documentation.

 ## Pre-commit Hooks

@@ -928,7 +929,7 @@ repos:
 pass_filenames: true
 ```

-See **[Pre-commit Hooks Guide](
+See **[Pre-commit Hooks Guide](https://thai-lint.readthedocs.io/en/latest/pre-commit-hooks/)** for complete documentation, troubleshooting, and advanced configuration.

 ## Common Use Cases

@@ -1145,20 +1146,20 @@ docker run --rm -v /path/to/workspace:/workspace \

 ### Comprehensive Guides

-- **[Getting Started](
-- **[Configuration Reference](
-- **[How to Ignore Violations](
-- **[API Reference](
-- **[CLI Reference](
-- **[Deployment Modes](
-- **[File Placement Linter](
-- **[Magic Numbers Linter](
-- **[Nesting Depth Linter](
-- **[SRP Linter](
-- **[DRY Linter](
-- **[Pre-commit Hooks](
-- **[Publishing Guide](
-- **[Publishing Checklist](
+- **[Getting Started](https://thai-lint.readthedocs.io/en/latest/getting-started/)** - Installation, first lint, basic config
+- **[Configuration Reference](https://thai-lint.readthedocs.io/en/latest/configuration/)** - Complete config options (YAML/JSON)
+- **[How to Ignore Violations](https://thai-lint.readthedocs.io/en/latest/how-to-ignore-violations/)** - Complete guide to all ignore levels
+- **[API Reference](https://thai-lint.readthedocs.io/en/latest/api-reference/)** - Library API documentation
+- **[CLI Reference](https://thai-lint.readthedocs.io/en/latest/cli-reference/)** - All CLI commands and options
+- **[Deployment Modes](https://thai-lint.readthedocs.io/en/latest/deployment-modes/)** - CLI, Library, and Docker usage
+- **[File Placement Linter](https://thai-lint.readthedocs.io/en/latest/file-placement-linter/)** - Detailed linter guide
+- **[Magic Numbers Linter](https://thai-lint.readthedocs.io/en/latest/magic-numbers-linter/)** - Magic numbers detection guide
+- **[Nesting Depth Linter](https://thai-lint.readthedocs.io/en/latest/nesting-linter/)** - Nesting depth analysis guide
+- **[SRP Linter](https://thai-lint.readthedocs.io/en/latest/srp-linter/)** - Single Responsibility Principle guide
+- **[DRY Linter](https://thai-lint.readthedocs.io/en/latest/dry-linter/)** - Duplicate code detection guide
+- **[Pre-commit Hooks](https://thai-lint.readthedocs.io/en/latest/pre-commit-hooks/)** - Automated quality checks
+- **[Publishing Guide](https://thai-lint.readthedocs.io/en/latest/releasing/)** - Release and publishing workflow
+- **[Publishing Checklist](https://thai-lint.readthedocs.io/en/latest/publishing-checklist/)** - Post-publication validation

 ### Examples

{thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/RECORD
CHANGED

@@ -30,13 +30,20 @@ src/linters/dry/duplicate_storage.py,sha256=3OxE2mtoWGAsNNrB8J2c-4JirLUoqZ9ptydO
 src/linters/dry/file_analyzer.py,sha256=ufSQ85ddsGTqGnBHZNTdV_5DGfTpUmJOB58sIdJNV0I,2928
 src/linters/dry/inline_ignore.py,sha256=ASfA-fp_1aPpkakN2e0T6qdTh8S7Jqj89ovxXJLmFlc,4439
 src/linters/dry/linter.py,sha256=XMLwCgGrFX0l0dVUJs1jpsXOfgxeKKDbxOtN5h5Emhk,5835
-src/linters/dry/python_analyzer.py,sha256=
+src/linters/dry/python_analyzer.py,sha256=jmMUw2YhwkRrAJJDHaHKffGUjCiz5OzWwv_ZJrzNXqw,27367
 src/linters/dry/storage_initializer.py,sha256=ykMALFs4uMUrN0_skEwySDl_t5Dm_LGHllF0OxDhiUI,1366
-src/linters/dry/token_hasher.py,sha256=
-src/linters/dry/typescript_analyzer.py,sha256=
+src/linters/dry/token_hasher.py,sha256=71njBzUsWvQjIWo38AKeRHQsG8K4jrjLTKuih-i6Gis,5372
+src/linters/dry/typescript_analyzer.py,sha256=ShNoB2KfPe010wKEZoFxn-ZKh0MnRUwgADDQKQtfedI,21627
 src/linters/dry/violation_builder.py,sha256=EUiEQIOZjzAoHEqZiIR8WZP8m4dgqJjcveR5mdMyClI,2803
 src/linters/dry/violation_filter.py,sha256=aTOMz8kXG2sZlSVcf3cAxgxHs7f2kBXInfr1V_04fUQ,3125
 src/linters/dry/violation_generator.py,sha256=cc6aKvTxtHSZm0F7Y-gL1bmD3JUphRmAvcbqk9aUzGg,6128
+src/linters/file_header/__init__.py,sha256=S3a2xrOlxnNWD02To5K2ZwILsNEvSj1IvUAH8RjgOV4,791
+src/linters/file_header/atemporal_detector.py,sha256=Rbjs2GHsgO-1r2kUHUuDRsX3XAX0INFZWVrKoUh5v-8,3104
+src/linters/file_header/config.py,sha256=1pjfa0hIUEiE0riMLTH9FQiETXfEkhAFuv1HBx0SGgI,2073
+src/linters/file_header/field_validator.py,sha256=bR0xZ5DhPHBE8lopbcaNwJr0auZlNDkCwml3Q8_7XOY,2380
+src/linters/file_header/linter.py,sha256=ISNnt-YMgkKQl4psVLefp5P7qTQ6wIGidq4dTkTNbaI,11228
+src/linters/file_header/python_parser.py,sha256=TcnA8OCbHK3YtixmFwSP7OCvxjjGLZBioez9wVukz28,2951
+src/linters/file_header/violation_builder.py,sha256=yDNqsZ-hDnURpph9oZNj2lCkJgd_7hprjk3Fr0iPphs,2653
 src/linters/file_placement/__init__.py,sha256=vJ43GZujcbAk-K3DwfsQZ0J3yP_5G35CKssatLyntXk,862
 src/linters/file_placement/config_loader.py,sha256=Of5sTG2S-04efn3KOlXrSxpMcC1ipBpSvCjtJOMmWno,2640
 src/linters/file_placement/directory_matcher.py,sha256=YaBeLGiT4bgqN_v4FmEmSASOBxkMC1lyEYpL17wLIDY,2607
@@ -76,8 +83,8 @@ src/orchestrator/language_detector.py,sha256=rHyVMApit80NTTNyDH1ObD1usKD8LjGmH3D
 src/templates/thailint_config_template.yaml,sha256=u8WFv2coE4uqfgf_slw7xjo4kGYIowDm1RIgxsKQzrE,4275
 src/utils/__init__.py,sha256=NiBtKeQ09Y3kuUzeN4O1JNfUIYPQDS2AP1l5ODq-Dec,125
 src/utils/project_root.py,sha256=b3YTEGTa9RPcOeHn1IByMMWyRiUabfVlpnlektL0A0o,6156
-thailint-0.4.
-thailint-0.4.
-thailint-0.4.
-thailint-0.4.
-thailint-0.4.
+thailint-0.4.6.dist-info/METADATA,sha256=3iBTAjUgiuhAv4-_oU4Bjn96LP1Xyl8tFdV8YH2NitI,37855
+thailint-0.4.6.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+thailint-0.4.6.dist-info/entry_points.txt,sha256=l7DQJgU18sVLDpSaXOXY3lLhnQHQIRrSJZTQjG1cEAk,62
+thailint-0.4.6.dist-info/licenses/LICENSE,sha256=kxh1J0Sb62XvhNJ6MZsVNe8PqNVJ7LHRn_EWa-T3djw,1070
+thailint-0.4.6.dist-info/RECORD,,

File without changes: {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/WHEEL
File without changes: {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/entry_points.txt
File without changes: {thailint-0.4.4.dist-info → thailint-0.4.6.dist-info}/licenses/LICENSE