thailint 0.15.2__py3-none-any.whl → 0.15.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
src/linters/dry/linter.py CHANGED
@@ -21,7 +21,9 @@ Interfaces: DRYRule.check(context) -> list[Violation], finalize() -> list[Violat
  Implementation: Delegates all logic to helper classes, maintains only orchestration and state

  Suppressions:
- - too-many-instance-attributes: DRYComponents groups related helper dependencies
+ - too-many-instance-attributes: DRYComponents groups helper dependencies; DRYRule has 8
+ attributes due to stateful caching requirements (storage, config, constants, file contents
+ for ignore directive processing)
  - B101: Type narrowing assertions after guards (storage initialized, file_path/content set)
  """

@@ -34,6 +36,7 @@ from pathlib import Path
  from src.core.base import BaseLintContext, BaseLintRule
  from src.core.linter_utils import should_process_file
  from src.core.types import Violation
+ from src.linter_config.ignore import IgnoreDirectiveParser

  from .config import DRYConfig
  from .config_loader import ConfigLoader
@@ -46,7 +49,7 @@ from .inline_ignore import InlineIgnoreParser
  from .python_constant_extractor import extract_python_constants
  from .storage_initializer import StorageInitializer
  from .typescript_constant_extractor import TypeScriptConstantExtractor
- from .violation_generator import ViolationGenerator
+ from .violation_generator import IgnoreContext, ViolationGenerator


  @dataclass
@@ -62,7 +65,7 @@ class DRYComponents: # pylint: disable=too-many-instance-attributes
  constant_violation_builder: ConstantViolationBuilder


- class DRYRule(BaseLintRule):
+ class DRYRule(BaseLintRule): # pylint: disable=too-many-instance-attributes
  """Detects duplicate code across project files."""

  def __init__(self) -> None:
@@ -71,10 +74,14 @@ class DRYRule(BaseLintRule):
  self._initialized = False
  self._config: DRYConfig | None = None
  self._file_analyzer: FileAnalyzer | None = None
+ self._project_root: Path | None = None

  # Collected constants for cross-file detection: list of (file_path, ConstantInfo)
  self._constants: list[tuple[Path, ConstantInfo]] = []

+ # Cache file contents for ignore directive checking during finalize
+ self._file_contents: dict[str, str] = {}
+
  # Helper components grouped to reduce instance attributes
  self._helpers = DRYComponents(
  config_loader=ConfigLoader(),
@@ -133,6 +140,12 @@ class DRYRule(BaseLintRule):
  assert context.file_content is not None # nosec B101

  file_path = context.file_path
+ # Cache file content for ignore directive checking in finalize
+ self._file_contents[str(file_path)] = context.file_content
+ # Get project root from context metadata if available
+ if self._project_root is None:
+ self._project_root = self._get_project_root(context)
+
  self._helpers.inline_ignore.parse_file(file_path, context.file_content)
  self._ensure_storage_initialized(context, config)
  self._analyze_and_store(context, config)
@@ -182,21 +195,56 @@ class DRYRule(BaseLintRule):
  if extract_fn:
  self._constants.extend((file_path, c) for c in extract_fn(context.file_content))

+ def _get_project_root(self, context: BaseLintContext) -> Path | None:
+ """Get project root from context if available.
+
+ Args:
+ context: Lint context
+
+ Returns:
+ Project root path or None if not available
+ """
+ # Try to get from metadata (orchestrator sets this)
+ if hasattr(context, "metadata") and isinstance(context.metadata, dict):
+ project_root = context.metadata.get("project_root")
+ if project_root:
+ return Path(project_root)
+
+ # Fallback: derive from file path
+ if context.file_path:
+ return Path(context.file_path).parent
+
+ return None
+
  def finalize(self) -> list[Violation]:
  """Generate violations after all files processed."""
  if not self._storage or not self._config:
  return []
+
+ # Create ignore context for violation filtering
+ ignore_parser = IgnoreDirectiveParser(self._project_root)
+ ignore_ctx = IgnoreContext(
+ inline_ignore=self._helpers.inline_ignore,
+ shared_parser=ignore_parser,
+ file_contents=self._file_contents,
+ )
+
  violations = self._helpers.violation_generator.generate_violations(
- self._storage, self.rule_id, self._config, self._helpers.inline_ignore
+ self._storage, self.rule_id, self._config, ignore_ctx
  )
  if self._config.detect_duplicate_constants and self._constants:
- violations.extend(
- _generate_constant_violations(
- self._constants, self._config, self._helpers, self.rule_id
- )
+ constant_violations = _generate_constant_violations(
+ self._constants, self._config, self._helpers, self.rule_id
  )
+ # Filter constant violations through shared ignore parser
+ constant_violations = _filter_ignored_violations(
+ constant_violations, ignore_parser, self._file_contents
+ )
+ violations.extend(constant_violations)
+
  self._helpers.inline_ignore.clear()
  self._constants = []
+ self._file_contents = {}
  return violations


@@ -225,3 +273,26 @@ def _generate_constant_violations(
  groups = find_constant_groups(constants)
  helpers.constant_violation_builder.min_occurrences = config.min_constant_occurrences
  return helpers.constant_violation_builder.build_violations(groups, rule_id)
+
+
+ def _filter_ignored_violations(
+ violations: list[Violation],
+ ignore_parser: IgnoreDirectiveParser,
+ file_contents: dict[str, str],
+ ) -> list[Violation]:
+ """Filter violations through the shared ignore directive parser.
+
+ Args:
+ violations: List of violations to filter
+ ignore_parser: Shared ignore directive parser
+ file_contents: Cached file contents for checking ignore directives
+
+ Returns:
+ Filtered list of violations not matching ignore directives
+ """
+ filtered = []
+ for violation in violations:
+ file_content = file_contents.get(violation.file_path, "")
+ if not ignore_parser.should_ignore_violation(violation, file_content):
+ filtered.append(violation)
+ return filtered

src/linters/dry/violation_generator.py CHANGED
@@ -10,14 +10,16 @@ Overview: Handles violation generation for duplicate code blocks. Queries storag

  Dependencies: DuplicateStorage, ViolationDeduplicator, DRYViolationBuilder, Violation, DRYConfig

- Exports: ViolationGenerator class
+ Exports: ViolationGenerator class, IgnoreContext dataclass

  Interfaces: ViolationGenerator.generate_violations(storage, rule_id, config) -> list[Violation]

  Implementation: Queries storage, deduplicates blocks, builds violations, filters by ignore patterns
  """

+ from dataclasses import dataclass
  from pathlib import Path
+ from typing import TYPE_CHECKING

  from src.core.types import Violation
  from src.orchestrator.language_detector import detect_language
@@ -28,6 +30,18 @@ from .duplicate_storage import DuplicateStorage
  from .inline_ignore import InlineIgnoreParser
  from .violation_builder import DRYViolationBuilder

+ if TYPE_CHECKING:
+ from src.linter_config.ignore import IgnoreDirectiveParser
+
+
+ @dataclass
+ class IgnoreContext:
+ """Context for ignore directive filtering."""
+
+ inline_ignore: InlineIgnoreParser
+ shared_parser: "IgnoreDirectiveParser | None" = None
+ file_contents: dict[str, str] | None = None
+

  class ViolationGenerator:
  """Generates violations from duplicate code blocks."""
@@ -42,7 +56,7 @@ class ViolationGenerator:
  storage: DuplicateStorage,
  rule_id: str,
  config: DRYConfig,
- inline_ignore: InlineIgnoreParser,
+ ignore_ctx: IgnoreContext,
  ) -> list[Violation]:
  """Generate violations from storage.

@@ -50,19 +64,42 @@ class ViolationGenerator:
  storage: Duplicate storage instance
  rule_id: Rule identifier for violations
  config: DRY configuration with ignore patterns
- inline_ignore: Parser with inline ignore directives
+ ignore_ctx: Context containing ignore parsers and file contents

  Returns:
  List of violations filtered by ignore patterns and inline directives
  """
- duplicate_hashes = storage.duplicate_hashes
- violations = []
+ raw_violations = self._collect_violations(storage, rule_id, config)
+ deduplicated = self._deduplicator.deduplicate_violations(raw_violations)
+ pattern_filtered = self._filter_ignored(deduplicated, config.ignore_patterns)
+ inline_filtered = self._filter_inline_ignored(pattern_filtered, ignore_ctx.inline_ignore)
+
+ # Apply shared ignore directive filtering for block and line directives
+ if ignore_ctx.shared_parser and ignore_ctx.file_contents:
+ return self._filter_shared_ignored(
+ inline_filtered, ignore_ctx.shared_parser, ignore_ctx.file_contents
+ )
+
+ return inline_filtered

- for hash_value in duplicate_hashes:
+ def _collect_violations(
+ self, storage: DuplicateStorage, rule_id: str, config: DRYConfig
+ ) -> list[Violation]:
+ """Collect raw violations from storage duplicate hashes.
+
+ Args:
+ storage: Duplicate storage instance
+ rule_id: Rule identifier for violations
+ config: DRY configuration
+
+ Returns:
+ List of raw violations before filtering
+ """
+ violations = []
+ for hash_value in storage.duplicate_hashes:
  blocks = storage.get_blocks_for_hash(hash_value)
  dedup_blocks = self._deduplicator.deduplicate_blocks(blocks)

- # Check min_occurrences threshold (language-aware)
  if not self._meets_min_occurrences(dedup_blocks, config):
  continue

@@ -70,9 +107,7 @@ class ViolationGenerator:
  violation = self._violation_builder.build_violation(block, dedup_blocks, rule_id)
  violations.append(violation)

- deduplicated = self._deduplicator.deduplicate_violations(violations)
- pattern_filtered = self._filter_ignored(deduplicated, config.ignore_patterns)
- return self._filter_inline_ignored(pattern_filtered, inline_ignore)
+ return violations

  def _meets_min_occurrences(self, blocks: list, config: DRYConfig) -> bool:
  """Check if blocks meet minimum occurrence threshold for the language.
@@ -169,3 +204,28 @@ class ViolationGenerator:
  return int(message[start:end])
  except (ValueError, IndexError):
  return 1
+
+ def _filter_shared_ignored(
+ self,
+ violations: list[Violation],
+ ignore_parser: "IgnoreDirectiveParser",
+ file_contents: dict[str, str],
+ ) -> list[Violation]:
+ """Filter violations using the shared ignore directive parser.
+
+ This enables standard # thailint: ignore-start/end directives for DRY linter.
+
+ Args:
+ violations: List of violations to filter
+ ignore_parser: Shared ignore directive parser
+ file_contents: Cached file contents for ignore checking
+
+ Returns:
+ Filtered list of violations
+ """
+ filtered = []
+ for violation in violations:
+ file_content = file_contents.get(violation.file_path, "")
+ if not ignore_parser.should_ignore_violation(violation, file_content):
+ filtered.append(violation)
+ return filtered
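
The _filter_shared_ignored docstring above says this change lets the standard # thailint: ignore-start / # thailint: ignore-end block directives suppress DRY findings. A minimal sketch of the intended usage, assuming the bare (un-scoped) form of the block directives; whether a rule id can be scoped in brackets is not shown in this diff:

    # thailint: ignore-start
    def copy_of_shared_helper(items):
        # Duplicated block that the DRY linter should now skip during finalize.
        return [item for item in items if item]
    # thailint: ignore-end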

src/linters/lazy_ignores/config.py CHANGED
@@ -42,6 +42,10 @@ class LazyIgnoresConfig: # pylint: disable=too-many-instance-attributes
  # Orphaned detection
  check_orphaned: bool = True # Header entries without matching ignores

+ # Inline justification options
+ allow_inline_justifications: bool = True # Allow " - reason" syntax
+ min_justification_length: int = 10 # Minimum chars for valid justification
+
  # File patterns to ignore
  ignore_patterns: list[str] = field(
  default_factory=lambda: [
@@ -64,5 +68,7 @@ class LazyIgnoresConfig: # pylint: disable=too-many-instance-attributes
  check_thailint_ignore=config_dict.get("check_thailint_ignore", True),
  check_test_skips=config_dict.get("check_test_skips", True),
  check_orphaned=config_dict.get("check_orphaned", True),
+ allow_inline_justifications=config_dict.get("allow_inline_justifications", True),
+ min_justification_length=config_dict.get("min_justification_length", 10),
  ignore_patterns=config_dict.get("ignore_patterns", []),
  )
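
The two new options default to allowing inline justifications of at least 10 characters. A small construction sketch, assuming (as the visible fields suggest) that every LazyIgnoresConfig field has a default:

    from src.linters.lazy_ignores.config import LazyIgnoresConfig

    # Defaults: inline " - reason" justifications accepted once they reach 10 characters.
    default_config = LazyIgnoresConfig()

    # Stricter variant: demand a longer explanation after the " - " delimiter.
    strict_config = LazyIgnoresConfig(min_justification_length=25)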

src/linters/lazy_ignores/directive_utils.py CHANGED
@@ -5,15 +5,21 @@ Scope: Common directive creation and path normalization for ignore detectors

  Overview: Provides shared utility functions used across Python, TypeScript, and test skip
  detectors. Centralizes logic for normalizing file paths, extracting rule IDs from
- regex matches, and creating IgnoreDirective objects to avoid code duplication.
+ regex matches, extracting inline justifications, and creating IgnoreDirective objects
+ to avoid code duplication.

  Dependencies: re for match handling, pathlib for file paths, types module for dataclasses

- Exports: normalize_path, extract_rule_ids, create_directive, create_directive_no_rules
+ Exports: normalize_path, extract_rule_ids, create_directive, create_directive_no_rules,
+ extract_inline_justification

  Interfaces: Pure utility functions with no state

  Implementation: Simple helper functions for directive creation
+
+ Suppressions:
+ too-many-arguments: create_directive needs all params for proper IgnoreDirective construction
+ too-many-positional-arguments: Factory function mirrors IgnoreDirective fields
  """

  import re
@@ -21,6 +27,9 @@ from pathlib import Path

  from src.linters.lazy_ignores.types import IgnoreDirective, IgnoreType

+ # Pattern for inline justification: space-dash-space followed by text
+ INLINE_JUSTIFICATION_PATTERN = re.compile(r"\s+-\s+(.+)$")
+

  def normalize_path(file_path: Path | str | None) -> Path:
  """Normalize file path to Path object.
@@ -38,6 +47,29 @@ def normalize_path(file_path: Path | str | None) -> Path:
  return file_path


+ def extract_inline_justification(raw_text: str) -> str | None:
+ """Extract inline justification from raw directive text.
+
+ Looks for the pattern " - " (space-dash-space) followed by justification text.
+ This allows inline justifications like:
+ # noqa: PLR0912 - state machine inherently complex
+ # type: ignore[arg-type] - library has typing bug
+
+ Args:
+ raw_text: The raw comment text containing the ignore directive
+
+ Returns:
+ The justification text if found, None otherwise.
+ Returns None for empty/whitespace-only justifications.
+ """
+ match = INLINE_JUSTIFICATION_PATTERN.search(raw_text)
+ if not match:
+ return None
+
+ justification = match.group(1).strip()
+ return justification if justification else None
+
+
  def _get_captured_group(match: re.Match[str]) -> str | None:
  """Get the first captured group from a regex match if it exists.

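
A quick usage sketch of the new helper; the behavior follows directly from the regex and function body added above:

    from src.linters.lazy_ignores.directive_utils import extract_inline_justification

    # " - " followed by text yields the justification text.
    print(extract_inline_justification("# noqa: PLR0912 - state machine inherently complex"))
    # -> "state machine inherently complex"

    # No " - " delimiter yields None.
    print(extract_inline_justification("# type: ignore[arg-type]"))
    # -> None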
@@ -69,12 +101,13 @@ def extract_rule_ids(match: re.Match[str]) -> list[str]:
  return [rule_id for rule_id in ids if rule_id]


- def create_directive(
+ def create_directive( # pylint: disable=too-many-arguments,too-many-positional-arguments
  match: re.Match[str],
  ignore_type: IgnoreType,
  line_num: int,
  file_path: Path,
  rule_ids: tuple[str, ...] | None = None,
+ full_line: str | None = None,
  ) -> IgnoreDirective:
  """Create an IgnoreDirective from a regex match.

@@ -84,6 +117,7 @@ def create_directive(
  line_num: 1-indexed line number
  file_path: Path to source file
  rule_ids: Optional tuple of rule IDs; if None, extracts from match group 1
+ full_line: Optional full line text for extracting inline justification

  Returns:
  IgnoreDirective for this match
@@ -91,13 +125,22 @@ def create_directive(
  if rule_ids is None:
  rule_ids = tuple(extract_rule_ids(match))

+ # Use full line from match position to capture inline justification
+ if full_line is not None:
+ raw_text = full_line[match.start() :].strip()
+ else:
+ raw_text = match.group(0).strip()
+
+ inline_justification = extract_inline_justification(raw_text)
+
  return IgnoreDirective(
  ignore_type=ignore_type,
  rule_ids=rule_ids,
  line=line_num,
  column=match.start() + 1,
- raw_text=match.group(0).strip(),
+ raw_text=raw_text,
  file_path=file_path,
+ inline_justification=inline_justification,
  )


src/linters/lazy_ignores/matcher.py CHANGED
@@ -32,13 +32,15 @@ from .types import IgnoreDirective, IgnoreType
  class IgnoreSuppressionMatcher:
  """Matches ignore directives with header suppressions."""

- def __init__(self, parser: SuppressionsParser) -> None:
+ def __init__(self, parser: SuppressionsParser, min_justification_length: int = 10) -> None:
  """Initialize the matcher.

  Args:
  parser: SuppressionsParser for rule ID normalization.
+ min_justification_length: Minimum length for valid inline justification.
  """
  self._parser = parser
+ self._min_justification_length = min_justification_length

  def collect_used_rule_ids(self, ignores: list[IgnoreDirective]) -> set[str]:
  """Collect all normalized rule IDs used in ignore directives.
@@ -72,6 +74,9 @@ class IgnoreSuppressionMatcher:
  ) -> list[str]:
  """Find which rule IDs in an ignore are not justified.

+ Checks inline justification first (higher precedence), then falls back
+ to header-based suppressions.
+
  Args:
  ignore: The ignore directive to check.
  suppressions: Dict of normalized rule IDs to justifications.
@@ -79,17 +84,45 @@ class IgnoreSuppressionMatcher:
  Returns:
  List of unjustified rule IDs (original case preserved).
  """
+ if self._has_valid_inline_justification(ignore):
+ return []
+
  if not ignore.rule_ids:
- type_key = self._normalize(ignore.ignore_type.value)
- if type_key not in suppressions:
- return [ignore.ignore_type.value]
+ return self._check_bare_ignore(ignore, suppressions)
+
+ return self._check_rule_specific_ignore(ignore, suppressions)
+
+ def _check_bare_ignore(
+ self, ignore: IgnoreDirective, suppressions: dict[str, str]
+ ) -> list[str]:
+ """Check if a bare ignore (no specific rules) is justified."""
+ type_key = self._normalize(ignore.ignore_type.value)
+ if type_key in suppressions:
  return []
+ return [ignore.ignore_type.value]

- unjustified: list[str] = []
- for rule_id in ignore.rule_ids:
- if not self._is_rule_justified(ignore, rule_id, suppressions):
- unjustified.append(rule_id)
- return unjustified
+ def _check_rule_specific_ignore(
+ self, ignore: IgnoreDirective, suppressions: dict[str, str]
+ ) -> list[str]:
+ """Check which specific rule IDs are not justified."""
+ return [
+ rule_id
+ for rule_id in ignore.rule_ids
+ if not self._is_rule_justified(ignore, rule_id, suppressions)
+ ]
+
+ def _has_valid_inline_justification(self, ignore: IgnoreDirective) -> bool:
+ """Check if the ignore has a valid inline justification.
+
+ Args:
+ ignore: The ignore directive to check.
+
+ Returns:
+ True if the ignore has an inline justification meeting minimum length.
+ """
+ if not ignore.inline_justification:
+ return False
+ return len(ignore.inline_justification) >= self._min_justification_length

  def _is_rule_justified(
  self, ignore: IgnoreDirective, rule_id: str, suppressions: dict[str, str]
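
The precedence added above reduces to a simple rule: an inline justification of at least min_justification_length characters satisfies the check on its own, and the header Suppressions section is only consulted when no such justification exists. A standalone sketch of that length check (not the package's method, just an illustration):

    def has_valid_inline_justification(justification: str | None, min_length: int = 10) -> bool:
        # Mirrors the check added to IgnoreSuppressionMatcher above.
        return bool(justification) and len(justification) >= min_length

    print(has_valid_inline_justification("state machine inherently complex"))  # True
    print(has_valid_inline_justification("todo"))                               # False: under min_length
    print(has_valid_inline_justification(None))                                 # False: nothing to check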

src/linters/lazy_ignores/python_analyzer.py CHANGED
@@ -205,5 +205,5 @@ class PythonIgnoreDetector:
  continue
  if _is_pattern_in_string_literal(line, match.start()):
  continue
- found.append(create_directive(match, ignore_type, line_num, file_path))
+ found.append(create_directive(match, ignore_type, line_num, file_path, full_line=line))
  return found

src/linters/lazy_ignores/types.py CHANGED
@@ -59,6 +59,7 @@ class IgnoreDirective:
  column: int
  raw_text: str # Original comment text
  file_path: Path
+ inline_justification: str | None = None # Justification after " - " delimiter


  @dataclass(frozen=True)

src/linters/lazy_ignores/violation_builder.py CHANGED
@@ -72,8 +72,12 @@ def _build_unjustified_suggestion(rule_id: str) -> str:

  suppression_entries = "\n".join(f" {rid}: [Your justification here]" for rid in rule_ids)

- return f"""To fix, add an entry to the file header Suppressions section:
+ return f"""To fix, either:

+ 1. Add an inline justification (10+ chars) after the ignore directive:
+ # noqa: {rule_ids[0]} - [Your justification here]
+
+ 2. Or add an entry to the file header Suppressions section:
  Suppressions:
  {suppression_entries}


src/linters/stringly_typed/storage.py CHANGED
@@ -461,7 +461,7 @@ class StringlyTypedStorage: # thailint: ignore[srp]
  List of (function_name, param_index, unique_values) tuples
  """
  cursor = self._db.execute(
- """SELECT function_name, param_index, GROUP_CONCAT(DISTINCT string_value)
+ """SELECT function_name, param_index, json_group_array(DISTINCT string_value)
  FROM function_calls
  GROUP BY function_name, param_index
  HAVING COUNT(DISTINCT string_value) >= ?
@@ -470,12 +470,7 @@ class StringlyTypedStorage: # thailint: ignore[srp]
  (min_values, max_values, min_files),
  )

- results: list[tuple[str, int, set[str]]] = []
- for row in cursor.fetchall():
- values = set(row[2].split(",")) if row[2] else set()
- results.append((row[0], row[1], values))
-
- return results
+ return [(row[0], row[1], set(json.loads(row[2]))) for row in cursor.fetchall()]

  def get_calls_by_function(
  self, function_name: str, param_index: int
@@ -566,7 +561,7 @@ class StringlyTypedStorage: # thailint: ignore[srp]
  List of (variable_name, unique_values) tuples
  """
  cursor = self._db.execute(
- """SELECT variable_name, GROUP_CONCAT(DISTINCT compared_value)
+ """SELECT variable_name, json_group_array(DISTINCT compared_value)
  FROM string_comparisons
  GROUP BY variable_name
  HAVING COUNT(DISTINCT compared_value) >= ?
@@ -574,12 +569,7 @@ class StringlyTypedStorage: # thailint: ignore[srp]
  (min_values, min_files),
  )

- results: list[tuple[str, set[str]]] = []
- for row in cursor.fetchall():
- values = set(row[1].split(",")) if row[1] else set()
- results.append((row[0], values))
-
- return results
+ return [(row[0], set(json.loads(row[1]))) for row in cursor.fetchall()]

  def get_comparisons_by_variable(self, variable_name: str) -> list[StoredComparison]:
  """Get all comparisons for a specific variable.

src/linters/stringly_typed/violation_generator.py CHANGED
@@ -54,6 +54,11 @@ def _is_allowed_value_set(values: set[str], config: StringlyTypedConfig) -> bool
  return any(values == set(allowed) for allowed in config.allowed_string_sets)


+ def _has_spaces(values: set[str]) -> bool:
+ """Check if any value contains spaces (indicates SQL/templates, not enums)."""
+ return any(" " in v for v in values)
+
+
  def _is_enum_candidate(pattern: StoredPattern, config: StringlyTypedConfig) -> bool:
  """Check if pattern's value count is within enum range."""
  value_count = len(pattern.string_values)
@@ -65,6 +70,15 @@ def _is_pattern_allowed(pattern: StoredPattern, config: StringlyTypedConfig) ->
  return _is_allowed_value_set(set(pattern.string_values), config)


+ def _should_skip_pattern_values(values: set[str], config: StringlyTypedConfig) -> bool:
+ """Check if pattern values should be skipped."""
+ return (
+ _is_allowed_value_set(values, config)
+ or context_filter.are_all_values_excluded(values)
+ or _has_spaces(values)
+ )
+
+
  def _should_skip_patterns(patterns: list[StoredPattern], config: StringlyTypedConfig) -> bool:
  """Check if pattern group should be skipped based on config."""
  if not patterns:
@@ -72,12 +86,7 @@ def _should_skip_patterns(patterns: list[StoredPattern], config: StringlyTypedCo
  first = patterns[0]
  if not _is_enum_candidate(first, config):
  return True
- if _is_pattern_allowed(first, config):
- return True
- # Skip if all values match excluded patterns (numeric strings, etc.)
- if context_filter.are_all_values_excluded(set(first.string_values)):
- return True
- return False
+ return _should_skip_pattern_values(set(first.string_values), config)


  def _should_skip_comparison(unique_values: set[str], config: StringlyTypedConfig) -> bool:
@@ -88,6 +97,8 @@ def _should_skip_comparison(unique_values: set[str], config: StringlyTypedConfig
  return True
  if context_filter.are_all_values_excluded(unique_values):
  return True
+ if _has_spaces(unique_values):
+ return True
  return False


@@ -250,6 +261,15 @@ def _process_pattern_group(
  # --- Helper functions for function call processing ---


+ def _is_valid_function(name: str, idx: int, vals: set[str], config: StringlyTypedConfig) -> bool:
+ """Check if a function passes all validity filters."""
+ if _is_allowed_value_set(vals, config):
+ return False
+ if _has_spaces(vals):
+ return False
+ return context_filter.should_include(name, idx, vals)
+
+
  def _get_valid_functions(
  storage: StringlyTypedStorage,
  config: StringlyTypedConfig,
@@ -261,13 +281,7 @@ def _get_valid_functions(
  max_values=config.max_values_for_enum,
  min_files=min_files,
  )
-
- return [
- (name, idx, vals)
- for name, idx, vals in limited_funcs
- if not _is_allowed_value_set(vals, config)
- and context_filter.should_include(name, idx, vals)
- ]
+ return [(n, i, v) for n, i, v in limited_funcs if _is_valid_function(n, i, v, config)]


  def _build_call_violations(
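
The new _has_spaces filter rests on a simple heuristic: enum-like values ("red", "pending", "v2") are single tokens, while SQL fragments, templates, and log messages contain spaces and should not be reported as stringly-typed enum candidates. Reproduced standalone for illustration:

    def _has_spaces(values: set[str]) -> bool:
        """Check if any value contains spaces (indicates SQL/templates, not enums)."""
        return any(" " in v for v in values)

    print(_has_spaces({"red", "green", "blue"}))                 # False: still an enum candidate
    print(_has_spaces({"SELECT id FROM users", "DELETE FROM"}))  # True: skipped by the new filters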

thailint-0.15.2.dist-info/METADATA → thailint-0.15.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: thailint
- Version: 0.15.2
+ Version: 0.15.4
  Summary: The AI Linter - Enterprise-grade linting and governance for AI-generated code across multiple languages
  License: MIT
  License-File: LICENSE

thailint-0.15.2.dist-info/RECORD → thailint-0.15.4.dist-info/RECORD CHANGED
@@ -79,7 +79,7 @@ src/linters/dry/deduplicator.py,sha256=a1TRvldxCszf5QByo1ihXF3W98dpGuyaRT74jPfQf
  src/linters/dry/duplicate_storage.py,sha256=9pIALnwAuz5BJUYNXrPbObbP932CE9x0vgUkICryT_s,1970
  src/linters/dry/file_analyzer.py,sha256=3uO2fy8HvxFiRCtYaBs9LztkzvDynqmy7INYRaJjK-g,2990
  src/linters/dry/inline_ignore.py,sha256=3fgPsn_kXeF7kVy_9FL8xYwSUA9cSmgUR3F0tOwzjuY,4275
- src/linters/dry/linter.py,sha256=-8MiprIs2MsAj2fAmg-hCbasDVBkLLgMP9BR2_ZcDv0,9304
+ src/linters/dry/linter.py,sha256=oXhFAFYrukiVQq8KmfwGBXvbPLutwW_OuzZAElRTYbc,12066
  src/linters/dry/python_analyzer.py,sha256=b7n7u3NbOW3DFZHeC-bG3N_6VgW2NU2easG8pIHYaZc,10962
  src/linters/dry/python_constant_extractor.py,sha256=v9-3NDCq9CKT6uvti_PRIC3DMEESHW2g3i2IfFy0hw8,3469
  src/linters/dry/single_statement_detector.py,sha256=ZmMo_tsvVxugFmnNzWtGc-4e5sGOw18HHxHre3lRUUA,18220
@@ -91,7 +91,7 @@ src/linters/dry/typescript_statement_detector.py,sha256=8WiwcjLs8j8_wp0UTsoXN0vV
  src/linters/dry/typescript_value_extractor.py,sha256=TbHIvcEnmjSV1WNnkRbXeUi_JA9-rGX0_BrD4bczO9Y,2519
  src/linters/dry/violation_builder.py,sha256=WkCibSNytoqMHGC-3GrVff4PD7-SOnVzzZgkMeqmzco,2952
  src/linters/dry/violation_filter.py,sha256=2e6NHN7GYadt27Pz5kiXhttKPJu1loiXhOi0G3J3Epk,3211
- src/linters/dry/violation_generator.py,sha256=7jkRfauwAEdipFuDhoy82eIeXS1ECN4W_3jspMlnFaU,6068
+ src/linters/dry/violation_generator.py,sha256=aXKVB-ERpigzwFD4iY5GoN2BWTESBhZaO6Qr7Vm7mzU,8146
  src/linters/file_header/__init__.py,sha256=S3a2xrOlxnNWD02To5K2ZwILsNEvSj1IvUAH8RjgOV4,791
  src/linters/file_header/atemporal_detector.py,sha256=lerkiMwiUhYv3X3vxpm_88otZhSMG3DBgryhFyxe3As,3934
  src/linters/file_header/base_parser.py,sha256=k6ymg1ocuesA6PH7NMDQOy0LTgSglu0wXed68fPaHxM,3382
@@ -114,17 +114,17 @@ src/linters/file_placement/pattern_validator.py,sha256=P6qbgnVxxFlBrsPzxjXjVlVPp
  src/linters/file_placement/rule_checker.py,sha256=HInWmyxxZfqmrBH-5RealvYIV5ChzGd4EwxB6RNZE9w,7930
  src/linters/file_placement/violation_factory.py,sha256=NkQmBcgpa3g3W2ZdFZNQ5djLVP4x9OKs65d7F1rCKvM,6040
  src/linters/lazy_ignores/__init__.py,sha256=qPwCC1Y-TPn6tNLTO4X6QsACaAiPMBpsIKlKh_dSz5k,1656
- src/linters/lazy_ignores/config.py,sha256=IBW9hO5QgVyIhuSJw6KrupCjbVilDiNUUHL-TcQMOXU,2591
- src/linters/lazy_ignores/directive_utils.py,sha256=6Mc56hrcFe21LG7PMmIVEPDGKUF6F9dffhHkwixj1R8,3432
+ src/linters/lazy_ignores/config.py,sha256=Bg7A01BLt4zhXOSb_AQsNP7vNtyyL30HTcACdsdBStY,2960
+ src/linters/lazy_ignores/directive_utils.py,sha256=x0Xaj92R1Z7kAmCpH0rUWRBd32rRwd8z9NBZNgG_LeI,5111
  src/linters/lazy_ignores/header_parser.py,sha256=ADtVJUoJfoVx_zehoPpo6YLws0N-7c2PLK1PhNqn5Uk,6030
  src/linters/lazy_ignores/linter.py,sha256=1bl3b2NliVurlb9bUcZUzd8sBZG6KJhRODD-Mb46HnE,6052
- src/linters/lazy_ignores/matcher.py,sha256=jLE12aPPEz6tBejFrmXGXdCb7kzeFh8IYH5b_UjHqpQ,5201
- src/linters/lazy_ignores/python_analyzer.py,sha256=ia60Y0mw0FxngXWw55JqZggWL8HJD6M3mN93PoqyOGA,7367
+ src/linters/lazy_ignores/matcher.py,sha256=3djc1iRiwBeigmRl5OIeIIZqRYRiku7E65WyzELg2LY,6496
+ src/linters/lazy_ignores/python_analyzer.py,sha256=MuP-mXuLrZ3Qe3OHl63iVVvlja5YbziUUEcOKkKVNG8,7383
  src/linters/lazy_ignores/rule_id_utils.py,sha256=sE7kAQFO6zGAR5JQN2OLLVjAdiVNPZONTb0sY01ri9w,5809
  src/linters/lazy_ignores/skip_detector.py,sha256=9RK5uD4b2pAfdJsK1dHRTAWG4kKfAf1yfjc1OBsI14M,10461
- src/linters/lazy_ignores/types.py,sha256=ygcRYjiuCAx4qDZoBG6xSWNbd1Fh7_FPwoyH8PvLFvc,2251
+ src/linters/lazy_ignores/types.py,sha256=2lnFazlxbJ7SEThEVcWaV5PWLztYyGcPzYQVzHE9iO0,2334
  src/linters/lazy_ignores/typescript_analyzer.py,sha256=k8R60Mcw9OxvHFFUPhUErrb-tbek7Q7PXXZDq_H0ioM,5322
- src/linters/lazy_ignores/violation_builder.py,sha256=Z5RlCRJKkTfetkRVqsu1rfJRgBeiE9RTEu1djr-nmto,4203
+ src/linters/lazy_ignores/violation_builder.py,sha256=FIlybBPgXTQfot745eQwM4gm7-Pst50SBpBhriG53-w,4342
  src/linters/lbyl/__init__.py,sha256=5_an3Zy9iQvbajvuQT_DTMtlIMfOWxwewZGFe-cSDg8,1124
  src/linters/lbyl/config.py,sha256=kWCjBRs1HEVf9oK4dHKHfjQX8KU-o5i-Jc_94ULDD4Y,2434
  src/linters/lbyl/linter.py,sha256=sp6PETOKuk13wzfWYPYmWVqcdCwcFRr6CGvFmNPunaU,2288
@@ -206,21 +206,21 @@ src/linters/stringly_typed/python/constants.py,sha256=IF3Y2W96hihHlr5HMenq5Q93uO
  src/linters/stringly_typed/python/match_analyzer.py,sha256=mgarAtnL79iOrK6xuiRE2Hw-9tR8ocrIRza6g0SorY8,2719
  src/linters/stringly_typed/python/validation_detector.py,sha256=jzcowBcA7R_aKeXFf2sxI2yGGUoT1lGj1y7DTnmO88M,6288
  src/linters/stringly_typed/python/variable_extractor.py,sha256=yYJQ5jTSMz94SD_0IMfCHMWcw1F57GmRuh9h51oiAEs,2769
- src/linters/stringly_typed/storage.py,sha256=UoaY3Ejpb4k-7it15d_wzZFcEJNjWTClMJhkY6wkc9A,22021
+ src/linters/stringly_typed/storage.py,sha256=4ymgg1JiBPLUazKShOb6djPGAVSma3SOIUDmOmeqH5A,21742
  src/linters/stringly_typed/storage_initializer.py,sha256=3-4St1ieN8325Xkb0HTS27dVyjjluM_X-bkwOfJW1JM,1548
  src/linters/stringly_typed/typescript/__init__.py,sha256=lOgclS9wxLNyszfwVGbVxKfCkbTLX1pvskHzcADi5Xg,1121
  src/linters/stringly_typed/typescript/analyzer.py,sha256=iNEk6wQJJfmJoRTXx29GEeqTpKzQ5TcNIimSuQPb6UU,6376
  src/linters/stringly_typed/typescript/call_tracker.py,sha256=NPRpjqTe-Owi3_qJk_baojAazqaL6EsH4E2SIOsUAjU,11299
  src/linters/stringly_typed/typescript/comparison_tracker.py,sha256=TiEldIqppu6i2XYd9a040HK0U4cy7IFf6Qjjlb93wAA,12573
- src/linters/stringly_typed/violation_generator.py,sha256=aye60bShNnt8f6BPQwduTOLX97jAuy7Z7DLq9wzrFB4,14769
+ src/linters/stringly_typed/violation_generator.py,sha256=g1dTc6EvjvTYmW3zdfTydmLaTCnqWDq4Q5UIntYxF1A,15336
  src/orchestrator/__init__.py,sha256=XXLDJq2oaB-TpP2Y97GRnde9EkITGuFCmuLrDfxI9nY,245
  src/orchestrator/core.py,sha256=rt3h-YFgF1aAFeKvTa0PP7k_8zfwpeGIqrIxKuyckxY,17683
  src/orchestrator/language_detector.py,sha256=ALt2BEZKXQM2dWr1ChF9lZVj83YF4Bl9xwrB9ezfmMc,2799
  src/templates/thailint_config_template.yaml,sha256=57ZtLxnIoOHtR5Ejq3clb4nhY9J4n6h36XFb79ZZPlc,12020
  src/utils/__init__.py,sha256=NiBtKeQ09Y3kuUzeN4O1JNfUIYPQDS2AP1l5ODq-Dec,125
  src/utils/project_root.py,sha256=aaxUM-LQ1okrPClmZWPFd_D09W3V1ArgJiidEEp_eU8,6262
- thailint-0.15.2.dist-info/METADATA,sha256=isRRo7g6PLejqPDJ0k8Fd2ni6QP8_jVE6Q638kdS2Jg,7202
- thailint-0.15.2.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- thailint-0.15.2.dist-info/entry_points.txt,sha256=DNoGUlxpaMFqxQDgHp1yeGqohOjdFR-kH19uHYi3OUY,72
- thailint-0.15.2.dist-info/licenses/LICENSE,sha256=kxh1J0Sb62XvhNJ6MZsVNe8PqNVJ7LHRn_EWa-T3djw,1070
- thailint-0.15.2.dist-info/RECORD,,
+ thailint-0.15.4.dist-info/METADATA,sha256=NNgbYCX8kCsjk1YvX9govMKHkNKPltrzwATaBA5fx9E,7202
+ thailint-0.15.4.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ thailint-0.15.4.dist-info/entry_points.txt,sha256=DNoGUlxpaMFqxQDgHp1yeGqohOjdFR-kH19uHYi3OUY,72
+ thailint-0.15.4.dist-info/licenses/LICENSE,sha256=kxh1J0Sb62XvhNJ6MZsVNe8PqNVJ7LHRn_EWa-T3djw,1070
+ thailint-0.15.4.dist-info/RECORD,,