thailint 0.9.0__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. src/__init__.py +1 -0
  2. src/cli/__init__.py +27 -0
  3. src/cli/__main__.py +22 -0
  4. src/cli/config.py +478 -0
  5. src/cli/linters/__init__.py +58 -0
  6. src/cli/linters/code_patterns.py +372 -0
  7. src/cli/linters/code_smells.py +343 -0
  8. src/cli/linters/documentation.py +155 -0
  9. src/cli/linters/shared.py +89 -0
  10. src/cli/linters/structure.py +313 -0
  11. src/cli/linters/structure_quality.py +316 -0
  12. src/cli/main.py +120 -0
  13. src/cli/utils.py +375 -0
  14. src/cli_main.py +34 -0
  15. src/config.py +2 -3
  16. src/core/rule_discovery.py +43 -10
  17. src/core/types.py +13 -0
  18. src/core/violation_utils.py +69 -0
  19. src/linter_config/ignore.py +32 -16
  20. src/linters/collection_pipeline/__init__.py +90 -0
  21. src/linters/collection_pipeline/config.py +63 -0
  22. src/linters/collection_pipeline/continue_analyzer.py +100 -0
  23. src/linters/collection_pipeline/detector.py +130 -0
  24. src/linters/collection_pipeline/linter.py +437 -0
  25. src/linters/collection_pipeline/suggestion_builder.py +63 -0
  26. src/linters/dry/block_filter.py +99 -9
  27. src/linters/dry/cache.py +94 -6
  28. src/linters/dry/config.py +47 -10
  29. src/linters/dry/constant.py +92 -0
  30. src/linters/dry/constant_matcher.py +214 -0
  31. src/linters/dry/constant_violation_builder.py +98 -0
  32. src/linters/dry/linter.py +89 -48
  33. src/linters/dry/python_analyzer.py +44 -431
  34. src/linters/dry/python_constant_extractor.py +101 -0
  35. src/linters/dry/single_statement_detector.py +415 -0
  36. src/linters/dry/token_hasher.py +5 -5
  37. src/linters/dry/typescript_analyzer.py +63 -382
  38. src/linters/dry/typescript_constant_extractor.py +134 -0
  39. src/linters/dry/typescript_statement_detector.py +255 -0
  40. src/linters/dry/typescript_value_extractor.py +66 -0
  41. src/linters/file_header/linter.py +9 -13
  42. src/linters/file_placement/linter.py +30 -10
  43. src/linters/file_placement/pattern_matcher.py +19 -5
  44. src/linters/magic_numbers/linter.py +8 -67
  45. src/linters/magic_numbers/typescript_ignore_checker.py +81 -0
  46. src/linters/nesting/linter.py +12 -9
  47. src/linters/print_statements/linter.py +7 -24
  48. src/linters/srp/class_analyzer.py +9 -9
  49. src/linters/srp/heuristics.py +6 -5
  50. src/linters/srp/linter.py +4 -5
  51. src/linters/stateless_class/linter.py +2 -2
  52. src/linters/stringly_typed/__init__.py +23 -0
  53. src/linters/stringly_typed/config.py +165 -0
  54. src/linters/stringly_typed/python/__init__.py +29 -0
  55. src/linters/stringly_typed/python/analyzer.py +198 -0
  56. src/linters/stringly_typed/python/condition_extractor.py +131 -0
  57. src/linters/stringly_typed/python/conditional_detector.py +176 -0
  58. src/linters/stringly_typed/python/constants.py +21 -0
  59. src/linters/stringly_typed/python/match_analyzer.py +88 -0
  60. src/linters/stringly_typed/python/validation_detector.py +186 -0
  61. src/linters/stringly_typed/python/variable_extractor.py +96 -0
  62. src/orchestrator/core.py +241 -12
  63. {thailint-0.9.0.dist-info → thailint-0.11.0.dist-info}/METADATA +116 -3
  64. {thailint-0.9.0.dist-info → thailint-0.11.0.dist-info}/RECORD +67 -29
  65. thailint-0.11.0.dist-info/entry_points.txt +4 -0
  66. src/cli.py +0 -2014
  67. thailint-0.9.0.dist-info/entry_points.txt +0 -4
  68. {thailint-0.9.0.dist-info → thailint-0.11.0.dist-info}/WHEEL +0 -0
  69. {thailint-0.9.0.dist-info → thailint-0.11.0.dist-info}/licenses/LICENSE +0 -0
src/linters/dry/cache.py CHANGED
@@ -1,32 +1,40 @@
  """
  Purpose: SQLite storage manager for DRY linter duplicate detection

- Scope: Code block storage and duplicate detection queries
+ Scope: Code block storage, constant storage, and duplicate detection queries

- Overview: Implements in-memory or temporary-file SQLite storage for duplicate code detection.
- Stores code blocks with hash values, file locations, and metadata during a single linter run.
+ Overview: Implements in-memory or temporary-file SQLite storage for duplicate code detection
+ and duplicate constants detection. Stores code blocks with hash values and constants with
+ name/value pairs, enabling cross-file duplicate detection during a single linter run.
  Supports both :memory: mode (fast, RAM-only) and tempfile mode (disk-backed for large projects).
  No persistence between runs - storage is cleared when linter completes. Includes indexes for
- fast hash lookups enabling cross-file duplicate detection with minimal overhead.
+ fast hash lookups and constant name lookups enabling efficient cross-file detection.

  Dependencies: Python sqlite3 module (stdlib), tempfile module (stdlib), pathlib.Path, dataclasses

  Exports: CodeBlock dataclass, DRYCache class

  Interfaces: DRYCache.__init__(storage_mode), add_blocks(file_path, blocks),
- find_duplicates_by_hash(hash_value), get_duplicate_hashes(), close()
+ find_duplicates_by_hash(hash_value), duplicate_hashes, add_constants(file_path, constants),
+ all_constants, get_duplicate_constant_names(), close()

- Implementation: SQLite with two tables (files, code_blocks), indexed on hash_value for performance,
+ Implementation: SQLite with three tables (files, code_blocks, constants), indexed for performance,
  storage_mode determines :memory: vs tempfile location, ACID transactions for reliability
  """

+ from __future__ import annotations
+
  import sqlite3
  import tempfile
  from dataclasses import dataclass
  from pathlib import Path
+ from typing import TYPE_CHECKING

  from .cache_query import CacheQueryService

+ if TYPE_CHECKING:
+ from .constant import ConstantInfo
+

  @dataclass
  class CodeBlock:
@@ -93,6 +101,19 @@ class DRYCache:
  self.db.execute("CREATE INDEX IF NOT EXISTS idx_hash_value ON code_blocks(hash_value)")
  self.db.execute("CREATE INDEX IF NOT EXISTS idx_file_path ON code_blocks(file_path)")

+ # Constants table for duplicate constant detection
+ self.db.execute(
+ """CREATE TABLE IF NOT EXISTS constants (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ file_path TEXT NOT NULL,
+ name TEXT NOT NULL,
+ line_number INTEGER NOT NULL,
+ value TEXT,
+ FOREIGN KEY (file_path) REFERENCES files(file_path) ON DELETE CASCADE
+ )"""
+ )
+ self.db.execute("CREATE INDEX IF NOT EXISTS idx_constant_name ON constants(name)")
+
  self.db.commit()

  def add_blocks(self, file_path: Path, blocks: list[CodeBlock]) -> None:
@@ -166,6 +187,73 @@ class DRYCache:
  """
  return self._query_service.get_duplicate_hashes(self.db)

+ def add_constants(
+ self,
+ file_path: Path,
+ constants: list[ConstantInfo],
+ ) -> None:
+ """Add constants to storage.
+
+ Args:
+ file_path: Path to source file
+ constants: List of ConstantInfo instances to store
+ """
+ if not constants:
+ return
+
+ for const in constants:
+ self.db.execute(
+ """INSERT INTO constants
+ (file_path, name, line_number, value)
+ VALUES (?, ?, ?, ?)""",
+ (
+ str(file_path),
+ const.name,
+ const.line_number,
+ const.value,
+ ),
+ )
+
+ self.db.commit()
+
+ @property
+ def all_constants(self) -> list[tuple[str, str, int, str | None]]:
+ """All constants from storage.
+
+ Returns:
+ List of tuples: (file_path, name, line_number, value)
+ """
+ cursor = self.db.execute("SELECT file_path, name, line_number, value FROM constants")
+ return cursor.fetchall()
+
+ def get_duplicate_constant_names(self) -> list[str]:
+ """Get constant names that appear in 2+ files.
+
+ Returns:
+ List of constant names appearing in multiple files
+ """
+ cursor = self.db.execute(
+ """SELECT name FROM constants
+ GROUP BY name
+ HAVING COUNT(DISTINCT file_path) >= 2"""
+ )
+ return [row[0] for row in cursor.fetchall()]
+
+ def get_constants_by_name(self, name: str) -> list[tuple[str, int, str | None]]:
+ """Get all locations of a constant by name.
+
+ Args:
+ name: The constant name to search for
+
+ Returns:
+ List of tuples: (file_path, line_number, value)
+ """
+ cursor = self.db.execute(
+ "SELECT file_path, line_number, value FROM constants WHERE name = ?",
+ (name,),
+ )
+ return cursor.fetchall()
+
  def close(self) -> None:
  """Close database connection and cleanup tempfile if used."""
  self.db.close()
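Taken together, the new table and methods give the DRY cache a collect-then-query flow for constants. The sketch below is illustrative only and is not shipped in the package: class and method names are taken from the diff above, the file paths and constant values are invented, the import paths assume the src/ layout from the file list, and it assumes DRYCache creates its tables on construction.

from pathlib import Path

from src.linters.dry.cache import DRYCache
from src.linters.dry.constant import ConstantInfo

# Collect constants per file, then ask which names appear in 2+ files.
cache = DRYCache(storage_mode="memory")
cache.add_constants(Path("app/settings.py"), [ConstantInfo("API_TIMEOUT", 12, "30")])
cache.add_constants(Path("worker/config.py"), [ConstantInfo("API_TIMEOUT", 7, "30")])

print(cache.get_duplicate_constant_names())        # ['API_TIMEOUT']
print(cache.get_constants_by_name("API_TIMEOUT"))  # [('app/settings.py', 12, '30'), ('worker/config.py', 7, '30')]
cache.close()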
src/linters/dry/config.py CHANGED
@@ -23,6 +23,7 @@ from typing import Any
  # Default configuration constants
  DEFAULT_MIN_DUPLICATE_LINES = 3
  DEFAULT_MIN_DUPLICATE_TOKENS = 30
+ DEFAULT_DETECT_DUPLICATE_CONSTANTS = True


  @dataclass
@@ -60,23 +61,34 @@ class DRYConfig: # pylint: disable=too-many-instance-attributes
  }
  )

+ # Duplicate constants detection
+ detect_duplicate_constants: bool = DEFAULT_DETECT_DUPLICATE_CONSTANTS
+ min_constant_occurrences: int = 2 # Minimum files with same constant to report
+
+ # Language-specific overrides for constant detection
+ python_min_constant_occurrences: int | None = None
+ typescript_min_constant_occurrences: int | None = None
+
  def __post_init__(self) -> None:
  """Validate configuration values."""
- if self.min_duplicate_lines <= 0:
- raise ValueError(
- f"min_duplicate_lines must be positive, got {self.min_duplicate_lines}"
- )
- if self.min_duplicate_tokens <= 0:
- raise ValueError(
- f"min_duplicate_tokens must be positive, got {self.min_duplicate_tokens}"
- )
- if self.min_occurrences <= 0:
- raise ValueError(f"min_occurrences must be positive, got {self.min_occurrences}")
+ self._validate_positive_fields()
  if self.storage_mode not in ("memory", "tempfile"):
  raise ValueError(
  f"storage_mode must be 'memory' or 'tempfile', got '{self.storage_mode}'"
  )

+ def _validate_positive_fields(self) -> None:
+ """Validate that required fields are positive."""
+ positive_fields = [
+ ("min_duplicate_lines", self.min_duplicate_lines),
+ ("min_duplicate_tokens", self.min_duplicate_tokens),
+ ("min_occurrences", self.min_occurrences),
+ ("min_constant_occurrences", self.min_constant_occurrences),
+ ]
+ for name, value in positive_fields:
+ if value <= 0:
+ raise ValueError(f"{name} must be positive, got {value}")
+
  def get_min_occurrences_for_language(self, language: str) -> int:
  """Get minimum occurrences threshold for a specific language.

@@ -97,6 +109,25 @@ class DRYConfig: # pylint: disable=too-many-instance-attributes
  override = language_overrides.get(language_lower)
  return override if override is not None else self.min_occurrences

+ def get_min_constant_occurrences_for_language(self, language: str) -> int:
+ """Get minimum constant occurrences threshold for a specific language.
+
+ Args:
+ language: Language identifier (e.g., "python", "typescript")
+
+ Returns:
+ Minimum constant occurrences threshold for the language, or global default
+ """
+ language_lower = language.lower()
+
+ language_overrides = {
+ "python": self.python_min_constant_occurrences,
+ "typescript": self.typescript_min_constant_occurrences,
+ }
+
+ override = language_overrides.get(language_lower)
+ return override if override is not None else self.min_constant_occurrences
+
  @classmethod
  def from_dict(cls, config: dict[str, Any]) -> "DRYConfig":
  """Load configuration from dictionary.
@@ -131,4 +162,10 @@ class DRYConfig: # pylint: disable=too-many-instance-attributes
  storage_mode=config.get("storage_mode", "memory"),
  ignore_patterns=config.get("ignore", []),
  filters=filters,
+ detect_duplicate_constants=config.get(
+ "detect_duplicate_constants", DEFAULT_DETECT_DUPLICATE_CONSTANTS
+ ),
+ min_constant_occurrences=config.get("min_constant_occurrences", 2),
+ python_min_constant_occurrences=python_config.get("min_constant_occurrences"),
+ typescript_min_constant_occurrences=typescript_config.get("min_constant_occurrences"),
  )
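A minimal sketch of how the new options might be supplied through from_dict. This is not from the package: it assumes keys omitted here fall back to their defaults (as the visible config.get(...) calls suggest) and that python_config/typescript_config are read from "python"/"typescript" sub-dictionaries of the config mapping, which is not shown in this hunk.

from src.linters.dry.config import DRYConfig

config = DRYConfig.from_dict(
    {
        "detect_duplicate_constants": True,
        "min_constant_occurrences": 2,                  # global: report when seen in 2+ files
        "typescript": {"min_constant_occurrences": 3},  # assumed per-language override location
    }
)

print(config.get_min_constant_occurrences_for_language("python"))      # 2 (global default)
print(config.get_min_constant_occurrences_for_language("typescript"))  # 3 (override)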
src/linters/dry/constant.py ADDED
@@ -0,0 +1,92 @@
+ """
+ Purpose: Dataclasses for duplicate constants detection in DRY linter
+
+ Scope: Data structures for constant extraction and cross-file detection
+
+ Overview: Provides dataclasses for representing constants extracted from source code and their
+ locations across multiple files. ConstantInfo stores extracted constant metadata (name, line,
+ value) from a single file. ConstantLocation represents where a constant appears across the
+ project. ConstantGroup represents a group of related constants (exact or fuzzy matches) for
+ violation reporting. These structures support the duplicate constants detection feature that
+ identifies when the same constant name appears in multiple files.
+
+ Dependencies: Python dataclasses module, pathlib for Path types
+
+ Exports: ConstantInfo, ConstantLocation, ConstantGroup dataclasses
+
+ Interfaces: Dataclass constructors with named fields
+
+ Implementation: Immutable dataclasses with optional fields for extracted value context
+ """
+
+ import re
+ from dataclasses import dataclass, field
+ from pathlib import Path
+
+ # Shared pattern for ALL_CAPS constant names (public only, no leading underscore)
+ # Used by both Python and TypeScript constant extractors
+ # Requires at least 2 characters to exclude single-letter type params (P, T, K, V)
+ CONSTANT_NAME_PATTERN = re.compile(r"^[A-Z][A-Z0-9_]+$")
+
+
+ @dataclass
+ class ConstantInfo:
+ """Information about a constant extracted from source code.
+
+ Represents a single constant definition found during file analysis.
+ Used during the collection phase before cross-file matching.
+ """
+
+ name: str # Constant name (e.g., "API_TIMEOUT")
+ line_number: int # Line where constant is defined
+ value: str | None = None # Optional: the value (for violation message context)
+
+
+ @dataclass
+ class ConstantLocation:
+ """Location of a constant in the project.
+
+ Represents where a specific constant appears, including file path,
+ line number, and the value assigned. Used for cross-file reporting.
+ """
+
+ file_path: Path
+ line_number: int
+ name: str
+ value: str | None = None
+
+
+ @dataclass
+ class ConstantGroup:
+ """A group of related constants for violation reporting.
+
+ Groups constants that match (either exactly or via fuzzy matching)
+ across multiple files. Used by the violation builder to generate
+ comprehensive violation messages.
+ """
+
+ # The canonical name (first seen or most common)
+ canonical_name: str
+
+ # All locations where this constant (or fuzzy match) appears
+ locations: list[ConstantLocation] = field(default_factory=list)
+
+ # All names in this group (for fuzzy matches, may include variants)
+ all_names: set[str] = field(default_factory=set)
+
+ # Whether this is a fuzzy match (True) or exact match (False)
+ is_fuzzy_match: bool = False
+
+ def add_location(self, location: ConstantLocation) -> None:
+ """Add a location to this group.
+
+ Args:
+ location: The constant location to add
+ """
+ self.locations.append(location)
+ self.all_names.add(location.name)
+
+ @property
+ def file_count(self) -> int:
+ """Number of unique files containing this constant."""
+ return len({loc.file_path for loc in self.locations})
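A small usage sketch for these dataclasses (illustrative only, not from the package; the paths and values are invented):

from pathlib import Path

from src.linters.dry.constant import ConstantGroup, ConstantLocation

# Build a group from two occurrences of the same constant in different files.
group = ConstantGroup(canonical_name="MAX_RETRIES")
group.add_location(ConstantLocation(Path("api/client.py"), 10, "MAX_RETRIES", "3"))
group.add_location(ConstantLocation(Path("jobs/runner.py"), 4, "MAX_RETRIES", "3"))

print(group.file_count)  # 2 distinct files -> meets the default reporting threshold
print(group.all_names)   # {'MAX_RETRIES'}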
src/linters/dry/constant_matcher.py ADDED
@@ -0,0 +1,214 @@
+ """
+ Purpose: Fuzzy matching for constant names across files
+
+ Scope: Constant name matching with word-set and edit distance algorithms
+
+ Overview: Implements fuzzy matching strategies to identify related constants across files. Uses
+ two matching strategies: word-set matching (same words in different order, e.g., API_TIMEOUT
+ and TIMEOUT_API) and edit distance matching (typos within Levenshtein distance <= 2, e.g.,
+ MAX_RETRYS and MAX_RETRIES). Single-word constants (e.g., MAX, TIMEOUT) only use exact
+ matching to avoid false positives. Groups related constants into ConstantGroup instances
+ for violation reporting.
+
+ Dependencies: ConstantInfo, ConstantLocation, ConstantGroup from constant module
+
+ Exports: ConstantMatcher class
+
+ Interfaces: ConstantMatcher.find_groups(constants) -> list[ConstantGroup]
+
+ Implementation: Union-Find algorithm for grouping, word-set hashing, Levenshtein distance calculation
+ """
+
+ from collections.abc import Callable
+ from itertools import combinations
+ from pathlib import Path
+
+ from .constant import ConstantGroup, ConstantInfo, ConstantLocation
+
+ # Maximum edit distance for fuzzy matching
+ MAX_EDIT_DISTANCE = 2
+
+ # Antonym pairs that should not be fuzzy-matched
+ # If one name contains a word from the left side and the other contains the right side,
+ # they represent different concepts and should not be grouped together
+ ANTONYM_PAIRS = frozenset(
+ (
+ frozenset(("max", "min")),
+ frozenset(("start", "end")),
+ frozenset(("first", "last")),
+ frozenset(("before", "after")),
+ frozenset(("open", "close")),
+ frozenset(("read", "write")),
+ frozenset(("get", "set")),
+ frozenset(("push", "pop")),
+ frozenset(("add", "remove")),
+ frozenset(("create", "delete")),
+ frozenset(("enable", "disable")),
+ frozenset(("show", "hide")),
+ frozenset(("up", "down")),
+ frozenset(("left", "right")),
+ frozenset(("top", "bottom")),
+ frozenset(("prev", "next")),
+ frozenset(("success", "failure")),
+ frozenset(("true", "false")),
+ frozenset(("on", "off")),
+ frozenset(("in", "out")),
+ )
+ )
+
+ # Minimum length for constant names (exclude single-letter type params like P, T, K, V)
+ MIN_CONSTANT_NAME_LENGTH = 2
+
+
+ class UnionFind:
+ """Union-Find data structure for grouping."""
+
+ def __init__(self, items: list[str]) -> None:
+ """Initialize with list of items."""
+ self._parent = {item: item for item in items}
+
+ def find(self, x: str) -> str:
+ """Find root with path compression."""
+ if self._parent[x] != x:
+ self._parent[x] = self.find(self._parent[x])
+ return self._parent[x]
+
+ def union(self, x: str, y: str) -> None:
+ """Merge two sets."""
+ px, py = self.find(x), self.find(y)
+ if px != py:
+ self._parent[px] = py
+
+
+ class ConstantMatcher:
+ """Fuzzy matching for constant names."""
+
+ def find_groups(self, constants: list[tuple[Path, ConstantInfo]]) -> list[ConstantGroup]:
+ """Find groups of related constants."""
+ if not constants:
+ return []
+ locations = _build_locations(constants)
+ exact_groups = _group_by_exact_name(locations)
+ return self._merge_fuzzy_groups(exact_groups)
+
+ def _merge_fuzzy_groups(self, groups: dict[str, ConstantGroup]) -> list[ConstantGroup]:
+ """Merge groups that match via fuzzy matching."""
+ names = list(groups.keys())
+ uf = UnionFind(names)
+ _union_matching_pairs(names, uf, self._is_fuzzy_match)
+ return _build_merged_groups(names, groups, uf)
+
+ def _is_fuzzy_match(self, name1: str, name2: str) -> bool:
+ """Check if two constant names should be considered a match."""
+ if name1 == name2:
+ return True
+ return _is_fuzzy_similar(name1, name2)
+
+
+ def _build_locations(constants: list[tuple[Path, ConstantInfo]]) -> list[ConstantLocation]:
+ """Build location list from constants."""
+ return [
+ ConstantLocation(
+ file_path=file_path, line_number=info.line_number, name=info.name, value=info.value
+ )
+ for file_path, info in constants
+ ]
+
+
+ def _group_by_exact_name(locations: list[ConstantLocation]) -> dict[str, ConstantGroup]:
+ """Group locations by exact constant name."""
+ groups: dict[str, ConstantGroup] = {}
+ for loc in locations:
+ if loc.name not in groups:
+ groups[loc.name] = ConstantGroup(
+ canonical_name=loc.name, locations=[], all_names=set(), is_fuzzy_match=False
+ )
+ groups[loc.name].add_location(loc)
+ return groups
+
+
+ def _union_matching_pairs(
+ names: list[str], uf: UnionFind, is_match: Callable[[str, str], bool]
+ ) -> None:
+ """Union all pairs of names that match."""
+ for name1, name2 in combinations(names, 2):
+ if is_match(name1, name2):
+ uf.union(name1, name2)
+
+
+ def _build_merged_groups(
+ names: list[str], groups: dict[str, ConstantGroup], uf: UnionFind
+ ) -> list[ConstantGroup]:
+ """Build merged groups from union-find structure."""
+ merged: dict[str, ConstantGroup] = {}
+ for name in names:
+ root = uf.find(name)
+ if root not in merged:
+ merged[root] = ConstantGroup(
+ canonical_name=root, locations=[], all_names=set(), is_fuzzy_match=False
+ )
+ for loc in groups[name].locations:
+ merged[root].add_location(loc)
+ if name != root:
+ merged[root].is_fuzzy_match = True
+ return list(merged.values())
+
+
+ def _get_words(name: str) -> list[str]:
+ """Split constant name into lowercase words."""
+ return [w.lower() for w in name.split("_") if w]
+
+
+ def _is_fuzzy_similar(name1: str, name2: str) -> bool:
+ """Check if two names are fuzzy similar (word-set or edit distance)."""
+ words1, words2 = _get_words(name1), _get_words(name2)
+ if not _has_enough_words(words1, words2):
+ return False
+ if _has_antonym_conflict(set(words1), set(words2)):
+ return False
+ return _word_set_match(words1, words2) or _edit_distance_match(name1, name2)
+
+
+ def _has_enough_words(words1: list[str], words2: list[str]) -> bool:
+ """Check if both word lists have at least 2 words for fuzzy matching."""
+ return len(words1) >= 2 and len(words2) >= 2
+
+
+ def _word_set_match(words1: list[str], words2: list[str]) -> bool:
+ """Check if two word lists contain the same words."""
+ return set(words1) == set(words2)
+
+
+ def _has_antonym_conflict(set1: set[str], set2: set[str]) -> bool:
+ """Check if word sets contain conflicting antonyms (e.g., MAX vs MIN)."""
+ return any(_is_antonym_split(pair, set1, set2) for pair in ANTONYM_PAIRS)
+
+
+ def _is_antonym_split(pair: frozenset[str], set1: set[str], set2: set[str]) -> bool:
+ """Check if one set has one word of the pair and the other has the opposite."""
+ pair_list = tuple(pair)
+ word_a, word_b = pair_list[0], pair_list[1]
+ return (word_a in set1 and word_b in set2) or (word_b in set1 and word_a in set2)
+
+
+ def _edit_distance_match(name1: str, name2: str) -> bool:
+ """Check if names match within edit distance threshold."""
+ return _levenshtein_distance(name1.lower(), name2.lower()) <= MAX_EDIT_DISTANCE
+
+
+ def _levenshtein_distance(s1: str, s2: str) -> int:
+ """Calculate Levenshtein distance between two strings."""
+ if len(s1) < len(s2):
+ return _levenshtein_distance(s2, s1) # pylint: disable=arguments-out-of-order
+ if len(s2) == 0:
+ return len(s1)
+ previous_row = list(range(len(s2) + 1))
+ for i, c1 in enumerate(s1):
+ current_row = [i + 1]
+ for j, c2 in enumerate(s2):
+ insertions = previous_row[j + 1] + 1
+ deletions = current_row[j] + 1
+ substitutions = previous_row[j] + (c1 != c2)
+ current_row.append(min(insertions, deletions, substitutions))
+ previous_row = current_row
+ return previous_row[-1]
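To make the matching rules concrete, here is an illustrative run (not from the package; the file paths and constant names are invented). API_TIMEOUT and TIMEOUT_API share the same word set, so they merge into one fuzzy group; MAX_WIDTH and MIN_WIDTH trigger the max/min antonym pair and stay separate.

from pathlib import Path

from src.linters.dry.constant import ConstantInfo
from src.linters.dry.constant_matcher import ConstantMatcher

constants = [
    (Path("api/client.py"), ConstantInfo("API_TIMEOUT", 3, "30")),
    (Path("worker/tasks.py"), ConstantInfo("TIMEOUT_API", 8, "30")),  # same words, different order
    (Path("ui/layout.py"), ConstantInfo("MAX_WIDTH", 5, "1200")),
    (Path("ui/panel.py"), ConstantInfo("MIN_WIDTH", 5, "300")),       # max/min antonyms, never merged
]

for group in ConstantMatcher().find_groups(constants):
    print(sorted(group.all_names), group.is_fuzzy_match)
# Expected: ['API_TIMEOUT', 'TIMEOUT_API'] True, plus separate exact groups for MAX_WIDTH and MIN_WIDTH.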
src/linters/dry/constant_violation_builder.py ADDED
@@ -0,0 +1,98 @@
+ """
+ Purpose: Build violation messages for duplicate constants
+
+ Scope: Violation message formatting for constant duplication detection
+
+ Overview: Formats detailed violation messages for duplicate constant detection. Creates messages
+ that include the constant name(s), all file locations with line numbers, and the values
+ assigned at each location. Distinguishes between exact matches (same constant name) and
+ fuzzy matches (similar names like API_TIMEOUT and TIMEOUT_API). Provides actionable guidance
+ to consolidate constants into a shared module.
+
+ Dependencies: ConstantGroup from constant module, Violation from core.types
+
+ Exports: ConstantViolationBuilder class
+
+ Interfaces: ConstantViolationBuilder.build_violations(groups, rule_id) -> list[Violation]
+
+ Implementation: Message template formatting with location enumeration and fuzzy match indication
+ """
+
+ from src.core.types import Severity, Violation
+
+ from .constant import ConstantGroup, ConstantLocation
+
+ # Maximum other locations to show in violation message
+ MAX_DISPLAYED_LOCATIONS = 3
+
+
+ class ConstantViolationBuilder:
+ """Builds violation messages for duplicate constants."""
+
+ def __init__(self, min_occurrences: int = 2) -> None:
+ """Initialize with minimum occurrence threshold."""
+ self.min_occurrences = min_occurrences
+
+ def build_violations(self, groups: list[ConstantGroup], rule_id: str) -> list[Violation]:
+ """Build violations from constant groups."""
+ violations = []
+ for group in groups:
+ if group.file_count >= self.min_occurrences:
+ violations.extend(self._violations_for_group(group, rule_id))
+ return violations
+
+ def _violations_for_group(self, group: ConstantGroup, rule_id: str) -> list[Violation]:
+ """Create violations for all locations in a group."""
+ return [
+ Violation(
+ rule_id=rule_id,
+ file_path=str(loc.file_path),
+ line=loc.line_number,
+ column=1,
+ message=self._format_message(group, loc),
+ severity=Severity.ERROR,
+ )
+ for loc in group.locations
+ ]
+
+ def _format_message(self, group: ConstantGroup, current: ConstantLocation) -> str:
+ """Format the violation message based on match type."""
+ others = _get_other_locations(group, current)
+ locations_text = _format_locations_text(others)
+ if group.is_fuzzy_match:
+ names_str = " ≈ ".join(f"'{n}'" for n in sorted(group.all_names))
+ return (
+ f"Similar constants found: {names_str} in {group.file_count} files. "
+ f"{locations_text} "
+ f"These appear to represent the same concept - consider standardizing the name."
+ )
+ return (
+ f"Duplicate constant '{group.canonical_name}' defined in {group.file_count} files. "
+ f"{locations_text} "
+ f"Consider consolidating to a shared constants module."
+ )
+
+
+ def _get_other_locations(group: ConstantGroup, current: ConstantLocation) -> list[ConstantLocation]:
+ """Get locations excluding current (module-level helper)."""
+ return [
+ loc
+ for loc in group.locations
+ if loc.file_path != current.file_path or loc.line_number != current.line_number
+ ]
+
+
+ def _format_locations_text(others: list[ConstantLocation]) -> str:
+ """Format other locations as text (module-level helper)."""
+ if not others:
+ return ""
+ parts = [_format_single_location(loc) for loc in others[:MAX_DISPLAYED_LOCATIONS]]
+ result = "Also found in: " + ", ".join(parts)
+ extra = len(others) - MAX_DISPLAYED_LOCATIONS
+ return result + (f" and {extra} more." if extra > 0 else ".")
+
+
+ def _format_single_location(loc: ConstantLocation) -> str:
+ """Format a single location for display (module-level helper)."""
+ value_str = f" = {loc.value}" if loc.value else ""
+ return f"{loc.file_path.name}:{loc.line_number} ({loc.name}{value_str})"
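Closing the loop, a sketch of how the builder turns a group into per-location violations. Illustrative only, not from the package: the rule id string and file paths are invented, and the expected output is paraphrased from the message templates above.

from pathlib import Path

from src.linters.dry.constant import ConstantGroup, ConstantLocation
from src.linters.dry.constant_violation_builder import ConstantViolationBuilder

# Two files defining the same constant -> one violation per location.
group = ConstantGroup(canonical_name="API_TIMEOUT")
group.add_location(ConstantLocation(Path("api/client.py"), 3, "API_TIMEOUT", "30"))
group.add_location(ConstantLocation(Path("worker/tasks.py"), 8, "API_TIMEOUT", "30"))

builder = ConstantViolationBuilder(min_occurrences=2)
for v in builder.build_violations([group], rule_id="dry.duplicate-constant"):  # hypothetical rule id
    print(f"{v.file_path}:{v.line} {v.message}")
# e.g. api/client.py:3 Duplicate constant 'API_TIMEOUT' defined in 2 files. Also found in: tasks.py:8 (API_TIMEOUT = 30). Consider consolidating to a shared constants module.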