thailint-0.14.0-py3-none-any.whl → thailint-0.15.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. src/analyzers/rust_base.py +155 -0
  2. src/analyzers/rust_context.py +141 -0
  3. src/cli/config.py +6 -4
  4. src/cli/linters/code_patterns.py +64 -16
  5. src/cli/linters/code_smells.py +23 -14
  6. src/cli/linters/documentation.py +5 -3
  7. src/cli/linters/performance.py +23 -10
  8. src/cli/linters/shared.py +22 -6
  9. src/cli/linters/structure.py +13 -4
  10. src/cli/linters/structure_quality.py +9 -4
  11. src/cli/utils.py +4 -4
  12. src/config.py +34 -21
  13. src/core/python_lint_rule.py +101 -0
  14. src/linter_config/ignore.py +2 -1
  15. src/linters/cqs/__init__.py +54 -0
  16. src/linters/cqs/config.py +55 -0
  17. src/linters/cqs/function_analyzer.py +201 -0
  18. src/linters/cqs/input_detector.py +139 -0
  19. src/linters/cqs/linter.py +159 -0
  20. src/linters/cqs/output_detector.py +84 -0
  21. src/linters/cqs/python_analyzer.py +54 -0
  22. src/linters/cqs/types.py +82 -0
  23. src/linters/cqs/typescript_cqs_analyzer.py +61 -0
  24. src/linters/cqs/typescript_function_analyzer.py +192 -0
  25. src/linters/cqs/typescript_input_detector.py +203 -0
  26. src/linters/cqs/typescript_output_detector.py +117 -0
  27. src/linters/cqs/violation_builder.py +94 -0
  28. src/linters/dry/typescript_value_extractor.py +2 -1
  29. src/linters/file_header/linter.py +2 -1
  30. src/linters/file_placement/linter.py +6 -6
  31. src/linters/file_placement/pattern_validator.py +6 -5
  32. src/linters/file_placement/rule_checker.py +10 -5
  33. src/linters/lazy_ignores/config.py +5 -3
  34. src/linters/lazy_ignores/python_analyzer.py +5 -1
  35. src/linters/lazy_ignores/types.py +2 -1
  36. src/linters/lbyl/__init__.py +3 -1
  37. src/linters/lbyl/linter.py +67 -0
  38. src/linters/lbyl/pattern_detectors/__init__.py +30 -2
  39. src/linters/lbyl/pattern_detectors/base.py +24 -7
  40. src/linters/lbyl/pattern_detectors/dict_key_detector.py +107 -0
  41. src/linters/lbyl/pattern_detectors/division_check_detector.py +232 -0
  42. src/linters/lbyl/pattern_detectors/file_exists_detector.py +220 -0
  43. src/linters/lbyl/pattern_detectors/hasattr_detector.py +119 -0
  44. src/linters/lbyl/pattern_detectors/isinstance_detector.py +119 -0
  45. src/linters/lbyl/pattern_detectors/len_check_detector.py +173 -0
  46. src/linters/lbyl/pattern_detectors/none_check_detector.py +146 -0
  47. src/linters/lbyl/pattern_detectors/string_validator_detector.py +145 -0
  48. src/linters/lbyl/python_analyzer.py +215 -0
  49. src/linters/lbyl/violation_builder.py +354 -0
  50. src/linters/stringly_typed/ignore_checker.py +4 -6
  51. src/orchestrator/language_detector.py +5 -3
  52. {thailint-0.14.0.dist-info → thailint-0.15.1.dist-info}/METADATA +4 -2
  53. {thailint-0.14.0.dist-info → thailint-0.15.1.dist-info}/RECORD +56 -29
  54. {thailint-0.14.0.dist-info → thailint-0.15.1.dist-info}/WHEEL +0 -0
  55. {thailint-0.14.0.dist-info → thailint-0.15.1.dist-info}/entry_points.txt +0 -0
  56. {thailint-0.14.0.dist-info → thailint-0.15.1.dist-info}/licenses/LICENSE +0 -0
src/linters/cqs/violation_builder.py (new file)
@@ -0,0 +1,94 @@
+"""
+Purpose: Builds Violation objects from CQSPattern instances
+
+Scope: Violation message formatting and suggestion generation for CQS violations
+
+Overview: Provides build_cqs_violation function that converts a CQSPattern with a detected
+    CQS violation into a Violation object with properly formatted message. Message includes
+    function name (with class prefix for methods), lists INPUT operations with line numbers,
+    lists OUTPUT operations with line numbers, and provides actionable suggestion to split
+    the function into separate query and command functions.
+
+Dependencies: CQSPattern, InputOperation, OutputOperation, Violation, Severity
+
+Exports: build_cqs_violation
+
+Interfaces: build_cqs_violation(pattern: CQSPattern) -> Violation
+
+Implementation: String formatting with INPUT/OUTPUT line number aggregation
+"""
+
+from src.core.types import Severity, Violation
+
+from .types import CQSPattern
+
+
+def _format_inputs(pattern: CQSPattern) -> str:
+    """Format INPUT operations for violation message.
+
+    Args:
+        pattern: CQSPattern containing inputs
+
+    Returns:
+        Formatted string listing INPUTs with line numbers
+    """
+    if not pattern.inputs:
+        return ""
+
+    parts = [f"Line {inp.line}: {inp.target} = {inp.expression}" for inp in pattern.inputs]
+    return "; ".join(parts)
+
+
+def _format_outputs(pattern: CQSPattern) -> str:
+    """Format OUTPUT operations for violation message.
+
+    Args:
+        pattern: CQSPattern containing outputs
+
+    Returns:
+        Formatted string listing OUTPUTs with line numbers
+    """
+    if not pattern.outputs:
+        return ""
+
+    parts = [f"Line {out.line}: {out.expression}" for out in pattern.outputs]
+    return "; ".join(parts)
+
+
+def build_cqs_violation(pattern: CQSPattern) -> Violation:
+    """Build a Violation object from a CQSPattern.
+
+    Creates a violation message that includes:
+    - Function name (with class prefix for methods)
+    - List of INPUT operations with line numbers
+    - List of OUTPUT operations with line numbers
+    - Suggestion to split into query and command functions
+
+    Args:
+        pattern: CQSPattern representing a function that violates CQS
+
+    Returns:
+        Violation object with formatted message and suggestion
+    """
+    full_name = pattern.get_full_name()
+
+    # Build detailed message
+    inputs_str = _format_inputs(pattern)
+    outputs_str = _format_outputs(pattern)
+
+    message = (
+        f"Function '{full_name}' violates CQS: mixes queries and commands. "
+        f"INPUTs: {inputs_str}. OUTPUTs: {outputs_str}."
+    )
+
+    suggestion = "Split into separate query and command functions."
+
+    return Violation(
+        rule_id="cqs",
+        file_path=pattern.file_path,
+        line=pattern.line,
+        column=pattern.column,
+        message=message,
+        severity=Severity.ERROR,
+        suggestion=suggestion,
+    )
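For orientation, a rough sketch of the kind of function the new cqs linter is aimed at and the message shape build_cqs_violation produces. The function below is illustrative only; whether a given call counts as an INPUT or an OUTPUT is decided by the input/output detectors elsewhere in the package, and the message text in the comments simply follows the f-strings shown above.

from pathlib import Path

def load_and_record(path: str) -> str:
    text = Path(path).read_text()       # query: reads data and returns it (an INPUT)
    Path("audit.log").write_text(text)  # command: performs a side effect (an OUTPUT)
    return text

# build_cqs_violation would then format something roughly like:
# "Function 'load_and_record' violates CQS: mixes queries and commands. "
# "INPUTs: Line 4: text = Path(path).read_text(). OUTPUTs: Line 5: Path('audit.log').write_text(text)."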
src/linters/dry/typescript_value_extractor.py
@@ -19,6 +19,7 @@ Suppressions:
     - type:ignore[assignment,misc]: Tree-sitter Node type alias (optional dependency fallback)
 """
 
+from contextlib import suppress
 from typing import Any
 
 from src.analyzers.typescript_base import TREE_SITTER_AVAILABLE
@@ -46,7 +47,7 @@ class TypeScriptValueExtractor:
         """Get string representation of a value node."""
         if node.type in self.LITERAL_TYPES:
             return self.get_node_text(node, content)
-        if node.type in self.FIXED_REPRESENTATIONS:
+        with suppress(KeyError):
             return self.FIXED_REPRESENTATIONS[node.type]
         if node.type == "call_expression":
             return self._get_call_string(node, content)
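The same refactor recurs throughout this release: a membership check ahead of a dict lookup is replaced by contextlib.suppress(KeyError) around the lookup itself, the EAFP style that the new lbyl linter promotes. A standalone sketch of the before/after shape (the names here are illustrative, not taken from the package):

from contextlib import suppress

FIXED = {"true": "true", "false": "false"}

def lookup_lbyl(node_type: str) -> str | None:
    # Before: look before you leap - check membership, then index
    if node_type in FIXED:
        return FIXED[node_type]
    return None

def lookup_eafp(node_type: str) -> str | None:
    # After: attempt the lookup and silently swallow the KeyError
    with suppress(KeyError):
        return FIXED[node_type]
    return None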
src/linters/file_header/linter.py
@@ -27,6 +27,7 @@ Suppressions:
     languages. Methods support single responsibility of file header validation.
 """
 
+from contextlib import suppress
 from pathlib import Path
 from typing import Protocol
 
@@ -302,7 +303,7 @@ class FileHeaderRule(BaseLintRule):  # thailint: ignore[srp]
         prose_parts = []
 
         for field_name in prose_fields:
-            if field_name in fields:
+            with suppress(KeyError):
                 prose_parts.append(f"{field_name}: {fields[field_name]}")
 
         return "\n".join(prose_parts)
src/linters/file_placement/linter.py
@@ -27,6 +27,7 @@ Suppressions:
 """
 
 import json
+from contextlib import suppress
 from pathlib import Path
 from typing import Any
 
@@ -327,10 +328,10 @@ class FilePlacementRule(BaseLintRule):  # thailint: ignore[srp.violation]
        if not hasattr(context, "metadata"):
            return None
        # Try hyphenated format first (original format)
-        if "file-placement" in context.metadata:
+        with suppress(KeyError):
            return context.metadata["file-placement"]
        # Try underscored format (normalized format)
-        if "file_placement" in context.metadata:
+        with suppress(KeyError):
            return context.metadata["file_placement"]
        return None
 
@@ -364,7 +365,7 @@ class FilePlacementRule(BaseLintRule):  # thailint: ignore[srp.violation]
            FilePlacementLinter instance
        """
        # Check if cached linter exists for this project root
-        if project_root in self._linter_cache:
+        with suppress(KeyError):
            return self._linter_cache[project_root]
 
        # Try to get config from context metadata (orchestrator passes config here)
@@ -420,11 +421,10 @@ class FilePlacementRule(BaseLintRule):  # thailint: ignore[srp.violation]
            config = self._parse_layout_file(layout_path)
 
            # Unwrap file-placement key if present (try both formats for backward compatibility)
-            if "file-placement" in config:
+            with suppress(KeyError):
                return config["file-placement"]
-            if "file_placement" in config:
+            with suppress(KeyError):
                return config["file_placement"]
-
            return config
        except Exception:
            return {}
src/linters/file_placement/pattern_validator.py
@@ -18,6 +18,7 @@ Implementation: Uses re.compile() to test pattern validity, provides detailed er
 """
 
 import re
+from contextlib import suppress
 from typing import Any
 
 
@@ -76,7 +77,7 @@ class PatternValidator:
        Args:
            rules: Rules dictionary containing allow patterns
        """
-        if "allow" in rules:
+        with suppress(KeyError):
            for pattern in rules["allow"]:
                self._validate_pattern(pattern)
 
@@ -86,7 +87,7 @@ class PatternValidator:
        Args:
            rules: Rules dictionary containing deny patterns
        """
-        if "deny" in rules:
+        with suppress(KeyError):
            for deny_item in rules["deny"]:
                pattern = _extract_pattern(deny_item)
                self._validate_pattern(pattern)
@@ -97,7 +98,7 @@
        Args:
            fp_config: File placement configuration section
        """
-        if "directories" in fp_config:
+        with suppress(KeyError):
            for _dir_path, rules in fp_config["directories"].items():
                self._validate_allow_patterns(rules)
                self._validate_deny_patterns(rules)
@@ -108,7 +109,7 @@
        Args:
            fp_config: File placement configuration section
        """
-        if "global_patterns" in fp_config:
+        with suppress(KeyError):
            self._validate_allow_patterns(fp_config["global_patterns"])
            self._validate_deny_patterns(fp_config["global_patterns"])
 
@@ -118,7 +119,7 @@
        Args:
            fp_config: File placement configuration section
        """
-        if "global_deny" in fp_config:
+        with suppress(KeyError):
            for deny_item in fp_config["global_deny"]:
                pattern = _extract_pattern(deny_item)
                self._validate_pattern(pattern)
src/linters/file_placement/rule_checker.py
@@ -19,6 +19,7 @@ Implementation: Checks deny before allow, delegates directory matching to Direct
     uses RuleCheckContext dataclass to reduce parameter duplication
 """
 
+from contextlib import suppress
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Any
@@ -76,17 +77,17 @@ class RuleChecker:
        """
        violations: list[Violation] = []
 
-        if "directories" in fp_config:
+        with suppress(KeyError):
            dir_violations = self._check_directory_rules(
                path_str, rel_path, fp_config["directories"]
            )
            violations.extend(dir_violations)
 
-        if "global_deny" in fp_config:
+        with suppress(KeyError):
            deny_violations = self._check_global_deny(path_str, rel_path, fp_config["global_deny"])
            violations.extend(deny_violations)
 
-        if "global_patterns" in fp_config:
+        with suppress(KeyError):
            global_violations = self._check_global_patterns(
                path_str, rel_path, fp_config["global_patterns"]
            )
@@ -209,21 +210,25 @@
            List of violations
        """
        # Check deny patterns first
-        if "deny" in global_patterns:
+        try:
            is_denied, reason = self.pattern_matcher.match_deny_patterns(
                path_str, global_patterns["deny"]
            )
            if is_denied:
                violation = self.violation_factory.create_global_deny_violation(rel_path, reason)
                return self._wrap_violation(violation)
+        except KeyError:
+            pass  # No deny patterns
 
        # Check allow patterns
-        if "allow" in global_patterns:
+        try:
            is_allowed = self.pattern_matcher.match_allow_patterns(
                path_str, global_patterns["allow"]
            )
            if not is_allowed:
                violation = self.violation_factory.create_global_allow_violation(rel_path)
                return self._wrap_violation(violation)
+        except KeyError:
+            pass  # No allow patterns
 
        return []
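In rule_checker the rewrite uses an explicit try/except rather than suppress, because several statements after the lookup (the is_denied / is_allowed branches) need to share the same KeyError handler while still falling through to the code after the block. A self-contained sketch of that shape, with illustrative names:

def first_deny_reason(patterns: dict[str, list[str]]) -> str | None:
    # try/except lets multiple statements share one KeyError handler
    # and still reach the code that follows the block.
    try:
        deny = patterns["deny"]
        if deny:
            return f"denied by {deny[0]}"
    except KeyError:
        pass  # no deny patterns configured
    return None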
src/linters/lazy_ignores/config.py
@@ -4,9 +4,9 @@ Purpose: Configuration for lazy-ignores linter
 Scope: All configurable options for ignore detection
 
 Overview: Provides LazyIgnoresConfig dataclass with pattern-specific toggles for each
-    ignore type (noqa, type:ignore, pylint, nosec, typescript, eslint, thailint). Includes
-    orphaned detection toggle and file pattern ignores. Configuration can be loaded from
-    dictionary (YAML) with sensible defaults for all options.
+    ignore type (noqa, type:ignore, pylint, nosec, pyright, typescript, eslint, thailint).
+    Includes orphaned detection toggle and file pattern ignores. Configuration can be
+    loaded from dictionary (YAML) with sensible defaults for all options.
 
 Dependencies: dataclasses, typing
 
@@ -33,6 +33,7 @@ class LazyIgnoresConfig:  # pylint: disable=too-many-instance-attributes
     check_type_ignore: bool = True
     check_pylint_disable: bool = True
     check_nosec: bool = True
+    check_pyright_ignore: bool = True
     check_ts_ignore: bool = True
     check_eslint_disable: bool = True
     check_thailint_ignore: bool = True
@@ -57,6 +58,7 @@ class LazyIgnoresConfig:  # pylint: disable=too-many-instance-attributes
            check_type_ignore=config_dict.get("check_type_ignore", True),
            check_pylint_disable=config_dict.get("check_pylint_disable", True),
            check_nosec=config_dict.get("check_nosec", True),
+            check_pyright_ignore=config_dict.get("check_pyright_ignore", True),
            check_ts_ignore=config_dict.get("check_ts_ignore", True),
            check_eslint_disable=config_dict.get("check_eslint_disable", True),
            check_thailint_ignore=config_dict.get("check_thailint_ignore", True),
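A sketch of how the new toggle reaches the dataclass, assuming the dict-loading classmethod is named from_dict (as it is for LBYLConfig; only the kwargs are visible in this hunk) and that the dict mirrors the linter's YAML section:

from src.linters.lazy_ignores.config import LazyIgnoresConfig

# Equivalent of a YAML section for the lazy-ignores linter such as:
#   check_pyright_ignore: false
#   check_nosec: true
config = LazyIgnoresConfig.from_dict(  # assumed loader name
    {
        "check_pyright_ignore": False,  # new in 0.15.x: skip '# pyright: ignore' comments
        "check_nosec": True,
    }
)
assert config.check_pyright_ignore is False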
src/linters/lazy_ignores/python_analyzer.py
@@ -1,7 +1,7 @@
 """
 Purpose: Detect Python linting ignore directives in source code
 
-Scope: noqa, type:ignore, pylint:disable, nosec, dry:ignore-block pattern detection
+Scope: noqa, type:ignore, pylint:disable, nosec, pyright:ignore, dry:ignore-block pattern detection
 
 Overview: Provides PythonIgnoreDetector class that scans Python source code for common
     linting ignore patterns. Detects bare patterns (e.g., # noqa) and rule-specific
@@ -106,6 +106,10 @@ class PythonIgnoreDetector:
            r"#\s*nosec(?:\s+([A-Z0-9,\s]+))?(?:\s|$)",
            re.IGNORECASE,
        ),
+        IgnoreType.PYRIGHT_IGNORE: re.compile(
+            r"#\s*pyright:\s*ignore(?:\[([^\]]+)\])?",
+            re.IGNORECASE,
+        ),
        IgnoreType.THAILINT_IGNORE: re.compile(
            r"#\s*thailint:\s*ignore(?!-)(?:\[([^\]]+)\])?",
            re.IGNORECASE,
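The new pyright pattern mirrors the existing ones: an optional bracketed group captures the rule list, so a bare suppression can be told apart from a targeted one. A quick standalone check of what the regex matches (regex copied verbatim from the hunk above; the surrounding detector plumbing is omitted):

import re

PYRIGHT_IGNORE = re.compile(r"#\s*pyright:\s*ignore(?:\[([^\]]+)\])?", re.IGNORECASE)

bare = PYRIGHT_IGNORE.search("x = call()  # pyright: ignore")
scoped = PYRIGHT_IGNORE.search("x = call()  # pyright: ignore[reportArgumentType]")

assert bare is not None and bare.group(1) is None  # bare suppression, no rule names captured
assert scoped is not None and scoped.group(1) == "reportArgumentType"  # rule list captured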
src/linters/lazy_ignores/types.py
@@ -6,7 +6,7 @@ Scope: Data structures for ignore directives and suppression entries
 Overview: Defines core types for the lazy-ignores linter including IgnoreType enum for
     categorizing different suppression patterns, IgnoreDirective dataclass for representing
     detected ignores in code, and SuppressionEntry dataclass for representing declared
-    suppressions in file headers. Supports Python (noqa, type:ignore, pylint, nosec),
+    suppressions in file headers. Supports Python (noqa, type:ignore, pylint, nosec, pyright),
     TypeScript (@ts-ignore, eslint-disable), thai-lint (thailint:ignore), and test skip
     patterns (pytest.mark.skip, it.skip, describe.skip).
 
@@ -31,6 +31,7 @@ class IgnoreType(Enum):
     TYPE_IGNORE = "type:ignore"
     PYLINT_DISABLE = "pylint:disable"
     NOSEC = "nosec"
+    PYRIGHT_IGNORE = "pyright:ignore"
     TS_IGNORE = "ts-ignore"
     TS_NOCHECK = "ts-nocheck"
     TS_EXPECT_ERROR = "ts-expect-error"
src/linters/lbyl/__init__.py
@@ -12,7 +12,7 @@ Overview: Package providing LBYL pattern detection for Python code. Identifies c
 
 Dependencies: ast module for Python parsing, src.core for base classes
 
-Exports: LBYLConfig, LBYLPattern, BaseLBYLDetector
+Exports: LBYLConfig, LBYLPattern, BaseLBYLDetector, LBYLRule
 
 Interfaces: LBYLConfig.from_dict() for YAML configuration loading
 
@@ -20,10 +20,12 @@ Implementation: AST-based pattern detection with configurable pattern toggles
 """
 
 from .config import LBYLConfig
+from .linter import LBYLRule
 from .pattern_detectors.base import BaseLBYLDetector, LBYLPattern
 
 __all__ = [
     "LBYLConfig",
     "LBYLPattern",
     "BaseLBYLDetector",
+    "LBYLRule",
 ]
src/linters/lbyl/linter.py (new file)
@@ -0,0 +1,67 @@
+"""
+Purpose: Main LBYL linter rule implementing PythonOnlyLintRule interface
+
+Scope: Entry point for LBYL anti-pattern detection in Python code
+
+Overview: Provides LBYLRule class that implements the PythonOnlyLintRule interface for
+    detecting Look Before You Leap anti-patterns in Python code. Validates that files
+    are Python with content, loads configuration, and delegates analysis to
+    PythonLBYLAnalyzer. Returns violations with EAFP suggestions for detected patterns.
+    Supports disabling via configuration and pattern-specific toggles.
+
+Dependencies: PythonOnlyLintRule, PythonLBYLAnalyzer, LBYLConfig
+
+Exports: LBYLRule
+
+Interfaces: check(context: BaseLintContext) -> list[Violation]
+
+Implementation: Single-file analysis with config-driven pattern detection
+"""
+
+from src.core.python_lint_rule import PythonOnlyLintRule
+from src.core.types import Violation
+
+from .config import LBYLConfig
+from .python_analyzer import PythonLBYLAnalyzer
+
+
+class LBYLRule(PythonOnlyLintRule[LBYLConfig]):
+    """Detects Look Before You Leap anti-patterns in Python code."""
+
+    def __init__(self, config: LBYLConfig | None = None) -> None:
+        """Initialize the LBYL rule."""
+        super().__init__(config)
+        self._analyzer = PythonLBYLAnalyzer()
+
+    @property
+    def rule_id(self) -> str:
+        """Unique identifier for this rule."""
+        return "lbyl"
+
+    @property
+    def rule_name(self) -> str:
+        """Human-readable name for this rule."""
+        return "Look Before You Leap"
+
+    @property
+    def description(self) -> str:
+        """Description of what this rule checks."""
+        return (
+            "Detects LBYL (Look Before You Leap) anti-patterns that should be "
+            "refactored to EAFP (Easier to Ask Forgiveness than Permission) style "
+            "using try/except blocks."
+        )
+
+    @property
+    def _config_key(self) -> str:
+        """Configuration key in metadata."""
+        return "lbyl"
+
+    @property
+    def _config_class(self) -> type[LBYLConfig]:
+        """Configuration class type."""
+        return LBYLConfig
+
+    def _analyze(self, code: str, file_path: str, config: LBYLConfig) -> list[Violation]:
+        """Analyze code for LBYL violations."""
+        return self._analyzer.analyze(code, file_path, config)
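For readers unfamiliar with the terminology in the rule description: LBYL code probes state before acting, EAFP code acts and handles the failure. A standalone illustration of the kind of rewrite the rule suggests (example code, not taken from the package):

# LBYL: check first, then act - two lookups, and racy for shared state
def port_lbyl(settings: dict[str, int]) -> int:
    if "port" in settings:
        return settings["port"]
    return 8080

# EAFP: act, and recover if the assumption fails - the style the linter suggests
def port_eafp(settings: dict[str, int]) -> int:
    try:
        return settings["port"]
    except KeyError:
        return 8080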
src/linters/lbyl/pattern_detectors/__init__.py
@@ -6,11 +6,15 @@ Scope: All AST-based pattern detectors for LBYL anti-pattern detection
 Overview: Exports pattern detector classes for the LBYL linter. Each detector is an
     AST NodeVisitor that identifies specific LBYL anti-patterns. Detectors include
     dict key checking, hasattr, isinstance, file exists, length checks, None checks,
-    string validation, and division safety checks.
+    string validators, and division zero-checks.
 
 Dependencies: ast module, base detector class
 
-Exports: BaseLBYLDetector, LBYLPattern
+Exports: BaseLBYLDetector, LBYLPattern, DictKeyDetector, DictKeyPattern, HasattrDetector,
+    HasattrPattern, IsinstanceDetector, IsinstancePattern, FileExistsDetector,
+    FileExistsPattern, LenCheckDetector, LenCheckPattern, NoneCheckDetector,
+    NoneCheckPattern, StringValidatorDetector, StringValidatorPattern,
+    DivisionCheckDetector, DivisionCheckPattern
 
 Interfaces: find_patterns(tree: ast.AST) -> list[LBYLPattern]
 
@@ -18,8 +22,32 @@ Implementation: Modular detector pattern for extensible LBYL detection
 """
 
 from .base import BaseLBYLDetector, LBYLPattern
+from .dict_key_detector import DictKeyDetector, DictKeyPattern
+from .division_check_detector import DivisionCheckDetector, DivisionCheckPattern
+from .file_exists_detector import FileExistsDetector, FileExistsPattern
+from .hasattr_detector import HasattrDetector, HasattrPattern
+from .isinstance_detector import IsinstanceDetector, IsinstancePattern
+from .len_check_detector import LenCheckDetector, LenCheckPattern
+from .none_check_detector import NoneCheckDetector, NoneCheckPattern
+from .string_validator_detector import StringValidatorDetector, StringValidatorPattern
 
 __all__ = [
     "BaseLBYLDetector",
     "LBYLPattern",
+    "DictKeyDetector",
+    "DictKeyPattern",
+    "DivisionCheckDetector",
+    "DivisionCheckPattern",
+    "FileExistsDetector",
+    "FileExistsPattern",
+    "HasattrDetector",
+    "HasattrPattern",
+    "IsinstanceDetector",
+    "IsinstancePattern",
+    "LenCheckDetector",
+    "LenCheckPattern",
+    "NoneCheckDetector",
+    "NoneCheckPattern",
+    "StringValidatorDetector",
+    "StringValidatorPattern",
 ]
src/linters/lbyl/pattern_detectors/base.py
@@ -7,19 +7,21 @@ Overview: Defines BaseLBYLDetector abstract class that all pattern detectors ext
     Inherits from ast.NodeVisitor for AST traversal. Defines LBYLPattern base dataclass
     for representing detected patterns with line number and column information. Each
     concrete detector implements find_patterns() to identify specific LBYL anti-patterns.
+    Uses Generic TypeVar for type-safe subclass pattern storage.
 
-Dependencies: abc, ast, dataclasses
+Dependencies: abc, ast, dataclasses, typing
 
 Exports: BaseLBYLDetector, LBYLPattern
 
 Interfaces: find_patterns(tree: ast.AST) -> list[LBYLPattern]
 
-Implementation: Abstract base with NodeVisitor pattern for extensibility
+Implementation: Abstract base with NodeVisitor pattern for extensibility, Generic for type safety
 """
 
 import ast
-from abc import ABC, abstractmethod
+from abc import ABC
 from dataclasses import dataclass
+from typing import Generic, TypeVar
 
 
 @dataclass
@@ -30,10 +32,23 @@ class LBYLPattern:
     column: int
 
 
-class BaseLBYLDetector(ast.NodeVisitor, ABC):
-    """Base class for LBYL pattern detectors."""
+PatternT = TypeVar("PatternT", bound=LBYLPattern)
+
+
+class BaseLBYLDetector(ast.NodeVisitor, ABC, Generic[PatternT]):
+    """Base class for LBYL pattern detectors.
+
+    Subclasses must initialize self._patterns as an empty list in __init__
+    and populate it in visit methods. The _patterns attribute stores subclass-
+    specific pattern types (DictKeyPattern, HasattrPattern, etc.) which all
+    inherit from LBYLPattern.
+
+    Type Parameters:
+        PatternT: The specific pattern type used by this detector
+    """
+
+    _patterns: list[PatternT]
 
-    @abstractmethod
     def find_patterns(self, tree: ast.AST) -> list[LBYLPattern]:
        """Find LBYL patterns in AST.
@@ -43,4 +58,6 @@ class BaseLBYLDetector(ast.NodeVisitor, ABC):
        Returns:
            List of detected LBYL patterns
        """
-        raise NotImplementedError
+        self._patterns = []
+        self.visit(tree)
+        return list(self._patterns)
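With @abstractmethod gone, find_patterns is now a small template method: it resets _patterns, runs the NodeVisitor traversal, and returns a copy, so a subclass only has to supply visit_* methods. A minimal hypothetical detector (not one shipped in the package) showing that extension point; it reuses the LBYLPattern and BaseLBYLDetector definitions from the hunk above and assumes LBYLPattern's fields are line_number and column, as the DictKeyDetector construction in the next file suggests:

import ast
from dataclasses import dataclass

@dataclass
class AssertPattern(LBYLPattern):
    """Toy pattern type: records bare assert statements."""
    test_source: str

class AssertDetector(BaseLBYLDetector[AssertPattern]):
    """Toy detector: treats every assert as a 'pattern' just to show the hooks."""

    def __init__(self) -> None:
        self._patterns: list[AssertPattern] = []

    def visit_Assert(self, node: ast.Assert) -> None:  # noqa: N802
        self._patterns.append(
            AssertPattern(
                line_number=node.lineno,
                column=node.col_offset,
                test_source=ast.unparse(node.test),
            )
        )
        self.generic_visit(node)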
src/linters/lbyl/pattern_detectors/dict_key_detector.py (new file)
@@ -0,0 +1,107 @@
+"""
+Purpose: AST-based detector for dict key LBYL patterns
+
+Scope: Detects 'if key in dict: dict[key]' patterns in Python code
+
+Overview: Provides DictKeyDetector class that uses AST traversal to find LBYL anti-patterns
+    involving dict key checking. Identifies patterns where code checks if a key exists in a
+    dict before accessing it (e.g., 'if key in d: d[key]'). Returns DictKeyPattern objects
+    containing the dict name, key expression, and location. Avoids false positives for
+    different dict/key combinations, walrus operator patterns, and dict.get() usage.
+
+Dependencies: ast module, base detector classes from pattern_detectors.base
+
+Exports: DictKeyPattern, DictKeyDetector
+
+Interfaces: DictKeyDetector.find_patterns(tree: ast.AST) -> list[DictKeyPattern]
+
+Implementation: AST NodeVisitor pattern with visit_If to detect in-check followed by subscript
+
+Suppressions:
+    - N802: visit_If follows Python AST visitor naming convention (camelCase required)
+    - invalid-name: visit_If follows Python AST visitor naming convention (camelCase required)
+"""
+
+import ast
+from dataclasses import dataclass
+
+from .base import BaseLBYLDetector, LBYLPattern
+
+
+@dataclass
+class DictKeyPattern(LBYLPattern):
+    """Pattern data for dict key LBYL anti-pattern."""
+
+    dict_name: str
+    key_expression: str
+
+
+def _is_walrus_get_pattern(test: ast.Compare) -> bool:
+    """Check if test is '(val := d.get(k)) is not None' pattern."""
+    if not isinstance(test.left, ast.NamedExpr):
+        return False
+    if not isinstance(test.left.value, ast.Call):
+        return False
+    call = test.left.value
+    return isinstance(call.func, ast.Attribute) and call.func.attr == "get"
+
+
+def _is_simple_in_compare(test: ast.Compare) -> bool:
+    """Check if Compare has single In operator and one comparator."""
+    return len(test.ops) == 1 and isinstance(test.ops[0], ast.In) and len(test.comparators) == 1
+
+
+def _extract_in_check(test: ast.expr) -> tuple[ast.expr | None, ast.expr | None]:
+    """Extract dict and key from 'key in dict' comparison."""
+    if isinstance(test, ast.NamedExpr):
+        return None, None
+    if not isinstance(test, ast.Compare):
+        return None, None
+    if not _is_simple_in_compare(test) or _is_walrus_get_pattern(test):
+        return None, None
+    return test.comparators[0], test.left
+
+
+class DictKeyDetector(BaseLBYLDetector[DictKeyPattern]):
+    """Detects 'if key in dict: dict[key]' LBYL patterns."""
+
+    def __init__(self) -> None:
+        """Initialize the detector."""
+        self._patterns: list[DictKeyPattern] = []
+
+    def visit_If(self, node: ast.If) -> None:  # noqa: N802 # pylint: disable=invalid-name
+        """Visit if statement to check for dict key LBYL pattern.
+
+        Args:
+            node: AST If node to analyze
+        """
+        dict_expr, key_expr = _extract_in_check(node.test)
+
+        if dict_expr is not None and key_expr is not None:
+            if self._body_has_subscript_match(node.body, dict_expr, key_expr):
+                self._patterns.append(self._create_pattern(node, dict_expr, key_expr))
+
+        self.generic_visit(node)
+
+    def _body_has_subscript_match(
+        self, body: list[ast.stmt], dict_expr: ast.expr, key_expr: ast.expr
+    ) -> bool:
+        """Check if body contains dict[key] access matching the in-check."""
+        expected = (ast.dump(dict_expr), ast.dump(key_expr))
+        return any(
+            (ast.dump(node.value), ast.dump(node.slice)) == expected
+            for stmt in body
+            for node in ast.walk(stmt)
+            if isinstance(node, ast.Subscript)
+        )
+
+    def _create_pattern(
+        self, node: ast.If, dict_expr: ast.expr, key_expr: ast.expr
+    ) -> DictKeyPattern:
+        """Create DictKeyPattern from detected pattern."""
+        return DictKeyPattern(
+            line_number=node.lineno,
+            column=node.col_offset,
+            dict_name=ast.unparse(dict_expr),
+            key_expression=ast.unparse(key_expr),
+        )
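A quick way to see the detector in action, sketched under the assumption that the module layout above is importable (i.e. run from a checkout where src.linters.lbyl is on the path):

import ast

from src.linters.lbyl.pattern_detectors import DictKeyDetector

SOURCE = """
def get_port(cfg):
    if "port" in cfg:
        return cfg["port"]
    return 8080
"""

tree = ast.parse(SOURCE)
patterns = DictKeyDetector().find_patterns(tree)
for p in patterns:
    # Expect one hit: dict_name == 'cfg', key_expression is the 'port' literal
    print(p.line_number, p.dict_name, p.key_expression)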