thailint 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
src/config.py ADDED
@@ -0,0 +1,386 @@
1
+ """
2
+ Purpose: Configuration management for CLI application with YAML/JSON support
3
+
4
+ Scope: Load, validate, save, and merge configuration from multiple sources
5
+
6
+ Overview: Provides comprehensive configuration management including loading from YAML and JSON files,
7
+ searching multiple default locations, merging configurations with clear precedence rules, schema
8
+ validation with helpful error messages, and safe persistence with atomic writes. Supports both
9
+ user-level and system-level configuration files with environment-specific overrides. Includes
10
+ default values for all settings to ensure the application works out of the box.
11
+
12
+ Dependencies: PyYAML for YAML parsing, json for JSON parsing, pathlib for file operations, logging
13
+
14
+ Exports: load_config(), save_config(), validate_config(), merge_configs(), ConfigError, DEFAULT_CONFIG
15
+
16
+ Interfaces: Configuration dictionaries, Path objects for file locations, validation results
17
+
18
+ Implementation: Multi-location config search, recursive dict merging, comprehensive validation
19
+ """
20
+
21
+ import json
22
+ import logging
23
+ from pathlib import Path
24
+ from typing import Any
25
+
26
+ import yaml
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+
31
class ConfigError(Exception):
    """Configuration-related errors.

    Raised throughout this module for parse failures (bad YAML/JSON),
    unsupported file extensions, schema validation failures, and save errors.
    """
33
+
34
+
35
# Default configuration values
# Every key has a default so the application works with no config file at all;
# user-supplied files are merged ON TOP of this dict (see load_config).
DEFAULT_CONFIG: dict[str, Any] = {
    "app_name": "{{PROJECT_NAME}}",  # template placeholder, substituted at project generation
    "version": "0.1.0",
    "log_level": "INFO",  # must be a stdlib logging level name (see _validate_log_level)
    "output_format": "text",  # one of "text", "json", "yaml" (see _validate_output_format)
    "greeting": "Hello",
    "max_retries": 3,  # non-negative integer (see _validate_max_retries)
    "timeout": 30,  # positive number; presumably seconds — TODO confirm units
}
45
+
46
# Configuration file search paths (in priority order)
# First match wins
# NOTE: paths are evaluated once at import time — a later os.chdir() will not
# change where the current-directory entries point.
CONFIG_LOCATIONS: list[Path] = [
    Path.cwd() / "config.yaml",  # Current directory YAML
    Path.cwd() / "config.json",  # Current directory JSON
    Path.home() / ".config" / "{{PROJECT_NAME}}" / "config.yaml",  # User config YAML
    Path.home() / ".config" / "{{PROJECT_NAME}}" / "config.json",  # User config JSON
    Path("/etc/{{PROJECT_NAME}}/config.yaml"),  # System config YAML (Unix/Linux)
]
55
+
56
+
57
def _load_and_merge_config(config_path: Path) -> dict[str, Any]:
    """Read a config file and overlay its values on a copy of the defaults."""
    user_values = _load_config_file(config_path)
    return merge_configs(DEFAULT_CONFIG.copy(), user_values)
62
+
63
+
64
def _validate_and_return_config(config: dict[str, Any], config_path: Path) -> dict[str, Any]:
    """Return config when valid; raise ConfigError listing every problem otherwise."""
    is_valid, errors = validate_config(config)
    if is_valid:
        logger.info("Loaded config from: %s", config_path)
        return config
    details = "\n".join(f" - {e}" for e in errors)
    raise ConfigError("Configuration validation failed:\n" + details)
72
+
73
+
74
def _try_load_from_location(location: Path) -> dict[str, Any] | None:
    """Attempt to load and validate a config file; return None on any failure.

    Failures are logged as warnings rather than raised so the caller can keep
    searching other locations.
    """
    try:
        config = _load_and_merge_config(location)
    except ConfigError as e:
        logger.warning("Failed to load config from %s: %s", location, e)
        return None
    is_valid, errors = validate_config(config)
    if not is_valid:
        logger.warning("Invalid config at %s: %s", location, errors)
        return None
    logger.info("Loaded config from: %s", location)
    return config
87
+
88
+
89
def _load_from_explicit_path(config_path: Path) -> dict[str, Any]:
    """Load config from a user-supplied path, falling back to defaults if missing.

    Unlike the default-location search, an existing-but-invalid file here
    raises ConfigError (via _validate_and_return_config) instead of being
    silently skipped.
    """
    if config_path.exists():
        merged = _load_and_merge_config(config_path)
        return _validate_and_return_config(merged, config_path)
    logger.warning("Config file not found: %s, using defaults", config_path)
    return DEFAULT_CONFIG.copy()
96
+
97
+
98
def _load_from_default_locations() -> dict[str, Any]:
    """Search CONFIG_LOCATIONS in priority order and return the first valid config.

    Returns:
        The first successfully loaded and validated configuration, or a copy
        of DEFAULT_CONFIG when no location yields one.
    """
    for location in CONFIG_LOCATIONS:
        if not location.exists():
            continue
        loaded_config = _try_load_from_location(location)
        # Explicit None check: the helper signals failure with None, and a
        # falsy-but-valid mapping must not be skipped by accident.
        if loaded_config is not None:
            return loaded_config
    logger.info("No config file found, using defaults")
    return DEFAULT_CONFIG.copy()
108
+
109
+
110
def load_config(config_path: Path | None = None) -> dict[str, Any]:
    """
    Load configuration with fallback to defaults.

    When an explicit path is given, that file is loaded (or defaults are used
    if it does not exist). Otherwise CONFIG_LOCATIONS are searched in priority
    order. Loaded values are merged over DEFAULT_CONFIG so every key is
    present, and the result is validated.

    Args:
        config_path: Explicit path to config file. If None, searches
            CONFIG_LOCATIONS in priority order.

    Returns:
        Configuration dictionary with defaults merged in.

    Raises:
        ConfigError: If config file exists but cannot be parsed or is invalid.

    Example:
        >>> config = load_config()
        >>> config = load_config(Path('custom-config.yaml'))
    """
    if config_path is None:
        return _load_from_default_locations()
    return _load_from_explicit_path(config_path)
135
+
136
+
137
def _parse_yaml_file(f, path: Path) -> dict[str, Any]:
    """Deserialize YAML from an open file handle, mapping parse errors to ConfigError.

    An empty document (safe_load returns None) is normalized to an empty dict.
    """
    try:
        parsed = yaml.safe_load(f)
    except yaml.YAMLError as e:
        raise ConfigError(f"Invalid YAML in {path}: {e}") from e
    return {} if parsed is None else parsed
144
+
145
+
146
+ def _parse_json_file(f, path: Path) -> dict[str, Any]:
147
+ """Parse JSON file and return data."""
148
+ try:
149
+ return json.load(f)
150
+ except json.JSONDecodeError as e:
151
+ raise ConfigError(f"Invalid JSON in {path}: {e}") from e
152
+
153
+
154
def _load_config_file(path: Path) -> dict[str, Any]:
    """
    Load config from YAML or JSON file based on extension.

    Args:
        path: Path to configuration file.

    Returns:
        Configuration dictionary.

    Raises:
        ConfigError: If file cannot be parsed.
    """
    try:
        return _parse_config_by_extension(path)
    except ConfigError:
        # Parse errors already carry the file path — re-raise untouched.
        raise
    except Exception as e:
        # Anything else (I/O errors, etc.) is wrapped with file context.
        raise ConfigError(f"Failed to load config from {path}: {e}") from e
173
+
174
+
175
def _parse_config_by_extension(path: Path) -> dict[str, Any]:
    """Open the file and dispatch to the YAML or JSON parser by suffix.

    Raises:
        ConfigError: For any suffix other than .yaml/.yml/.json.
    """
    with path.open() as f:
        suffix = path.suffix
        if suffix == ".json":
            return _parse_json_file(f, path)
        if suffix in (".yaml", ".yml"):
            return _parse_yaml_file(f, path)
        raise ConfigError(f"Unsupported config format: {suffix}")
183
+
184
+
185
def _validate_before_save(config: dict[str, Any]) -> None:
    """Reject an invalid configuration before anything is written to disk."""
    is_valid, errors = validate_config(config)
    if is_valid:
        return
    details = "\n".join(f" - {e}" for e in errors)
    raise ConfigError("Cannot save invalid configuration:\n" + details)
191
+
192
+
193
def _write_config_file(config: dict[str, Any], path: Path) -> None:
    """Write config to file, choosing the serializer from the file suffix."""
    # Dispatch table instead of an if/elif chain; unknown suffixes fail fast.
    writers = {
        ".yaml": _write_yaml_config,
        ".yml": _write_yaml_config,
        ".json": _write_json_config,
    }
    writer = writers.get(path.suffix)
    if writer is None:
        raise ConfigError(f"Unsupported config format: {path.suffix}")
    writer(config, path)
201
+
202
+
203
def _write_yaml_config(config: dict[str, Any], path: Path) -> None:
    """Serialize config to YAML at path (block style, insertion order preserved)."""
    with open(path, "w") as handle:
        yaml.dump(config, handle, default_flow_style=False, sort_keys=False)
207
+
208
+
209
+ def _write_json_config(config: dict[str, Any], path: Path) -> None:
210
+ """Write config as JSON."""
211
+ with path.open("w") as f:
212
+ json.dump(config, f, indent=2, sort_keys=False)
213
+
214
+
215
def save_config(config: dict[str, Any], config_path: Path | None = None) -> None:
    """
    Save configuration to file.

    Validates the configuration before touching the filesystem, then creates
    the parent directory if needed and writes in the format implied by the
    file extension (.yaml/.yml or .json).

    Args:
        config: Configuration dictionary to save.
        config_path: Path to save config. If None, uses first CONFIG_LOCATIONS entry.

    Raises:
        ConfigError: If config is invalid or cannot be saved.

    Example:
        >>> save_config({'log_level': 'DEBUG'})
        >>> save_config({'log_level': 'DEBUG'}, Path('my-config.yaml'))
    """
    path = config_path or CONFIG_LOCATIONS[0]
    # Validate FIRST: a rejected config must not leave an empty directory
    # tree behind as a side effect (previously mkdir ran before validation).
    _validate_before_save(config)
    path.parent.mkdir(parents=True, exist_ok=True)
    _write_and_log_config(config, path)
237
+
238
+
239
def _write_and_log_config(config: dict[str, Any], path: Path) -> None:
    """Persist the config and record success, wrapping unexpected failures.

    Raises:
        ConfigError: Re-raised unchanged from the writer, or wrapping any
            other exception with the target path for context.
    """
    try:
        _write_config_file(config, path)
        logger.info("Saved config to: %s", path)
    except ConfigError:
        raise
    except Exception as e:
        raise ConfigError(f"Failed to save config to {path}: {e}") from e
248
+
249
+
250
+ def _validate_required_keys(config: dict[str, Any], errors: list[str]) -> None:
251
+ """Validate that all required keys are present in config."""
252
+ required_keys = ["app_name", "log_level"]
253
+ for key in required_keys:
254
+ if key not in config:
255
+ errors.append(f"Missing required key: {key}")
256
+
257
+
258
+ def _validate_log_level(config: dict[str, Any], errors: list[str]) -> None:
259
+ """Validate log level is a valid value."""
260
+ valid_log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
261
+ if "log_level" in config:
262
+ if config["log_level"] not in valid_log_levels:
263
+ errors.append(
264
+ f"Invalid log_level: {config['log_level']}. "
265
+ f"Must be one of: {', '.join(valid_log_levels)}"
266
+ )
267
+
268
+
269
+ def _validate_output_format(config: dict[str, Any], errors: list[str]) -> None:
270
+ """Validate output format is a valid value."""
271
+ valid_formats = ["text", "json", "yaml"]
272
+ if "output_format" in config:
273
+ if config["output_format"] not in valid_formats:
274
+ errors.append(
275
+ f"Invalid output_format: {config['output_format']}. "
276
+ f"Must be one of: {', '.join(valid_formats)}"
277
+ )
278
+
279
+
280
+ def _validate_max_retries(config: dict[str, Any], errors: list[str]) -> None:
281
+ """Validate max_retries configuration value."""
282
+ if "max_retries" in config:
283
+ if not isinstance(config["max_retries"], int) or config["max_retries"] < 0:
284
+ errors.append("max_retries must be a non-negative integer")
285
+
286
+
287
+ def _validate_timeout(config: dict[str, Any], errors: list[str]) -> None:
288
+ """Validate timeout configuration value."""
289
+ if "timeout" in config:
290
+ if not isinstance(config["timeout"], (int, float)) or config["timeout"] <= 0:
291
+ errors.append("timeout must be a positive number")
292
+
293
+
294
def _validate_numeric_values(config: dict[str, Any], errors: list[str]) -> None:
    """Run every numeric-field validator against the config."""
    for check in (_validate_max_retries, _validate_timeout):
        check(config, errors)
298
+
299
+
300
+ def _validate_string_values(config: dict[str, Any], errors: list[str]) -> None:
301
+ """Validate string configuration values."""
302
+ if "app_name" in config:
303
+ if not isinstance(config["app_name"], str) or not config["app_name"].strip():
304
+ errors.append("app_name must be a non-empty string")
305
+
306
+
307
def validate_config(config: dict[str, Any]) -> tuple[bool, list[str]]:
    """
    Validate configuration schema and values.

    Checks for required keys, validates value types and ranges, and ensures
    enum values are within allowed sets.

    Args:
        config: Configuration dictionary to validate.

    Returns:
        Tuple of (is_valid, error_messages). is_valid is True if no errors,
        error_messages is list of validation error strings.

    Example:
        >>> is_valid, errors = validate_config(config)
        >>> if not is_valid:
        ...     for error in errors:
        ...         print(f"Error: {error}")
    """
    errors: list[str] = []
    # Each validator appends its own messages; order determines report order.
    checks = (
        _validate_required_keys,
        _validate_log_level,
        _validate_output_format,
        _validate_numeric_values,
        _validate_string_values,
    )
    for check in checks:
        check(config, errors)
    return not errors, errors
336
+
337
+
338
def merge_configs(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
    """
    Merge two configurations, with override taking precedence.

    Nested dictionaries are merged recursively; any non-dict override value
    (or a dict overriding a non-dict) simply replaces the base value. Neither
    input dictionary is mutated.

    Args:
        base: Base configuration.
        override: Override configuration (takes precedence).

    Returns:
        Merged configuration dictionary.

    Example:
        >>> base = {'a': 1, 'b': {'c': 2, 'd': 3}}
        >>> override = {'b': {'d': 4}, 'e': 5}
        >>> merged = merge_configs(base, override)
        >>> # Result: {'a': 1, 'b': {'c': 2, 'd': 4}, 'e': 5}
    """
    merged = dict(base)
    for key, incoming in override.items():
        current = merged.get(key)
        if isinstance(current, dict) and isinstance(incoming, dict):
            # Both sides are mappings: recurse so sibling keys survive.
            merged[key] = merge_configs(current, incoming)
        else:
            merged[key] = incoming
    return merged
369
+
370
+
371
def get_config_path() -> Path | None:
    """
    Find the first existing config file in CONFIG_LOCATIONS.

    Returns:
        Path to config file if found, None otherwise.

    Example:
        >>> path = get_config_path()
        >>> if path:
        ...     print(f"Config at: {path}")
    """
    return next((location for location in CONFIG_LOCATIONS if location.exists()), None)
src/core/__init__.py ADDED
@@ -0,0 +1,17 @@
1
"""Core framework components for thai-lint.

This package contains the foundational abstractions and types that
power the plugin architecture.
"""

from .base import BaseLintContext, BaseLintRule
from .registry import RuleRegistry
from .types import Severity, Violation

# Public API of the core package; these names are re-exported here so
# consumers can import them from `core` without knowing the submodule layout.
__all__ = [
    "BaseLintContext",
    "BaseLintRule",
    "RuleRegistry",
    "Severity",
    "Violation",
]
src/core/base.py ADDED
@@ -0,0 +1,122 @@
1
+ """
2
+ Purpose: Abstract base classes defining the core plugin architecture interfaces
3
+
4
+ Scope: Foundation interfaces for all linting rules, contexts, and plugin implementations
5
+
6
+ Overview: Establishes the contract that all linting plugins must follow through abstract base
7
+ classes, enabling the plugin architecture that allows dynamic rule discovery and execution.
8
+ Defines BaseLintRule which all concrete linting rules inherit from, specifying required
9
+ properties (rule_id, rule_name, description) and the check() method for violation detection.
10
+ Provides BaseLintContext as the interface for accessing file information during analysis,
11
+ exposing file_path, file_content, and language properties. These abstractions enable the
12
+ rule registry to discover and instantiate rules dynamically without tight coupling, supporting
13
+ the extensible plugin system where new rules can be added by simply placing them in the
14
+ appropriate directory structure.
15
+
16
+ Dependencies: abc for abstract base class support, pathlib for Path types, Violation from types
17
+
18
+ Exports: BaseLintRule (abstract rule interface), BaseLintContext (abstract context interface)
19
+
20
+ Interfaces: BaseLintRule.check(context) -> list[Violation], BaseLintContext properties
21
+ (file_path, file_content, language), all abstract methods must be implemented by subclasses
22
+
23
+ Implementation: ABC-based interface definitions with @abstractmethod decorators, property-based
24
+ API for rule metadata, context-based execution pattern for rule checking
25
+ """
26
+
27
+ from abc import ABC, abstractmethod
28
+ from pathlib import Path
29
+
30
+ from .types import Violation
31
+
32
+
33
class BaseLintContext(ABC):
    """Base class for lint context.

    A lint context provides all the information a rule needs to analyze
    a file, including the file path, content, and language. All three
    properties are abstract; concrete subclasses must override each one.
    """

    @property
    @abstractmethod
    def file_path(self) -> Path | None:
        """Get the file path being analyzed.

        Returns:
            Path to the file, or None if analyzing content without a file.
        """
        raise NotImplementedError("Subclasses must implement file_path")

    @property
    @abstractmethod
    def file_content(self) -> str | None:
        """Get the file content being analyzed.

        Returns:
            Content of the file as a string, or None if file not available.
        """
        raise NotImplementedError("Subclasses must implement file_content")

    @property
    @abstractmethod
    def language(self) -> str:
        """Get the programming language of the file.

        Returns:
            Language identifier (e.g., 'python', 'javascript', 'go').
        """
        raise NotImplementedError("Subclasses must implement language")
69
+
70
+
71
class BaseLintRule(ABC):
    """Base class for all linting rules.

    All concrete linting rules must inherit from this class and implement
    all abstract methods and properties. Rules are discovered and registered
    automatically by the rule registry (which skips abstract subclasses and
    instantiates concrete ones with a no-argument constructor).
    """

    @property
    @abstractmethod
    def rule_id(self) -> str:
        """Unique identifier for this rule.

        The rule ID should follow the format 'category.rule-name', e.g.,
        'file-placement.deny-pattern' or 'naming.class-pascal-case'.
        The registry uses this value as the lookup key, so it must be
        unique across all registered rules.

        Returns:
            Unique rule identifier.
        """
        raise NotImplementedError("Subclasses must implement rule_id")

    @property
    @abstractmethod
    def rule_name(self) -> str:
        """Human-readable name for this rule.

        Returns:
            Descriptive name for display to users.
        """
        raise NotImplementedError("Subclasses must implement rule_name")

    @property
    @abstractmethod
    def description(self) -> str:
        """Description of what this rule checks.

        Returns:
            Detailed description of the rule's purpose and behavior.
        """
        raise NotImplementedError("Subclasses must implement description")

    @abstractmethod
    def check(self, context: BaseLintContext) -> list[Violation]:
        """Check for violations in the given context.

        Args:
            context: The lint context containing file information.

        Returns:
            List of violations found. Empty list if no violations.
        """
        raise NotImplementedError("Subclasses must implement check")
src/core/registry.py ADDED
@@ -0,0 +1,170 @@
1
+ """
2
+ Purpose: Rule registry with automatic plugin discovery and registration
3
+
4
+ Scope: Dynamic rule management and discovery across all linter plugin packages
5
+
6
+ Overview: Implements the plugin discovery system that enables the extensible architecture by
7
+ automatically finding and registering linting rules from specified packages without requiring
8
+ explicit registration code. The RuleRegistry maintains a collection of discovered rules indexed
9
+ by rule_id, providing methods to register individual rules, retrieve rules by identifier, and
10
+ list all available rules. Auto-discovery works by scanning Python packages for classes that
11
+ inherit from BaseLintRule, filtering out abstract base classes, instantiating concrete rule
12
+ classes, and registering them for use by the orchestrator. This enables developers to add new
13
+ rules simply by creating a class in the appropriate package structure without modifying any
14
+ framework code. The registry handles import errors gracefully and supports both package-level
15
+ and module-level discovery patterns.
16
+
17
+ Dependencies: importlib for dynamic module loading, inspect for class introspection,
18
+ pkgutil for package traversal, BaseLintRule for type checking
19
+
20
+ Exports: RuleRegistry class with register(), get(), list_all(), and discover_rules() methods
21
+
22
+ Interfaces: register(rule: BaseLintRule) -> None, get(rule_id: str) -> BaseLintRule | None,
23
+ list_all() -> list[BaseLintRule], discover_rules(package_path: str) -> int
24
+
25
+ Implementation: Package scanning with pkgutil.iter_modules(), class introspection with inspect,
26
+ subclass detection for BaseLintRule, abstract class filtering, graceful error handling for
27
+ failed imports, duplicate rule_id validation
28
+ """
29
+
30
+ import importlib
31
+ import inspect
32
+ import pkgutil
33
+ from typing import Any
34
+
35
+ from .base import BaseLintRule
36
+
37
+
38
class RuleRegistry:
    """Registry for linting rules with auto-discovery.

    The registry maintains a collection of registered rules and provides
    methods to register, retrieve, and discover rules dynamically. Discovery
    scans a Python package for concrete BaseLintRule subclasses and registers
    an instance of each; import failures are swallowed so one broken plugin
    module cannot break the whole discovery pass.
    """

    def __init__(self) -> None:
        """Initialize empty registry."""
        # Rules keyed by their unique rule_id string.
        self._rules: dict[str, BaseLintRule] = {}

    def register(self, rule: BaseLintRule) -> None:
        """Register a new rule.

        Args:
            rule: The rule instance to register.

        Raises:
            ValueError: If a rule with the same ID is already registered.
        """
        rule_id = rule.rule_id

        if rule_id in self._rules:
            raise ValueError(f"Rule {rule_id} already registered")

        self._rules[rule_id] = rule

    def get(self, rule_id: str) -> BaseLintRule | None:
        """Get a rule by ID.

        Args:
            rule_id: The unique identifier of the rule.

        Returns:
            The rule instance if found, None otherwise.
        """
        return self._rules.get(rule_id)

    def list_all(self) -> list[BaseLintRule]:
        """Get all registered rules.

        Returns:
            List of all registered rule instances.
        """
        return list(self._rules.values())

    def discover_rules(self, package_path: str) -> int:
        """Discover and register rules from a package.

        This method automatically discovers all concrete BaseLintRule
        subclasses in the specified package and registers them.

        Args:
            package_path: Python package path (e.g., 'src.linters').

        Returns:
            Number of rules discovered and registered.
        """
        try:
            package = importlib.import_module(package_path)
        except ImportError:
            # Missing/broken package is not fatal: report zero rules found.
            return 0

        # A plain module (no __path__) cannot be iterated with pkgutil;
        # scan it directly instead of walking submodules.
        if not hasattr(package, "__path__"):
            return self._discover_from_module(package_path)

        return self._discover_from_package_modules(package_path, package)

    def _discover_from_package_modules(self, package_path: str, package: Any) -> int:
        """Discover rules from all modules in a package."""
        discovered_count = 0
        # iter_modules yields (finder, name, is_pkg); only the name is needed.
        for _, module_name, _ in pkgutil.iter_modules(package.__path__):
            full_module_name = f"{package_path}.{module_name}"
            discovered_count += self._try_discover_from_module(full_module_name)
        return discovered_count

    def _try_discover_from_module(self, module_name: str) -> int:
        """Try to discover rules from a module, return 0 on error."""
        try:
            return self._discover_from_module(module_name)
        except (ImportError, AttributeError):
            return 0

    def _discover_from_module(self, module_path: str) -> int:
        """Discover rules from a specific module.

        Args:
            module_path: Full module path to search.

        Returns:
            Number of rules discovered from this module.
        """
        try:
            module = importlib.import_module(module_path)
        except (ImportError, AttributeError):
            return 0

        return self._register_rules_from_module(module)

    def _register_rules_from_module(self, module: Any) -> int:
        """Register all rule classes from a module."""
        discovered_count = 0
        for _name, obj in inspect.getmembers(module):
            if not self._is_rule_class(obj):
                continue
            if self._try_register_rule_class(obj):
                discovered_count += 1
        return discovered_count

    def _try_register_rule_class(self, rule_class: Any) -> bool:
        """Try to instantiate and register a rule class.

        Returns False when the class cannot be constructed with no arguments
        (TypeError), lacks expected attributes (AttributeError), or its
        rule_id is already registered (ValueError from register()).
        """
        try:
            rule_instance = rule_class()
            self.register(rule_instance)
            return True
        except (TypeError, AttributeError, ValueError):
            return False

    def _is_rule_class(self, obj: Any) -> bool:
        """Check if an object is a valid rule class.

        Args:
            obj: Object to check.

        Returns:
            True if obj is a concrete BaseLintRule subclass.
        """
        return (
            inspect.isclass(obj)
            and issubclass(obj, BaseLintRule)
            and obj is not BaseLintRule  # Don't instantiate the base class
            and not inspect.isabstract(obj)  # Don't instantiate abstract classes
        )