lucidscan-0.5.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. lucidscan/__init__.py +12 -0
  2. lucidscan/bootstrap/__init__.py +26 -0
  3. lucidscan/bootstrap/paths.py +160 -0
  4. lucidscan/bootstrap/platform.py +111 -0
  5. lucidscan/bootstrap/validation.py +76 -0
  6. lucidscan/bootstrap/versions.py +119 -0
  7. lucidscan/cli/__init__.py +50 -0
  8. lucidscan/cli/__main__.py +8 -0
  9. lucidscan/cli/arguments.py +405 -0
  10. lucidscan/cli/commands/__init__.py +64 -0
  11. lucidscan/cli/commands/autoconfigure.py +294 -0
  12. lucidscan/cli/commands/help.py +69 -0
  13. lucidscan/cli/commands/init.py +656 -0
  14. lucidscan/cli/commands/list_scanners.py +59 -0
  15. lucidscan/cli/commands/scan.py +307 -0
  16. lucidscan/cli/commands/serve.py +142 -0
  17. lucidscan/cli/commands/status.py +84 -0
  18. lucidscan/cli/commands/validate.py +105 -0
  19. lucidscan/cli/config_bridge.py +152 -0
  20. lucidscan/cli/exit_codes.py +17 -0
  21. lucidscan/cli/runner.py +284 -0
  22. lucidscan/config/__init__.py +29 -0
  23. lucidscan/config/ignore.py +178 -0
  24. lucidscan/config/loader.py +431 -0
  25. lucidscan/config/models.py +316 -0
  26. lucidscan/config/validation.py +645 -0
  27. lucidscan/core/__init__.py +3 -0
  28. lucidscan/core/domain_runner.py +463 -0
  29. lucidscan/core/git.py +174 -0
  30. lucidscan/core/logging.py +34 -0
  31. lucidscan/core/models.py +207 -0
  32. lucidscan/core/streaming.py +340 -0
  33. lucidscan/core/subprocess_runner.py +164 -0
  34. lucidscan/detection/__init__.py +21 -0
  35. lucidscan/detection/detector.py +154 -0
  36. lucidscan/detection/frameworks.py +270 -0
  37. lucidscan/detection/languages.py +328 -0
  38. lucidscan/detection/tools.py +229 -0
  39. lucidscan/generation/__init__.py +15 -0
  40. lucidscan/generation/config_generator.py +275 -0
  41. lucidscan/generation/package_installer.py +330 -0
  42. lucidscan/mcp/__init__.py +20 -0
  43. lucidscan/mcp/formatter.py +510 -0
  44. lucidscan/mcp/server.py +297 -0
  45. lucidscan/mcp/tools.py +1049 -0
  46. lucidscan/mcp/watcher.py +237 -0
  47. lucidscan/pipeline/__init__.py +17 -0
  48. lucidscan/pipeline/executor.py +187 -0
  49. lucidscan/pipeline/parallel.py +181 -0
  50. lucidscan/plugins/__init__.py +40 -0
  51. lucidscan/plugins/coverage/__init__.py +28 -0
  52. lucidscan/plugins/coverage/base.py +160 -0
  53. lucidscan/plugins/coverage/coverage_py.py +454 -0
  54. lucidscan/plugins/coverage/istanbul.py +411 -0
  55. lucidscan/plugins/discovery.py +107 -0
  56. lucidscan/plugins/enrichers/__init__.py +61 -0
  57. lucidscan/plugins/enrichers/base.py +63 -0
  58. lucidscan/plugins/linters/__init__.py +26 -0
  59. lucidscan/plugins/linters/base.py +125 -0
  60. lucidscan/plugins/linters/biome.py +448 -0
  61. lucidscan/plugins/linters/checkstyle.py +393 -0
  62. lucidscan/plugins/linters/eslint.py +368 -0
  63. lucidscan/plugins/linters/ruff.py +498 -0
  64. lucidscan/plugins/reporters/__init__.py +45 -0
  65. lucidscan/plugins/reporters/base.py +30 -0
  66. lucidscan/plugins/reporters/json_reporter.py +79 -0
  67. lucidscan/plugins/reporters/sarif_reporter.py +303 -0
  68. lucidscan/plugins/reporters/summary_reporter.py +61 -0
  69. lucidscan/plugins/reporters/table_reporter.py +81 -0
  70. lucidscan/plugins/scanners/__init__.py +57 -0
  71. lucidscan/plugins/scanners/base.py +60 -0
  72. lucidscan/plugins/scanners/checkov.py +484 -0
  73. lucidscan/plugins/scanners/opengrep.py +464 -0
  74. lucidscan/plugins/scanners/trivy.py +492 -0
  75. lucidscan/plugins/test_runners/__init__.py +27 -0
  76. lucidscan/plugins/test_runners/base.py +111 -0
  77. lucidscan/plugins/test_runners/jest.py +381 -0
  78. lucidscan/plugins/test_runners/karma.py +481 -0
  79. lucidscan/plugins/test_runners/playwright.py +434 -0
  80. lucidscan/plugins/test_runners/pytest.py +598 -0
  81. lucidscan/plugins/type_checkers/__init__.py +27 -0
  82. lucidscan/plugins/type_checkers/base.py +106 -0
  83. lucidscan/plugins/type_checkers/mypy.py +355 -0
  84. lucidscan/plugins/type_checkers/pyright.py +313 -0
  85. lucidscan/plugins/type_checkers/typescript.py +280 -0
  86. lucidscan-0.5.12.dist-info/METADATA +242 -0
  87. lucidscan-0.5.12.dist-info/RECORD +91 -0
  88. lucidscan-0.5.12.dist-info/WHEEL +5 -0
  89. lucidscan-0.5.12.dist-info/entry_points.txt +34 -0
  90. lucidscan-0.5.12.dist-info/licenses/LICENSE +201 -0
  91. lucidscan-0.5.12.dist-info/top_level.txt +1 -0
@@ -0,0 +1,237 @@
+"""File watcher for incremental LucidScan checks.
+
+Watches for file changes and runs incremental quality checks.
+"""
+
+from __future__ import annotations
+
+import asyncio
+from pathlib import Path
+from typing import Any, Callable, Dict, List, Optional, Set
+
+from watchdog.observers import Observer
+from watchdog.events import FileSystemEventHandler
+
+from lucidscan.config import LucidScanConfig
+from lucidscan.core.logging import get_logger
+from lucidscan.mcp.tools import MCPToolExecutor
+
+LOGGER = get_logger(__name__)
+
+
+class LucidScanFileWatcher:
+    """Watches for file changes and runs incremental checks."""
+
+    # Default patterns to ignore
+    DEFAULT_IGNORE_PATTERNS = [
+        ".git",
+        "__pycache__",
+        "node_modules",
+        ".venv",
+        "venv",
+        ".lucidscan",
+        ".mypy_cache",
+        ".pytest_cache",
+        ".ruff_cache",
+        "*.pyc",
+        "*.pyo",
+        ".coverage",
+        "htmlcov",
+        "dist",
+        "build",
+        "*.egg-info",
+    ]
+
+    def __init__(
+        self,
+        project_root: Path,
+        config: LucidScanConfig,
+        debounce_ms: int = 1000,
+        ignore_patterns: Optional[List[str]] = None,
+    ):
+        """Initialize LucidScanFileWatcher.
+
+        Args:
+            project_root: Project root directory to watch.
+            config: LucidScan configuration.
+            debounce_ms: Debounce delay in milliseconds.
+            ignore_patterns: Additional patterns to ignore.
+        """
+        self.project_root = project_root
+        self.config = config
+        self.debounce_ms = debounce_ms
+        self.executor = MCPToolExecutor(project_root, config)
+
+        # Combine default and custom ignore patterns
+        self.ignore_patterns = set(self.DEFAULT_IGNORE_PATTERNS)
+        if ignore_patterns:
+            self.ignore_patterns.update(ignore_patterns)
+
+        self._pending_files: Set[Path] = set()
+        self._debounce_task: Optional[asyncio.Task] = None
+        self._callbacks: List[Callable[[Dict[str, Any]], None]] = []
+        self._observer: Optional[Observer] = None  # type: ignore[valid-type]
+        self._running = False
+
+    def on_result(self, callback: Callable[[Dict[str, Any]], None]):
+        """Register callback for scan results.
+
+        Args:
+            callback: Function to call with scan results.
+        """
+        self._callbacks.append(callback)
+
+    async def start(self):
+        """Start watching for file changes."""
+        if self._running:
+            LOGGER.warning("File watcher already running")
+            return
+
+        self._running = True
+        handler = _FileChangeHandler(self._on_file_change)
+        self._observer = Observer()
+        self._observer.schedule(handler, str(self.project_root), recursive=True)
+        self._observer.start()
+
+        LOGGER.info(f"Watching {self.project_root} for changes...")
+
+        try:
+            while self._running:
+                await asyncio.sleep(0.1)
+        finally:
+            self.stop()
+
+    def stop(self):
+        """Stop the file watcher."""
+        self._running = False
+        if self._observer:
+            self._observer.stop()
+            self._observer.join()
+            self._observer = None
+        LOGGER.info("File watcher stopped")
+
+    def _on_file_change(self, path: Path):
+        """Handle file change event.
+
+        Args:
+            path: Path to the changed file.
+        """
+        # Skip ignored paths
+        if self._should_ignore(path):
+            return
+
+        # Skip non-files
+        if not path.is_file():
+            return
+
+        LOGGER.debug(f"File changed: {path}")
+
+        # Add to pending files
+        self._pending_files.add(path)
+
+        # Cancel existing debounce task
+        if self._debounce_task and not self._debounce_task.done():
+            self._debounce_task.cancel()
+
+        # Schedule new debounce task
+        loop = asyncio.get_event_loop()
+        self._debounce_task = loop.create_task(self._process_pending())
+
+    async def _process_pending(self):
+        """Process pending file changes after debounce."""
+        await asyncio.sleep(self.debounce_ms / 1000)
+
+        files = list(self._pending_files)
+        self._pending_files.clear()
+
+        if not files:
+            return
+
+        LOGGER.info(f"Processing {len(files)} changed file(s)...")
+
+        # Get relative paths
+        relative_files = []
+        for f in files:
+            try:
+                rel = f.relative_to(self.project_root)
+                relative_files.append(str(rel))
+            except ValueError:
+                # File not under project root
+                relative_files.append(str(f))
+
+        try:
+            # Run incremental check
+            result = await self.executor.scan(
+                domains=["all"],
+                files=relative_files,
+            )
+
+            # Add file list to result
+            result["changed_files"] = relative_files
+
+            # Notify callbacks
+            for callback in self._callbacks:
+                try:
+                    callback(result)
+                except Exception as e:
+                    LOGGER.error(f"Callback error: {e}")
+
+        except Exception as e:
+            LOGGER.error(f"Scan failed: {e}")
+            # Notify callbacks of error
+            error_result = {
+                "error": str(e),
+                "changed_files": relative_files,
+            }
+            for callback in self._callbacks:
+                try:
+                    callback(error_result)
+                except Exception as ce:
+                    LOGGER.error(f"Callback error: {ce}")
+
+    def _should_ignore(self, path: Path) -> bool:
+        """Check if path should be ignored.
+
+        Args:
+            path: Path to check.
+
+        Returns:
+            True if path should be ignored.
+        """
+        path_str = str(path)
+        path_parts = path.parts
+
+        for pattern in self.ignore_patterns:
+            # Check if pattern matches any path component
+            if pattern in path_parts:
+                return True
+            # Check glob-style patterns
+            if pattern.startswith("*") and path_str.endswith(pattern[1:]):
+                return True
+            # Check if pattern is in path string
+            if pattern in path_str:
+                return True
+
+        return False
+
+
+class _FileChangeHandler(FileSystemEventHandler):
+    """Watchdog event handler for file changes."""
+
+    def __init__(self, callback: Callable[[Path], None]):
+        """Initialize handler.
+
+        Args:
+            callback: Function to call on file change.
+        """
+        self.callback = callback
+
+    def on_modified(self, event):
+        """Handle file modification."""
+        if not event.is_directory:
+            self.callback(Path(event.src_path))
+
+    def on_created(self, event):
+        """Handle file creation."""
+        if not event.is_directory:
+            self.callback(Path(event.src_path))
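
The watcher above debounces bursts of file events into a single incremental scan and fans results out to registered callbacks. A minimal usage sketch (not part of the package diff), assuming a LucidScanConfig instance named config is already in hand from the package's config loader:

# Illustrative only -- not part of the package diff. Assumes an existing
# LucidScanConfig instance named `config`; the module path matches
# lucidscan/mcp/watcher.py shown above.
import asyncio
from pathlib import Path

from lucidscan.mcp.watcher import LucidScanFileWatcher


def print_summary(result: dict) -> None:
    # Called after each debounced batch of changes (or on scan error).
    if "error" in result:
        print(f"scan failed: {result['error']}")
    else:
        print(f"checked {len(result.get('changed_files', []))} changed file(s)")


async def main(config) -> None:
    watcher = LucidScanFileWatcher(
        project_root=Path.cwd(),
        config=config,
        debounce_ms=500,            # batch rapid saves into one incremental scan
        ignore_patterns=["*.tmp"],  # merged with DEFAULT_IGNORE_PATTERNS
    )
    watcher.on_result(print_summary)
    await watcher.start()  # runs until watcher.stop() is called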
@@ -0,0 +1,17 @@
+"""Pipeline orchestration for lucidscan.
+
+Manages the execution of scan pipeline stages:
+1. Scanner execution (parallel by default)
+2. Enricher execution (sequential, in configured order)
+3. Result aggregation (metadata and summary)
+"""
+
+from lucidscan.pipeline.executor import PipelineConfig, PipelineExecutor
+from lucidscan.pipeline.parallel import ParallelScannerExecutor, ScannerResult
+
+__all__ = [
+    "PipelineConfig",
+    "PipelineExecutor",
+    "ParallelScannerExecutor",
+    "ScannerResult",
+]
@@ -0,0 +1,187 @@
+"""Pipeline executor for orchestrating scan stages."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
+
+from lucidscan.core.logging import get_logger
+from lucidscan.core.models import (
+    ScanContext,
+    ScanMetadata,
+    ScanResult,
+    UnifiedIssue,
+)
+from lucidscan.pipeline.parallel import ParallelScannerExecutor, ScannerResult
+
+if TYPE_CHECKING:
+    from lucidscan.config.models import LucidScanConfig
+
+LOGGER = get_logger(__name__)
+
+
+@dataclass
+class PipelineConfig:
+    """Configuration for pipeline execution."""
+
+    sequential_scanners: bool = False
+    max_workers: int = 4
+    enricher_order: List[str] = field(default_factory=list)
+
+
+class PipelineExecutor:
+    """Orchestrates the scan pipeline stages.
+
+    Pipeline stages:
+    1. Scanner execution (parallel by default)
+    2. Enricher execution (sequential, in configured order)
+    3. Result aggregation
+
+    Reporter execution is handled separately in CLI.
+    """
+
+    def __init__(
+        self,
+        config: "LucidScanConfig",
+        pipeline_config: Optional[PipelineConfig] = None,
+        lucidscan_version: str = "unknown",
+    ) -> None:
+        """Initialize the pipeline executor.
+
+        Args:
+            config: LucidScan configuration.
+            pipeline_config: Optional pipeline-specific configuration.
+            lucidscan_version: Version string for metadata.
+        """
+        self._config = config
+        self._pipeline_config = pipeline_config or PipelineConfig()
+        self._lucidscan_version = lucidscan_version
+
+    def execute(
+        self,
+        scanner_names: List[str],
+        context: ScanContext,
+    ) -> ScanResult:
+        """Execute the full pipeline and return results.
+
+        Args:
+            scanner_names: List of scanner plugin names to run.
+            context: Scan context for execution.
+
+        Returns:
+            Aggregated ScanResult with all issues and metadata.
+        """
+        start_time = datetime.now(timezone.utc)
+
+        # Stage 1: Scanner Execution (parallel)
+        all_issues, scanner_results = self._execute_scanners(scanner_names, context)
+
+        # Stage 2: Enricher Execution (sequential)
+        enriched_issues = self._execute_enrichers(all_issues, context)
+
+        # Stage 3: Result Aggregation
+        end_time = datetime.now(timezone.utc)
+        duration_ms = int((end_time - start_time).total_seconds() * 1000)
+
+        result = ScanResult(issues=enriched_issues)
+        result.metadata = ScanMetadata(
+            lucidscan_version=self._lucidscan_version,
+            scan_started_at=start_time.isoformat(),
+            scan_finished_at=end_time.isoformat(),
+            duration_ms=duration_ms,
+            project_root=str(context.project_root),
+            scanners_used=self._format_scanners_used(scanner_results),
+        )
+        result.summary = result.compute_summary()
+
+        return result
+
+    def _execute_scanners(
+        self,
+        scanner_names: List[str],
+        context: ScanContext,
+    ) -> tuple[List[UnifiedIssue], List[ScannerResult]]:
+        """Execute scanner stage."""
+        executor = ParallelScannerExecutor(
+            max_workers=self._pipeline_config.max_workers,
+            sequential=self._pipeline_config.sequential_scanners,
+        )
+        return executor.execute(scanner_names, context)
+
+    def _execute_enrichers(
+        self,
+        issues: List[UnifiedIssue],
+        context: ScanContext,
+    ) -> List[UnifiedIssue]:
+        """Execute enricher stage in configured order.
+
+        Enrichers run sequentially, each receiving the output
+        of the previous enricher.
+        """
+        # Import here to avoid circular imports
+        from lucidscan.plugins.enrichers import get_enricher_plugin
+
+        enriched = issues
+
+        # Get enricher order from pipeline config, then from main config
+        enricher_order = self._pipeline_config.enricher_order
+        if not enricher_order:
+            enricher_order = self._get_enricher_order_from_config()
+
+        for enricher_name in enricher_order:
+            enricher = get_enricher_plugin(enricher_name)
+            if not enricher:
+                LOGGER.warning(
+                    f"Enricher plugin '{enricher_name}' not found, skipping"
+                )
+                continue
+
+            LOGGER.info(f"Running {enricher_name} enricher...")
+
+            try:
+                enriched = enricher.enrich(enriched, context)
+                LOGGER.debug(f"{enricher_name}: processed {len(enriched)} issues")
+            except Exception as e:
+                LOGGER.error(f"Enricher {enricher_name} failed: {e}")
+                # Continue with unenriched issues on failure
+
+        return enriched
+
+    def _get_enricher_order_from_config(self) -> List[str]:
+        """Extract enricher order from config.
+
+        Looks for pipeline.enrichers or enabled enrichers in config.
+        """
+        # Check for explicit pipeline ordering in config
+        if hasattr(self._config, "pipeline") and self._config.pipeline:
+            pipeline = self._config.pipeline
+            if hasattr(pipeline, "enrichers") and pipeline.enrichers:
+                return pipeline.enrichers
+
+        # Fall back to enabled enrichers from enrichers config
+        enabled = []
+        for name, enricher_config in self._config.enrichers.items():
+            if isinstance(enricher_config, dict):
+                if enricher_config.get("enabled", True):
+                    enabled.append(name)
+            else:
+                enabled.append(name)
+
+        return enabled
+
+    def _format_scanners_used(
+        self,
+        scanner_results: List[ScannerResult],
+    ) -> List[Dict[str, Any]]:
+        """Format scanner results for metadata."""
+        return [
+            {
+                "name": r.scanner_name,
+                "version": r.scanner_version,
+                "domains": r.domains,
+                "success": r.success,
+                "error": r.error,
+            }
+            for r in scanner_results
+        ]
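
Taken together, PipelineConfig and PipelineExecutor give a three-stage scan run: parallel scanners, enrichers in order, then aggregation into a ScanResult. A minimal sketch of driving it (not part of the package diff), assuming a prepared ScanContext named context and a LucidScanConfig named config; the scanner names are guesses based on the bundled plugin modules (trivy.py, opengrep.py, checkov.py) and may differ from the registered entry-point names:

# Illustrative only -- not part of the package diff. `config` and `context`
# are assumed to exist; scanner names are placeholders.
from lucidscan.pipeline import PipelineConfig, PipelineExecutor

pipeline_config = PipelineConfig(
    sequential_scanners=False,  # Stage 1 runs scanners in parallel by default
    max_workers=4,
    enricher_order=[],          # empty: fall back to enabled enrichers in config
)

executor = PipelineExecutor(config, pipeline_config, lucidscan_version="0.5.12")
result = executor.execute(["trivy", "opengrep", "checkov"], context)

print(result.metadata.duration_ms, "ms,", len(result.issues), "issues")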
@@ -0,0 +1,181 @@
+"""Parallel scanner execution using ThreadPoolExecutor."""
+
+from __future__ import annotations
+
+import threading
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from dataclasses import dataclass, field
+from typing import List, Optional, Tuple
+
+from lucidscan.core.logging import get_logger
+from lucidscan.core.models import ScanContext, UnifiedIssue
+from lucidscan.plugins.scanners import get_scanner_plugin
+
+LOGGER = get_logger(__name__)
+
+# Default number of worker threads
+DEFAULT_MAX_WORKERS = 4
+
+
+@dataclass
+class ScannerResult:
+    """Result from a single scanner execution."""
+
+    scanner_name: str
+    scanner_version: str
+    domains: List[str]
+    issues: List[UnifiedIssue] = field(default_factory=list)
+    error: Optional[str] = None
+    success: bool = True
+
+
+class ParallelScannerExecutor:
+    """Executes scanners in parallel using ThreadPoolExecutor.
+
+    Thread-safe aggregation of results using a lock.
+    """
+
+    def __init__(
+        self,
+        max_workers: int = DEFAULT_MAX_WORKERS,
+        sequential: bool = False,
+    ) -> None:
+        """Initialize the executor.
+
+        Args:
+            max_workers: Maximum number of concurrent scanner threads.
+            sequential: If True, run scanners sequentially (for debugging).
+        """
+        self._max_workers = max_workers
+        self._sequential = sequential
+        self._results_lock = threading.Lock()
+
+    def execute(
+        self,
+        scanner_names: List[str],
+        context: ScanContext,
+    ) -> Tuple[List[UnifiedIssue], List[ScannerResult]]:
+        """Execute scanners and return aggregated results.
+
+        Args:
+            scanner_names: List of scanner plugin names to execute.
+            context: Scan context for all scanners.
+
+        Returns:
+            Tuple of (all_issues, scanner_results) with thread-safe aggregation.
+        """
+        if not scanner_names:
+            return [], []
+
+        if self._sequential:
+            return self._execute_sequential(scanner_names, context)
+        return self._execute_parallel(scanner_names, context)
+
+    def _execute_parallel(
+        self,
+        scanner_names: List[str],
+        context: ScanContext,
+    ) -> Tuple[List[UnifiedIssue], List[ScannerResult]]:
+        """Execute scanners in parallel."""
+        all_issues: List[UnifiedIssue] = []
+        scanner_results: List[ScannerResult] = []
+
+        with ThreadPoolExecutor(max_workers=self._max_workers) as executor:
+            # Submit all scanner tasks
+            future_to_scanner = {
+                executor.submit(self._run_scanner, name, context): name
+                for name in scanner_names
+            }
+
+            # Collect results as they complete
+            for future in as_completed(future_to_scanner):
+                scanner_name = future_to_scanner[future]
+                try:
+                    result = future.result()
+                    with self._results_lock:
+                        all_issues.extend(result.issues)
+                        scanner_results.append(result)
+                except Exception as e:
+                    LOGGER.error(f"Scanner {scanner_name} raised exception: {e}")
+                    with self._results_lock:
+                        scanner_results.append(
+                            ScannerResult(
+                                scanner_name=scanner_name,
+                                scanner_version="unknown",
+                                domains=[],
+                                error=str(e),
+                                success=False,
+                            )
+                        )
+
+        return all_issues, scanner_results
+
+    def _execute_sequential(
+        self,
+        scanner_names: List[str],
+        context: ScanContext,
+    ) -> Tuple[List[UnifiedIssue], List[ScannerResult]]:
+        """Execute scanners sequentially (for debugging)."""
+        all_issues: List[UnifiedIssue] = []
+        scanner_results: List[ScannerResult] = []
+
+        for name in scanner_names:
+            result = self._run_scanner(name, context)
+            all_issues.extend(result.issues)
+            scanner_results.append(result)
+
+        return all_issues, scanner_results
+
+    def _run_scanner(
+        self,
+        scanner_name: str,
+        context: ScanContext,
+    ) -> ScannerResult:
+        """Run a single scanner and return its result.
+
+        This method is thread-safe and catches all exceptions.
+        """
+        scanner = get_scanner_plugin(scanner_name, project_root=context.project_root)
+        if not scanner:
+            LOGGER.error(f"Scanner plugin '{scanner_name}' not found")
+            return ScannerResult(
+                scanner_name=scanner_name,
+                scanner_version="unknown",
+                domains=[],
+                error=f"Plugin '{scanner_name}' not found",
+                success=False,
+            )
+
+        LOGGER.info(f"Running {scanner_name} scanner...")
+
+        try:
+            issues = scanner.scan(context)
+            LOGGER.info(f"{scanner_name}: found {len(issues)} issues")
+
+            return ScannerResult(
+                scanner_name=scanner_name,
+                scanner_version=scanner.get_version(),
+                domains=[d.value for d in scanner.domains],
+                issues=issues,
+            )
+
+        except Exception as e:
+            LOGGER.error(f"Scanner {scanner_name} failed: {e}")
+            # Try to get version even on failure
+            try:
+                version = scanner.get_version()
+            except Exception:
+                version = "unknown"
+
+            try:
+                domains = [d.value for d in scanner.domains]
+            except Exception:
+                domains = []
+
+            return ScannerResult(
+                scanner_name=scanner_name,
+                scanner_version=version,
+                domains=domains,
+                error=str(e),
+                success=False,
+            )
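
ParallelScannerExecutor can also be used on its own; the sequential flag trades throughput for easier debugging, and per-scanner failures are captured in ScannerResult rather than raised. A short sketch (not part of the package diff), assuming a prepared ScanContext named context and placeholder scanner names:

# Illustrative only -- not part of the package diff. `context` is assumed to
# exist; scanner names are placeholders.
from lucidscan.pipeline.parallel import ParallelScannerExecutor

# sequential=True keeps everything on one thread, which the constructor
# docstring above recommends for debugging.
executor = ParallelScannerExecutor(max_workers=2, sequential=True)
issues, results = executor.execute(["trivy", "opengrep"], context)

for r in results:
    status = "ok" if r.success else f"failed: {r.error}"
    print(f"{r.scanner_name} {r.scanner_version} ({', '.join(r.domains)}): {status}")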
@@ -0,0 +1,40 @@
+"""Plugin infrastructure for lucidscan.
+
+This package provides the plugin discovery and management infrastructure
+for all plugin types:
+- Scanner plugins (lucidscan.scanners) - Security scanners
+- Linter plugins (lucidscan.linters) - Code linting
+- Type checker plugins (lucidscan.type_checkers) - Type checking
+- Test runner plugins (lucidscan.test_runners) - Test execution
+- Coverage plugins (lucidscan.coverage) - Coverage analysis
+- Enricher plugins (lucidscan.enrichers) - Post-processing
+- Reporter plugins (lucidscan.reporters) - Output formatting
+
+Plugins are discovered via Python entry points.
+"""
+
+from lucidscan.plugins.discovery import (
+    discover_plugins,
+    get_plugin,
+    list_available_plugins,
+    SCANNER_ENTRY_POINT_GROUP,
+    ENRICHER_ENTRY_POINT_GROUP,
+    REPORTER_ENTRY_POINT_GROUP,
+    LINTER_ENTRY_POINT_GROUP,
+    TYPE_CHECKER_ENTRY_POINT_GROUP,
+    TEST_RUNNER_ENTRY_POINT_GROUP,
+    COVERAGE_ENTRY_POINT_GROUP,
+)
+
+__all__ = [
+    "discover_plugins",
+    "get_plugin",
+    "list_available_plugins",
+    "SCANNER_ENTRY_POINT_GROUP",
+    "ENRICHER_ENTRY_POINT_GROUP",
+    "REPORTER_ENTRY_POINT_GROUP",
+    "LINTER_ENTRY_POINT_GROUP",
+    "TYPE_CHECKER_ENTRY_POINT_GROUP",
+    "TEST_RUNNER_ENTRY_POINT_GROUP",
+    "COVERAGE_ENTRY_POINT_GROUP",
+]
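
The docstring above names one entry-point group per plugin type (for example lucidscan.scanners). A simplified sketch of that mechanism using importlib.metadata, shown for illustration only; it is not the package's actual discover_plugins implementation, and it assumes Python 3.10+ for the group keyword:

# Illustrative only -- a sketch of entry-point discovery, not lucidscan's code.
from importlib.metadata import entry_points

# Group name taken from the docstring: "Scanner plugins (lucidscan.scanners)".
for ep in entry_points(group="lucidscan.scanners"):
    plugin_cls = ep.load()  # import the registered plugin class
    print(ep.name, "->", f"{plugin_cls.__module__}.{plugin_cls.__qualname__}")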
@@ -0,0 +1,28 @@
+"""Coverage plugins for lucidscan.
+
+This module provides coverage analysis integrations for the quality pipeline.
+Coverage plugins are discovered via the lucidscan.coverage entry point group.
+"""
+
+from lucidscan.plugins.coverage.base import CoveragePlugin, CoverageResult, FileCoverage
+from lucidscan.plugins.discovery import (
+    discover_plugins,
+    COVERAGE_ENTRY_POINT_GROUP,
+)
+
+
+def discover_coverage_plugins():
+    """Discover all installed coverage plugins.
+
+    Returns:
+        Dictionary mapping plugin names to plugin classes.
+    """
+    return discover_plugins(COVERAGE_ENTRY_POINT_GROUP, CoveragePlugin)
+
+
+__all__ = [
+    "CoveragePlugin",
+    "CoverageResult",
+    "FileCoverage",
+    "discover_coverage_plugins",
+]
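
A small usage sketch for the helper above (not part of the package diff); discover_coverage_plugins returns a name-to-class mapping per its docstring, so the loop below simply lists whatever coverage plugins are installed:

# Illustrative only -- not part of the package diff.
from lucidscan.plugins.coverage import discover_coverage_plugins

# e.g. the bundled coverage_py.py and istanbul.py plugins, if their entry
# points are registered under the lucidscan.coverage group.
for name, plugin_cls in discover_coverage_plugins().items():
    print(name, "->", plugin_cls.__name__)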