lucidscan 0.5.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lucidscan/__init__.py +12 -0
- lucidscan/bootstrap/__init__.py +26 -0
- lucidscan/bootstrap/paths.py +160 -0
- lucidscan/bootstrap/platform.py +111 -0
- lucidscan/bootstrap/validation.py +76 -0
- lucidscan/bootstrap/versions.py +119 -0
- lucidscan/cli/__init__.py +50 -0
- lucidscan/cli/__main__.py +8 -0
- lucidscan/cli/arguments.py +405 -0
- lucidscan/cli/commands/__init__.py +64 -0
- lucidscan/cli/commands/autoconfigure.py +294 -0
- lucidscan/cli/commands/help.py +69 -0
- lucidscan/cli/commands/init.py +656 -0
- lucidscan/cli/commands/list_scanners.py +59 -0
- lucidscan/cli/commands/scan.py +307 -0
- lucidscan/cli/commands/serve.py +142 -0
- lucidscan/cli/commands/status.py +84 -0
- lucidscan/cli/commands/validate.py +105 -0
- lucidscan/cli/config_bridge.py +152 -0
- lucidscan/cli/exit_codes.py +17 -0
- lucidscan/cli/runner.py +284 -0
- lucidscan/config/__init__.py +29 -0
- lucidscan/config/ignore.py +178 -0
- lucidscan/config/loader.py +431 -0
- lucidscan/config/models.py +316 -0
- lucidscan/config/validation.py +645 -0
- lucidscan/core/__init__.py +3 -0
- lucidscan/core/domain_runner.py +463 -0
- lucidscan/core/git.py +174 -0
- lucidscan/core/logging.py +34 -0
- lucidscan/core/models.py +207 -0
- lucidscan/core/streaming.py +340 -0
- lucidscan/core/subprocess_runner.py +164 -0
- lucidscan/detection/__init__.py +21 -0
- lucidscan/detection/detector.py +154 -0
- lucidscan/detection/frameworks.py +270 -0
- lucidscan/detection/languages.py +328 -0
- lucidscan/detection/tools.py +229 -0
- lucidscan/generation/__init__.py +15 -0
- lucidscan/generation/config_generator.py +275 -0
- lucidscan/generation/package_installer.py +330 -0
- lucidscan/mcp/__init__.py +20 -0
- lucidscan/mcp/formatter.py +510 -0
- lucidscan/mcp/server.py +297 -0
- lucidscan/mcp/tools.py +1049 -0
- lucidscan/mcp/watcher.py +237 -0
- lucidscan/pipeline/__init__.py +17 -0
- lucidscan/pipeline/executor.py +187 -0
- lucidscan/pipeline/parallel.py +181 -0
- lucidscan/plugins/__init__.py +40 -0
- lucidscan/plugins/coverage/__init__.py +28 -0
- lucidscan/plugins/coverage/base.py +160 -0
- lucidscan/plugins/coverage/coverage_py.py +454 -0
- lucidscan/plugins/coverage/istanbul.py +411 -0
- lucidscan/plugins/discovery.py +107 -0
- lucidscan/plugins/enrichers/__init__.py +61 -0
- lucidscan/plugins/enrichers/base.py +63 -0
- lucidscan/plugins/linters/__init__.py +26 -0
- lucidscan/plugins/linters/base.py +125 -0
- lucidscan/plugins/linters/biome.py +448 -0
- lucidscan/plugins/linters/checkstyle.py +393 -0
- lucidscan/plugins/linters/eslint.py +368 -0
- lucidscan/plugins/linters/ruff.py +498 -0
- lucidscan/plugins/reporters/__init__.py +45 -0
- lucidscan/plugins/reporters/base.py +30 -0
- lucidscan/plugins/reporters/json_reporter.py +79 -0
- lucidscan/plugins/reporters/sarif_reporter.py +303 -0
- lucidscan/plugins/reporters/summary_reporter.py +61 -0
- lucidscan/plugins/reporters/table_reporter.py +81 -0
- lucidscan/plugins/scanners/__init__.py +57 -0
- lucidscan/plugins/scanners/base.py +60 -0
- lucidscan/plugins/scanners/checkov.py +484 -0
- lucidscan/plugins/scanners/opengrep.py +464 -0
- lucidscan/plugins/scanners/trivy.py +492 -0
- lucidscan/plugins/test_runners/__init__.py +27 -0
- lucidscan/plugins/test_runners/base.py +111 -0
- lucidscan/plugins/test_runners/jest.py +381 -0
- lucidscan/plugins/test_runners/karma.py +481 -0
- lucidscan/plugins/test_runners/playwright.py +434 -0
- lucidscan/plugins/test_runners/pytest.py +598 -0
- lucidscan/plugins/type_checkers/__init__.py +27 -0
- lucidscan/plugins/type_checkers/base.py +106 -0
- lucidscan/plugins/type_checkers/mypy.py +355 -0
- lucidscan/plugins/type_checkers/pyright.py +313 -0
- lucidscan/plugins/type_checkers/typescript.py +280 -0
- lucidscan-0.5.12.dist-info/METADATA +242 -0
- lucidscan-0.5.12.dist-info/RECORD +91 -0
- lucidscan-0.5.12.dist-info/WHEEL +5 -0
- lucidscan-0.5.12.dist-info/entry_points.txt +34 -0
- lucidscan-0.5.12.dist-info/licenses/LICENSE +201 -0
- lucidscan-0.5.12.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,498 @@
|
|
|
1
|
+
"""Ruff linter plugin.
|
|
2
|
+
|
|
3
|
+
Ruff is an extremely fast Python linter written in Rust.
|
|
4
|
+
https://github.com/astral-sh/ruff
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import hashlib
|
|
10
|
+
import json
|
|
11
|
+
import platform
|
|
12
|
+
import subprocess
|
|
13
|
+
import tarfile
|
|
14
|
+
import zipfile
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Any, Dict, List, Optional
|
|
17
|
+
|
|
18
|
+
from lucidscan.bootstrap.paths import LucidscanPaths
|
|
19
|
+
from lucidscan.bootstrap.versions import get_tool_version
|
|
20
|
+
from lucidscan.core.logging import get_logger
|
|
21
|
+
from lucidscan.core.models import (
|
|
22
|
+
ScanContext,
|
|
23
|
+
Severity,
|
|
24
|
+
ToolDomain,
|
|
25
|
+
UnifiedIssue,
|
|
26
|
+
)
|
|
27
|
+
from lucidscan.core.subprocess_runner import run_with_streaming
|
|
28
|
+
from lucidscan.plugins.linters.base import LinterPlugin, FixResult
|
|
29
|
+
|
|
30
|
+
# Module-level logger for this plugin.
LOGGER = get_logger(__name__)

# Default version from pyproject.toml [tool.lucidscan.tools]
DEFAULT_VERSION = get_tool_version("ruff")

# Ruff severity mapping
# Ruff outputs: E=error, W=warning, F=flake8, I=isort, etc.
# We map based on rule category
# Keys are rule-code prefixes (the leading letters of codes such as "E501"
# or "RUF100"); codes whose prefix is absent fall back to MEDIUM in
# _get_severity below.
SEVERITY_MAP: Dict[str, Severity] = {
    "E": Severity.MEDIUM,  # pycodestyle error
    "W": Severity.LOW,  # pycodestyle warning
    "F": Severity.MEDIUM,  # pyflakes
    "I": Severity.LOW,  # isort
    "N": Severity.LOW,  # pep8-naming
    "D": Severity.LOW,  # pydocstyle
    "UP": Severity.LOW,  # pyupgrade
    "YTT": Severity.MEDIUM,  # flake8-2020
    "ANN": Severity.LOW,  # flake8-annotations
    "ASYNC": Severity.MEDIUM,
    "S": Severity.HIGH,  # flake8-bandit (security)
    "BLE": Severity.MEDIUM,  # flake8-blind-except
    "FBT": Severity.LOW,  # flake8-boolean-trap
    "B": Severity.MEDIUM,  # flake8-bugbear
    "A": Severity.LOW,  # flake8-builtins
    "COM": Severity.LOW,  # flake8-commas
    "C4": Severity.LOW,  # flake8-comprehensions
    "DTZ": Severity.MEDIUM,  # flake8-datetimez
    "T10": Severity.HIGH,  # flake8-debugger
    "DJ": Severity.MEDIUM,  # flake8-django
    "EM": Severity.LOW,  # flake8-errmsg
    "EXE": Severity.LOW,  # flake8-executable
    "FA": Severity.LOW,  # flake8-future-annotations
    "ISC": Severity.LOW,  # flake8-implicit-str-concat
    "ICN": Severity.LOW,  # flake8-import-conventions
    "LOG": Severity.LOW,  # flake8-logging
    "G": Severity.LOW,  # flake8-logging-format
    "INP": Severity.LOW,  # flake8-no-pep420
    "PIE": Severity.LOW,  # flake8-pie
    "T20": Severity.LOW,  # flake8-print
    "PYI": Severity.LOW,  # flake8-pyi
    "PT": Severity.LOW,  # flake8-pytest-style
    "Q": Severity.LOW,  # flake8-quotes
    "RSE": Severity.LOW,  # flake8-raise
    "RET": Severity.LOW,  # flake8-return
    "SLF": Severity.MEDIUM,  # flake8-self
    "SLOT": Severity.LOW,  # flake8-slots
    "SIM": Severity.LOW,  # flake8-simplify
    "TID": Severity.LOW,  # flake8-tidy-imports
    "TCH": Severity.LOW,  # flake8-type-checking
    "INT": Severity.LOW,  # flake8-gettext
    "ARG": Severity.LOW,  # flake8-unused-arguments
    "PTH": Severity.LOW,  # flake8-use-pathlib
    "TD": Severity.INFO,  # flake8-todos
    "FIX": Severity.INFO,  # flake8-fixme
    "ERA": Severity.LOW,  # eradicate
    "PD": Severity.LOW,  # pandas-vet
    "PGH": Severity.LOW,  # pygrep-hooks
    "PL": Severity.MEDIUM,  # Pylint
    "TRY": Severity.LOW,  # tryceratops
    "FLY": Severity.LOW,  # flynt
    "NPY": Severity.MEDIUM,  # NumPy
    "PERF": Severity.LOW,  # Perflint
    "FURB": Severity.LOW,  # refurb
    "RUF": Severity.MEDIUM,  # Ruff-specific
}
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class RuffLinter(LinterPlugin):
    """Ruff linter plugin for Python code analysis.

    Manages its own Ruff binary (downloaded from GitHub releases into the
    lucidscan tool cache), runs ``ruff check`` with JSON output, and converts
    each reported violation into a ``UnifiedIssue``.
    """

    def __init__(
        self,
        version: str = DEFAULT_VERSION,
        project_root: Optional[Path] = None,
    ):
        """Initialize RuffLinter.

        Args:
            version: Ruff version to use.
            project_root: Optional project root for tool installation.
        """
        self._version = version
        if project_root:
            self._paths = LucidscanPaths.for_project(project_root)
        else:
            self._paths = LucidscanPaths.default()

    @property
    def name(self) -> str:
        """Plugin identifier."""
        return "ruff"

    @property
    def languages(self) -> List[str]:
        """Supported languages."""
        return ["python"]

    @property
    def supports_fix(self) -> bool:
        """Ruff supports auto-fix."""
        return True

    def get_version(self) -> str:
        """Get Ruff version."""
        return self._version

    def ensure_binary(self) -> Path:
        """Ensure Ruff binary is available.

        Downloads from GitHub releases if not present.

        Returns:
            Path to Ruff binary.

        Raises:
            RuntimeError: If the download fails or the downloaded archive
                does not contain the expected binary.
        """
        binary_dir = self._paths.plugin_bin_dir(self.name, self._version)
        binary_name = "ruff.exe" if platform.system() == "Windows" else "ruff"
        binary_path = binary_dir / binary_name

        if binary_path.exists():
            return binary_path

        # Download binary
        LOGGER.info(f"Downloading Ruff {self._version}...")
        binary_dir.mkdir(parents=True, exist_ok=True)

        archive_path = self._download_release(binary_dir)
        try:
            self._extract_binary(archive_path, binary_dir, binary_name)
        finally:
            # Clean up the archive even if extraction fails, so a retry
            # starts from a clean directory.
            archive_path.unlink(missing_ok=True)

        # Fail with a clear message rather than an opaque chmod traceback
        # when the archive layout was unexpected and nothing was extracted.
        if not binary_path.exists():
            raise RuntimeError(
                f"Ruff archive did not contain expected binary {binary_name!r}"
            )

        # Make executable on Unix
        if platform.system() != "Windows":
            binary_path.chmod(0o755)

        LOGGER.info(f"Ruff {self._version} installed to {binary_dir}")
        return binary_path

    def _build_check_cmd(
        self,
        binary: Path,
        context: ScanContext,
        fix: bool = False,
    ) -> List[str]:
        """Build the ``ruff check`` argument vector for a scan context.

        Shared by :meth:`lint` and :meth:`fix` so the path/exclude handling
        cannot drift between the two.

        Args:
            binary: Path to the Ruff binary.
            context: Scan context supplying paths and exclude patterns.
            fix: Whether to include ``--fix``.

        Returns:
            Full command line for subprocess execution.
        """
        cmd = [str(binary), "check"]
        if fix:
            cmd.append("--fix")
        cmd.extend(["--output-format", "json"])

        # Paths to check; default to the whole working directory.
        cmd.extend([str(p) for p in context.paths] if context.paths else ["."])

        # Exclude patterns.
        for pattern in context.get_exclude_patterns():
            cmd.extend(["--exclude", pattern])

        return cmd

    def lint(self, context: ScanContext) -> List[UnifiedIssue]:
        """Run Ruff linting.

        Args:
            context: Scan context with paths and configuration.

        Returns:
            List of linting issues (empty on timeout or execution failure).
        """
        binary = self.ensure_binary()
        cmd = self._build_check_cmd(binary, context)

        LOGGER.debug(f"Running: {' '.join(cmd)}")

        try:
            result = run_with_streaming(
                cmd=cmd,
                cwd=context.project_root,
                tool_name="ruff",
                stream_handler=context.stream_handler,
                timeout=120,
            )
        except subprocess.TimeoutExpired:
            LOGGER.warning("Ruff lint timed out after 120 seconds")
            return []
        except Exception as e:
            LOGGER.error(f"Failed to run Ruff: {e}")
            return []

        # Parse output
        issues = self._parse_output(result.stdout, context.project_root)

        LOGGER.info(f"Ruff found {len(issues)} issues")
        return issues

    def fix(self, context: ScanContext) -> FixResult:
        """Apply Ruff auto-fixes.

        Runs a plain lint first to count the pre-fix issues, then runs
        ``ruff check --fix`` and diffs the two issue sets.

        Args:
            context: Scan context with paths and configuration.

        Returns:
            FixResult with statistics (empty result on timeout/failure).
        """
        binary = self.ensure_binary()

        # Run without fix to count issues first
        pre_issues = self.lint(context)

        cmd = self._build_check_cmd(binary, context, fix=True)

        LOGGER.debug(f"Running: {' '.join(cmd)}")

        try:
            result = run_with_streaming(
                cmd=cmd,
                cwd=context.project_root,
                tool_name="ruff-fix",
                stream_handler=context.stream_handler,
                timeout=120,
            )
        except subprocess.TimeoutExpired:
            LOGGER.warning("Ruff fix timed out after 120 seconds")
            return FixResult()
        except Exception as e:
            LOGGER.error(f"Failed to run Ruff fix: {e}")
            return FixResult()

        # Parse remaining issues
        post_issues = self._parse_output(result.stdout, context.project_root)

        # Files counted as modified are those that had an issue before the
        # fix run which is no longer reported afterwards.
        files_modified = len(set(
            str(issue.file_path)
            for issue in pre_issues
            if issue not in post_issues
        ))

        return FixResult(
            files_modified=files_modified,
            issues_fixed=len(pre_issues) - len(post_issues),
            issues_remaining=len(post_issues),
        )

    def _download_release(self, target_dir: Path) -> Path:
        """Download Ruff release archive.

        Args:
            target_dir: Directory to download to.

        Returns:
            Path to downloaded archive.

        Raises:
            ValueError: If the computed URL is not a GitHub HTTPS URL.
            RuntimeError: If the download fails.
        """
        import urllib.request

        system = platform.system().lower()
        machine = platform.machine().lower()

        # Map platform names to the Rust target-triple components used in
        # Ruff's release asset names.
        if system == "darwin":
            system = "apple-darwin"
        elif system == "linux":
            system = "unknown-linux-gnu"
        elif system == "windows":
            system = "pc-windows-msvc"

        # Map architecture
        if machine in ("x86_64", "amd64"):
            arch = "x86_64"
        elif machine in ("arm64", "aarch64"):
            arch = "aarch64"
        else:
            # Pass unknown machines through unchanged; the download will 404
            # with a clear URL in the error message.
            arch = machine

        # Build download URL, e.g.
        # .../download/<version>/ruff-x86_64-unknown-linux-gnu.tar.gz
        ext = "zip" if platform.system() == "Windows" else "tar.gz"
        filename = f"ruff-{arch}-{system}.{ext}"
        # FIX: the asset filename must be part of the URL; previously the
        # path component was a garbled placeholder.
        url = (
            "https://github.com/astral-sh/ruff/releases/download/"
            f"{self._version}/{filename}"
        )

        archive_path = target_dir / filename

        LOGGER.debug(f"Downloading from {url}")

        # Validate URL scheme and domain for security
        if not url.startswith("https://github.com/"):
            raise ValueError(f"Invalid download URL: {url}")

        try:
            urllib.request.urlretrieve(url, archive_path)  # nosec B310 nosemgrep
        except Exception as e:
            raise RuntimeError(f"Failed to download Ruff: {e}") from e

        return archive_path

    def _extract_binary(self, archive_path: Path, target_dir: Path, binary_name: str) -> None:
        """Extract binary from archive.

        Only the first member whose name ends with ``binary_name`` is
        extracted; the rest of the archive is ignored.

        Args:
            archive_path: Path to archive file.
            target_dir: Directory to extract to.
            binary_name: Name of the binary file.
        """
        if str(archive_path).endswith(".zip"):
            with zipfile.ZipFile(archive_path, "r") as zf:
                for member in zf.namelist():
                    if member.endswith(binary_name):
                        # Extract to target dir
                        zf.extract(member, target_dir)
                        # Move out of a nested directory if the archive has one.
                        extracted = target_dir / member
                        if extracted.parent != target_dir:
                            extracted.rename(target_dir / binary_name)
                        break
        else:
            with tarfile.open(archive_path, "r:gz") as tf:
                for tarinfo in tf.getmembers():
                    if tarinfo.name.endswith(binary_name):
                        # Flatten any leading directories by renaming the
                        # member before extraction.
                        # NOTE(review): on Python 3.12+ consider
                        # tf.extract(..., filter="data") for hardening.
                        tarinfo.name = binary_name
                        tf.extract(tarinfo, target_dir)
                        break

    def _parse_output(self, output: str, project_root: Path) -> List[UnifiedIssue]:
        """Parse Ruff JSON output.

        Args:
            output: JSON output from Ruff (a top-level list of violations).
            project_root: Project root directory.

        Returns:
            List of UnifiedIssue objects; empty if output is blank or
            unparseable.
        """
        if not output.strip():
            return []

        try:
            violations = json.loads(output)
        except json.JSONDecodeError:
            LOGGER.warning("Failed to parse Ruff output as JSON")
            return []

        if not isinstance(violations, list):
            LOGGER.warning(f"Expected list from Ruff, got {type(violations).__name__}")
            return []

        issues = []
        for violation in violations:
            if not isinstance(violation, dict):
                LOGGER.warning(f"Skipping non-dict violation: {type(violation).__name__}")
                continue
            issue = self._violation_to_issue(violation, project_root)
            if issue:
                issues.append(issue)

        return issues

    def _violation_to_issue(
        self,
        violation: Dict[str, Any],
        project_root: Path,
    ) -> Optional[UnifiedIssue]:
        """Convert Ruff violation to UnifiedIssue.

        Args:
            violation: Ruff violation dict.
            project_root: Project root directory, used to absolutize
                relative file paths.

        Returns:
            UnifiedIssue, or None if the dict could not be converted.
        """
        try:
            code = violation.get("code", "")
            message = violation.get("message", "")
            filename = violation.get("filename", "")
            location = violation.get("location") or {}

            # Get severity based on rule category
            severity = self._get_severity(code)

            # Generate deterministic ID
            issue_id = self._generate_issue_id(code, filename, location, message)

            file_path = Path(filename)
            if not file_path.is_absolute():
                file_path = project_root / file_path

            # Extract code snippet if available
            code_snippet = None
            source_line = violation.get("source")
            if source_line:
                code_snippet = source_line

            # Extract fix information
            fix_info = violation.get("fix") or {}
            is_fixable = fix_info.get("applicability") == "safe" or bool(fix_info.get("edits"))
            fix_message = fix_info.get("message")

            return UnifiedIssue(
                id=issue_id,
                domain=ToolDomain.LINTING,
                source_tool="ruff",
                severity=severity,
                rule_id=code,
                title=f"{code}: {message}",
                description=message,
                documentation_url=violation.get("url"),
                file_path=file_path,
                # NOTE(review): line_end mirrors the start row, so multi-line
                # violations collapse to their first line — confirm intended.
                line_start=location.get("row"),
                line_end=location.get("row"),
                column_start=location.get("column"),
                column_end=violation.get("end_location", {}).get("column"),
                code_snippet=code_snippet,
                fixable=is_fixable,
                suggested_fix=fix_message,
                recommendation=fix_message,
                metadata={
                    "noqa_row": violation.get("noqa_row"),
                },
            )
        except Exception as e:
            LOGGER.warning(f"Failed to parse violation: {e}")
            return None

    def _get_severity(self, code: str) -> Severity:
        """Get severity for a Ruff rule code.

        Args:
            code: Ruff rule code (e.g., 'E501', 'F401').

        Returns:
            Severity level (MEDIUM for unknown prefixes).
        """
        # Extract category prefix (letters before numbers)
        prefix = ""
        for char in code:
            if char.isalpha():
                prefix += char
            else:
                break

        return SEVERITY_MAP.get(prefix, Severity.MEDIUM)

    def _generate_issue_id(
        self,
        code: str,
        filename: str,
        location: Dict[str, int],
        message: str,
    ) -> str:
        """Generate deterministic issue ID.

        Args:
            code: Rule code.
            filename: File path.
            location: Line/column info.
            message: Error message.

        Returns:
            Unique issue ID of the form ``ruff-<code>-<12-hex-digest>``.
        """
        # FIX: the filename must participate in the hash so identical
        # violations in different files get distinct IDs; previously this
        # slot contained a garbled placeholder.
        content = (
            f"{code}:{filename}:{location.get('row', 0)}:"
            f"{location.get('column', 0)}:{message}"
        )
        hash_val = hashlib.sha256(content.encode()).hexdigest()[:12]
        return f"ruff-{code}-{hash_val}"
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"""Reporter plugins for lucidscan output formatting.
|
|
2
|
+
|
|
3
|
+
Plugins are discovered via Python entry points (lucidscan.reporters group).
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from typing import Dict, Type
|
|
7
|
+
|
|
8
|
+
from lucidscan.plugins.reporters.base import ReporterPlugin
|
|
9
|
+
from lucidscan.plugins.reporters.json_reporter import JSONReporter
|
|
10
|
+
from lucidscan.plugins.reporters.table_reporter import TableReporter
|
|
11
|
+
from lucidscan.plugins.reporters.summary_reporter import SummaryReporter
|
|
12
|
+
from lucidscan.plugins.reporters.sarif_reporter import SARIFReporter
|
|
13
|
+
from lucidscan.plugins import REPORTER_ENTRY_POINT_GROUP
|
|
14
|
+
from lucidscan.plugins.discovery import (
|
|
15
|
+
discover_plugins,
|
|
16
|
+
get_plugin,
|
|
17
|
+
list_available_plugins as _list_plugins,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def discover_reporter_plugins() -> Dict[str, Type[ReporterPlugin]]:
    """Return every reporter plugin registered via entry points.

    Keys are plugin names; values are the plugin classes.
    """
    group = REPORTER_ENTRY_POINT_GROUP
    return discover_plugins(group, ReporterPlugin)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def get_reporter_plugin(name: str) -> ReporterPlugin | None:
    """Instantiate and return the reporter plugin registered as *name*.

    Returns None when no such plugin is installed.
    """
    group = REPORTER_ENTRY_POINT_GROUP
    return get_plugin(group, name, ReporterPlugin)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def list_available_reporters() -> list[str]:
    """Return the names of every installed reporter plugin."""
    group = REPORTER_ENTRY_POINT_GROUP
    return _list_plugins(group)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# Explicit public API of the reporters package.
__all__ = [
    "ReporterPlugin",
    "JSONReporter",
    "TableReporter",
    "SummaryReporter",
    "SARIFReporter",
    "discover_reporter_plugins",
    "get_reporter_plugin",
    "list_available_reporters",
]
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"""Base class for reporter plugins."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from abc import ABC, abstractmethod
|
|
6
|
+
from typing import IO
|
|
7
|
+
|
|
8
|
+
from lucidscan.core.models import ScanResult
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ReporterPlugin(ABC):
    """Abstract interface for scan-result output formatters.

    A concrete reporter serializes an aggregated ``ScanResult`` to a text
    stream in one specific format (JSON, table, SARIF, ...).
    """

    @property
    @abstractmethod
    def name(self) -> str:
        """Short format identifier used to select this reporter (e.g. 'json')."""

    @abstractmethod
    def report(self, result: ScanResult, output: IO[str]) -> None:
        """Serialize *result* and write it to *output*.

        Args:
            result: The aggregated scan result to format.
            output: Text stream that receives the formatted result.
        """
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""JSON reporter plugin for lucidscan."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from dataclasses import asdict
|
|
7
|
+
from typing import Any, Dict, IO
|
|
8
|
+
|
|
9
|
+
from lucidscan.core.models import ScanResult, UnifiedIssue
|
|
10
|
+
from lucidscan.plugins.reporters.base import ReporterPlugin
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class JSONReporter(ReporterPlugin):
    """Emit scan results as machine-readable JSON.

    The payload always contains the schema version and the normalized issue
    list; scan metadata, summary statistics and coverage totals are included
    only when present on the result.
    """

    @property
    def name(self) -> str:
        return "json"

    def report(self, result: ScanResult, output: IO[str]) -> None:
        """Serialize *result* as indented JSON, newline-terminated.

        Args:
            result: The scan result to format.
            output: Output stream to write to.
        """
        json.dump(self._format_result(result), output, indent=2)
        output.write("\n")

    def _format_result(self, result: ScanResult) -> Dict[str, Any]:
        """Build the top-level JSON-serializable document."""
        document: Dict[str, Any] = {
            "schema_version": result.schema_version,
            "issues": [self._issue_to_dict(item) for item in result.issues],
        }

        # Optional dataclass sections are emitted only when populated.
        for key, section in (
            ("metadata", result.metadata),
            ("summary", result.summary),
            ("coverage_summary", result.coverage_summary),
        ):
            if section:
                document[key] = asdict(section)

        return document

    def _issue_to_dict(self, issue: UnifiedIssue) -> Dict[str, Any]:
        """Flatten a UnifiedIssue into plain JSON-compatible types."""
        path = issue.file_path
        return {
            "id": issue.id,
            "domain": issue.domain.value,
            "source_tool": issue.source_tool,
            "severity": issue.severity.value,
            "rule_id": issue.rule_id,
            "title": issue.title,
            "description": issue.description,
            "recommendation": issue.recommendation,
            "documentation_url": issue.documentation_url,
            "file_path": str(path) if path else None,
            "line_start": issue.line_start,
            "line_end": issue.line_end,
            "column_start": issue.column_start,
            "column_end": issue.column_end,
            "code_snippet": issue.code_snippet,
            "fixable": issue.fixable,
            "suggested_fix": issue.suggested_fix,
            "dependency": issue.dependency,
            "iac_resource": issue.iac_resource,
            "metadata": issue.metadata,
        }