kekkai-cli 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. kekkai/__init__.py +7 -0
  2. kekkai/cli.py +1038 -0
  3. kekkai/config.py +403 -0
  4. kekkai/dojo.py +419 -0
  5. kekkai/dojo_import.py +213 -0
  6. kekkai/github/__init__.py +16 -0
  7. kekkai/github/commenter.py +198 -0
  8. kekkai/github/models.py +56 -0
  9. kekkai/github/sanitizer.py +112 -0
  10. kekkai/installer/__init__.py +39 -0
  11. kekkai/installer/errors.py +23 -0
  12. kekkai/installer/extract.py +161 -0
  13. kekkai/installer/manager.py +252 -0
  14. kekkai/installer/manifest.py +189 -0
  15. kekkai/installer/verify.py +86 -0
  16. kekkai/manifest.py +77 -0
  17. kekkai/output.py +218 -0
  18. kekkai/paths.py +46 -0
  19. kekkai/policy.py +326 -0
  20. kekkai/runner.py +70 -0
  21. kekkai/scanners/__init__.py +67 -0
  22. kekkai/scanners/backends/__init__.py +14 -0
  23. kekkai/scanners/backends/base.py +73 -0
  24. kekkai/scanners/backends/docker.py +178 -0
  25. kekkai/scanners/backends/native.py +240 -0
  26. kekkai/scanners/base.py +110 -0
  27. kekkai/scanners/container.py +144 -0
  28. kekkai/scanners/falco.py +237 -0
  29. kekkai/scanners/gitleaks.py +237 -0
  30. kekkai/scanners/semgrep.py +227 -0
  31. kekkai/scanners/trivy.py +246 -0
  32. kekkai/scanners/url_policy.py +163 -0
  33. kekkai/scanners/zap.py +340 -0
  34. kekkai/threatflow/__init__.py +94 -0
  35. kekkai/threatflow/artifacts.py +476 -0
  36. kekkai/threatflow/chunking.py +361 -0
  37. kekkai/threatflow/core.py +438 -0
  38. kekkai/threatflow/mermaid.py +374 -0
  39. kekkai/threatflow/model_adapter.py +491 -0
  40. kekkai/threatflow/prompts.py +277 -0
  41. kekkai/threatflow/redaction.py +228 -0
  42. kekkai/threatflow/sanitizer.py +643 -0
  43. kekkai/triage/__init__.py +33 -0
  44. kekkai/triage/app.py +168 -0
  45. kekkai/triage/audit.py +203 -0
  46. kekkai/triage/ignore.py +269 -0
  47. kekkai/triage/models.py +185 -0
  48. kekkai/triage/screens.py +341 -0
  49. kekkai/triage/widgets.py +169 -0
  50. kekkai_cli-1.0.0.dist-info/METADATA +135 -0
  51. kekkai_cli-1.0.0.dist-info/RECORD +90 -0
  52. kekkai_cli-1.0.0.dist-info/WHEEL +5 -0
  53. kekkai_cli-1.0.0.dist-info/entry_points.txt +3 -0
  54. kekkai_cli-1.0.0.dist-info/top_level.txt +3 -0
  55. kekkai_core/__init__.py +3 -0
  56. kekkai_core/ci/__init__.py +11 -0
  57. kekkai_core/ci/benchmarks.py +354 -0
  58. kekkai_core/ci/metadata.py +104 -0
  59. kekkai_core/ci/validators.py +92 -0
  60. kekkai_core/docker/__init__.py +17 -0
  61. kekkai_core/docker/metadata.py +153 -0
  62. kekkai_core/docker/sbom.py +173 -0
  63. kekkai_core/docker/security.py +158 -0
  64. kekkai_core/docker/signing.py +135 -0
  65. kekkai_core/redaction.py +84 -0
  66. kekkai_core/slsa/__init__.py +13 -0
  67. kekkai_core/slsa/verify.py +121 -0
  68. kekkai_core/windows/__init__.py +29 -0
  69. kekkai_core/windows/chocolatey.py +335 -0
  70. kekkai_core/windows/installer.py +256 -0
  71. kekkai_core/windows/scoop.py +165 -0
  72. kekkai_core/windows/validators.py +220 -0
  73. portal/__init__.py +19 -0
  74. portal/api.py +155 -0
  75. portal/auth.py +103 -0
  76. portal/enterprise/__init__.py +32 -0
  77. portal/enterprise/audit.py +435 -0
  78. portal/enterprise/licensing.py +342 -0
  79. portal/enterprise/rbac.py +276 -0
  80. portal/enterprise/saml.py +595 -0
  81. portal/ops/__init__.py +53 -0
  82. portal/ops/backup.py +553 -0
  83. portal/ops/log_shipper.py +469 -0
  84. portal/ops/monitoring.py +517 -0
  85. portal/ops/restore.py +469 -0
  86. portal/ops/secrets.py +408 -0
  87. portal/ops/upgrade.py +591 -0
  88. portal/tenants.py +340 -0
  89. portal/uploads.py +259 -0
  90. portal/web.py +384 -0
@@ -0,0 +1,144 @@
1
+ from __future__ import annotations
2
+
3
+ import shutil
4
+ import subprocess # nosec B404
5
+ import time
6
+ from dataclasses import dataclass
7
+ from pathlib import Path
8
+
9
+ DEFAULT_TIMEOUT = 600
10
+
11
+
12
@dataclass(frozen=True)
class ContainerConfig:
    """Security-hardened settings for running a scanner in Docker."""

    # Image reference, e.g. "zricethezav/gitleaks".
    image: str
    # Optional sha256 digest; when set, the image is pinned as image@digest.
    image_digest: str | None = None
    # Mount the container root filesystem read-only (scratch space comes
    # from a bounded tmpfs on /tmp).
    read_only: bool = True
    # Run with networking disabled (--network none).
    network_disabled: bool = True
    # Apply the no-new-privileges security option.
    no_new_privileges: bool = True
    # Memory cap passed to --memory.
    memory_limit: str = "2g"
    # CPU cap passed to --cpus.
    cpu_limit: str = "2"
21
+
22
+
23
@dataclass(frozen=True)
class ContainerResult:
    """Outcome of a single container run."""

    # Container process exit status; 124 is used to signal a timeout.
    exit_code: int
    stdout: str
    stderr: str
    # Wall-clock duration of the run, in milliseconds.
    duration_ms: int
    # True when the run was killed after exceeding its timeout.
    timed_out: bool
30
+
31
+
32
def docker_command() -> str:
    """Locate the docker executable on PATH.

    Returns:
        Absolute path to the docker binary.

    Raises:
        RuntimeError: If no docker executable can be found.
    """
    located = shutil.which("docker")
    if located:
        return located
    raise RuntimeError("Docker not found; install docker to run scanners")
37
+
38
+
39
def _coerce_text(stream: str | bytes | None) -> str:
    """Normalize TimeoutExpired output, which may arrive as bytes or None."""
    if isinstance(stream, bytes):
        return stream.decode()
    return stream or ""


def _build_run_args(
    docker: str,
    config: ContainerConfig,
    repo_path: Path,
    output_path: Path,
    command: list[str],
    workdir: str | None,
    output_mount: str | None,
    skip_repo_mount: bool,
    user: str | None,
) -> list[str]:
    """Assemble the full `docker run` argument vector with security flags."""
    image_ref = f"{config.image}@{config.image_digest}" if config.image_digest else config.image

    args = [docker, "run", "--rm"]

    if user:
        args.extend(["--user", user])

    if config.read_only:
        # Root FS is read-only; a bounded, noexec tmpfs covers scratch space.
        args.extend(["--read-only", "--tmpfs", "/tmp:rw,noexec,nosuid,size=512m"])  # nosec B108 # noqa: S108

    if config.network_disabled:
        args.extend(["--network", "none"])

    if config.no_new_privileges:
        args.append("--security-opt=no-new-privileges")

    if config.memory_limit:
        args.extend(["--memory", config.memory_limit])

    if config.cpu_limit:
        args.extend(["--cpus", config.cpu_limit])

    # Mount repository read-only (optional for DAST scanners).
    if not skip_repo_mount:
        args.extend(["-v", f"{repo_path.resolve()}:/repo:ro"])

    # Mount output directory read-write.
    mount_point = output_mount or "/output"
    args.extend(["-v", f"{output_path.resolve()}:{mount_point}:rw"])

    # Set working directory.
    args.extend(["-w", workdir or "/repo"])

    args.append(image_ref)
    args.extend(command)
    return args


def run_container(
    config: ContainerConfig,
    repo_path: Path,
    output_path: Path,
    command: list[str],
    timeout_seconds: int = DEFAULT_TIMEOUT,
    workdir: str | None = None,
    output_mount: str | None = None,
    skip_repo_mount: bool = False,
    user: str | None = "1000:1000",
) -> ContainerResult:
    """Run a command in a Docker container with security controls.

    Args:
        config: Container configuration (image, security settings)
        repo_path: Path to repository to mount (read-only)
        output_path: Path for output files (read-write)
        command: Command and arguments to run
        timeout_seconds: Timeout for container execution
        workdir: Override working directory (default: /repo)
        output_mount: Override output mount point (default: /output)
        skip_repo_mount: Skip mounting repo (for DAST scanners)
        user: User to run as (default: 1000:1000, None for container default)

    Returns:
        ContainerResult with captured output; on timeout, ``timed_out`` is
        True and ``exit_code`` is 124 (the conventional timeout status).

    Raises:
        RuntimeError: If the docker binary is not on PATH.
    """
    docker = docker_command()
    args = _build_run_args(
        docker,
        config,
        repo_path,
        output_path,
        command,
        workdir,
        output_mount,
        skip_repo_mount,
        user,
    )

    start = time.monotonic()
    try:
        proc = subprocess.run(  # noqa: S603 # nosec B603
            args,
            capture_output=True,
            text=True,
            timeout=timeout_seconds,
            check=False,
        )
    except subprocess.TimeoutExpired as exc:
        duration_ms = int((time.monotonic() - start) * 1000)
        return ContainerResult(
            exit_code=124,  # conventional "timed out" status
            stdout=_coerce_text(exc.stdout),
            stderr=_coerce_text(exc.stderr),
            duration_ms=duration_ms,
            timed_out=True,
        )

    duration_ms = int((time.monotonic() - start) * 1000)
    return ContainerResult(
        exit_code=proc.returncode,
        stdout=proc.stdout,
        stderr=proc.stderr,
        duration_ms=duration_ms,
        timed_out=False,
    )
132
+
133
+
134
def pull_image(image: str, digest: str | None = None) -> bool:
    """Pre-pull a scanner image, pinned to a digest when one is given.

    Returns:
        True when the docker pull completed successfully.
    """
    docker = docker_command()
    reference = f"{image}@{digest}" if digest else image
    completed = subprocess.run(  # noqa: S603 # nosec B603
        [docker, "pull", reference],
        capture_output=True,
        text=True,
        timeout=300,
        check=False,
    )
    return completed.returncode == 0
@@ -0,0 +1,237 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import platform
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+ from .backends import (
9
+ BackendType,
10
+ ToolNotFoundError,
11
+ ToolVersionError,
12
+ detect_tool,
13
+ )
14
+ from .base import Finding, ScanContext, ScanResult, Severity
15
+
16
+ SCAN_TYPE = "Falco Scan"
17
+
18
+
19
# NOTE(review): this exception is not raised anywhere in this module's visible
# code — run() reports unavailability via ScanResult.error instead. Presumably
# kept for external callers; confirm before removing.
class FalcoNotAvailableError(RuntimeError):
    """Raised when Falco is not available or not enabled."""
21
+
22
+
23
class FalcoScanner:
    """Adapter around the Falco runtime security scanner.

    EXPERIMENTAL: Linux-only scanner that monitors runtime behavior and
    requires explicit opt-in via the --enable-falco flag.

    Security notes:
    - Falco needs kernel access (eBPF or kernel module)
    - Should only be used in controlled environments
    - Requires elevated privileges on the host

    Falco has no Docker-container mode here: direct kernel access means it
    can only run natively.
    """

    def __init__(
        self,
        enabled: bool = False,
        rules_file: Path | None = None,
        timeout_seconds: int = 300,
        backend: BackendType | None = None,
    ) -> None:
        self._enabled = enabled
        self._rules_file = rules_file
        self._timeout = timeout_seconds
        self._backend = backend
        self._resolved_backend: BackendType | None = None

    @property
    def name(self) -> str:
        return "falco"

    @property
    def scan_type(self) -> str:
        return SCAN_TYPE

    @property
    def backend_used(self) -> BackendType | None:
        """Backend used by the most recent scan, if any."""
        return self._resolved_backend

    def is_available(self) -> tuple[bool, str]:
        """Check whether Falco can run in this environment.

        Returns:
            Tuple of (available, reason).
        """
        if platform.system() != "Linux":
            return False, "Falco is Linux-only (experimental)"

        if not self._enabled:
            return False, "Falco requires explicit --enable-falco flag"

        try:
            detect_tool("falco", min_version=(0, 35, 0))
        except ToolNotFoundError:
            return False, "Falco binary not found in PATH"
        except ToolVersionError as exc:
            return False, str(exc)
        return True, "Falco available"

    def run(self, ctx: ScanContext) -> ScanResult:
        """Run the Falco adapter.

        Falco is built for continuous monitoring, so this adapter works in a
        one-shot fashion: it looks for an existing alerts file and parses it.
        It always resolves to the native backend (kernel access required).
        """
        self._resolved_backend = BackendType.NATIVE

        ok, why = self.is_available()
        if not ok:
            return ScanResult(
                scanner=self.name,
                success=False,
                findings=[],
                error=f"Falco not available: {why}",
                duration_ms=0,
            )

        alerts_path = self._find_alerts_file(ctx)
        if alerts_path is None:
            # Nothing to analyze: report a clean, successful run.
            return ScanResult(
                scanner=self.name,
                success=True,
                findings=[],
                error=None,
                duration_ms=0,
            )

        try:
            parsed = self.parse(alerts_path.read_text())
        except (json.JSONDecodeError, KeyError) as exc:
            return ScanResult(
                scanner=self.name,
                success=False,
                findings=[],
                error=f"Parse error: {exc}",
                duration_ms=0,
            )

        return ScanResult(
            scanner=self.name,
            success=True,
            findings=parsed,
            raw_output_path=alerts_path,
            duration_ms=0,
        )

    def _find_alerts_file(self, ctx: ScanContext) -> Path | None:
        """Return the first pre-existing Falco alerts file, if any."""
        candidates = (
            ctx.output_dir / "falco-alerts.json",
            ctx.repo_path / "falco-alerts.json",
            Path("/var/log/falco/alerts.json"),
        )
        return next((p for p in candidates if p.exists()), None)

    def parse(self, raw_output: str) -> list[Finding]:
        """Parse Falco alerts in JSONL form (one JSON object per line)."""
        parsed: list[Finding] = []
        for raw_line in raw_output.strip().split("\n"):
            if not raw_line.strip():
                continue
            try:
                record = json.loads(raw_line)
            except json.JSONDecodeError:
                # Skip malformed lines rather than failing the whole parse.
                continue
            parsed.append(self._parse_alert(record))
        return parsed

    def _parse_alert(self, alert: dict[str, Any]) -> Finding:
        """Convert one Falco alert dict into a Finding."""
        priority = alert.get("priority", "").lower()

        # Pull extra context out of the structured output fields.
        fields = alert.get("output_fields", {})
        container_id = fields.get("container.id", "")
        container_name = fields.get("container.name", "")
        process = fields.get("proc.name", "")
        cmdline = fields.get("proc.cmdline", "")

        description = [alert.get("output", "")]
        if process:
            description.append(f"Process: {process}")
        if cmdline:
            description.append(f"Command: {cmdline}")
        if container_name:
            description.append(f"Container: {container_name}")

        rule = alert.get("rule", "Unknown Rule")
        return Finding(
            scanner=self.name,
            title=rule,
            severity=self._map_priority_to_severity(priority),
            description="\n".join(description),
            file_path=container_id or None,
            rule_id=rule,
            extra={
                "priority": priority,
                "container_id": container_id,
                "container_name": container_name,
                "process": process,
                "time": alert.get("time", ""),
            },
        )

    def _map_priority_to_severity(self, priority: str) -> Severity:
        """Translate a Falco priority string into a Severity level."""
        table = {
            "emergency": Severity.CRITICAL,
            "alert": Severity.CRITICAL,
            "critical": Severity.CRITICAL,
            "error": Severity.HIGH,
            "warning": Severity.MEDIUM,
            "notice": Severity.LOW,
            "informational": Severity.INFO,
            "debug": Severity.INFO,
        }
        return table.get(priority.lower(), Severity.UNKNOWN)
216
+
217
+
218
def create_falco_scanner(
    enabled: bool = False,
    rules_file: Path | None = None,
    timeout_seconds: int = 300,
) -> FalcoScanner:
    """Build a FalcoScanner.

    Args:
        enabled: Whether Falco scanning is enabled (explicit opt-in required).
        rules_file: Optional custom rules file.
        timeout_seconds: Scan timeout.

    Returns:
        Configured FalcoScanner instance.
    """
    scanner = FalcoScanner(
        enabled=enabled,
        rules_file=rules_file,
        timeout_seconds=timeout_seconds,
    )
    return scanner
@@ -0,0 +1,237 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from pathlib import Path
5
+ from typing import Any
6
+
7
+ from .backends import (
8
+ BackendType,
9
+ NativeBackend,
10
+ ToolNotFoundError,
11
+ ToolVersionError,
12
+ detect_tool,
13
+ docker_available,
14
+ )
15
+ from .base import Finding, ScanContext, ScanResult, Severity
16
+ from .container import ContainerConfig, run_container
17
+
18
+ GITLEAKS_IMAGE = "zricethezav/gitleaks"
19
+ GITLEAKS_DIGEST = "sha256:691af3c7c5a48b16f187ce3446d5f194838f91238f27270ed36eef6359a574d9"
20
+ SCAN_TYPE = "Gitleaks Scan"
21
+
22
+
23
class GitleaksScanner:
    """Secret-detection scanner backed by Gitleaks (Docker or native)."""

    def __init__(
        self,
        image: str = GITLEAKS_IMAGE,
        digest: str | None = GITLEAKS_DIGEST,
        timeout_seconds: int = 300,
        backend: BackendType | None = None,
    ) -> None:
        self._image = image
        self._digest = digest
        self._timeout = timeout_seconds
        self._backend = backend
        self._resolved_backend: BackendType | None = None

    @property
    def name(self) -> str:
        return "gitleaks"

    @property
    def scan_type(self) -> str:
        return SCAN_TYPE

    @property
    def backend_used(self) -> BackendType | None:
        """Backend used by the most recent scan, if any."""
        return self._resolved_backend

    def _select_backend(self) -> BackendType:
        """Pick a backend: honor an explicit choice, else prefer Docker."""
        if self._backend is not None:
            return self._backend

        docker_ok, _ = docker_available()
        if docker_ok:
            return BackendType.DOCKER

        # No Docker: fall back to a native binary if one is usable;
        # otherwise still report Docker so the failure surfaces there.
        try:
            detect_tool("gitleaks")
        except (ToolNotFoundError, ToolVersionError):
            return BackendType.DOCKER
        return BackendType.NATIVE

    def run(self, ctx: ScanContext) -> ScanResult:
        """Run Gitleaks against ctx.repo_path and collect findings."""
        chosen = self._select_backend()
        self._resolved_backend = chosen

        if chosen == BackendType.NATIVE:
            return self._run_native(ctx)
        return self._run_docker(ctx)

    def _run_docker(self, ctx: ScanContext) -> ScanResult:
        """Run Gitleaks inside a locked-down container."""
        report = ctx.output_dir / "gitleaks-results.json"

        outcome = run_container(
            config=ContainerConfig(
                image=self._image,
                image_digest=self._digest,
                read_only=True,
                network_disabled=True,
                no_new_privileges=True,
            ),
            repo_path=ctx.repo_path,
            output_path=ctx.output_dir,
            command=[
                "detect",
                "--source",
                "/repo",
                "--report-format",
                "json",
                "--report-path",
                "/output/gitleaks-results.json",
                # Never fail the process on findings; the report is parsed instead.
                "--exit-code",
                "0",
            ],
            timeout_seconds=self._timeout,
        )

        return self._process_result(
            outcome.timed_out,
            outcome.exit_code,
            outcome.duration_ms,
            outcome.stderr,
            report,
        )

    def _run_native(self, ctx: ScanContext) -> ScanResult:
        """Run a locally installed gitleaks binary."""
        try:
            tool = detect_tool("gitleaks")
        except (ToolNotFoundError, ToolVersionError) as exc:
            return ScanResult(
                scanner=self.name,
                success=False,
                findings=[],
                error=str(exc),
                duration_ms=0,
            )

        report = ctx.output_dir / "gitleaks-results.json"
        outcome = NativeBackend().execute(
            tool=tool.path,
            args=[
                "detect",
                "--source",
                str(ctx.repo_path),
                "--report-format",
                "json",
                "--report-path",
                str(report),
                "--exit-code",
                "0",
            ],
            repo_path=ctx.repo_path,
            output_path=ctx.output_dir,
            timeout_seconds=self._timeout,
            network_required=False,
        )

        return self._process_result(
            outcome.timed_out,
            outcome.exit_code,
            outcome.duration_ms,
            outcome.stderr,
            report,
        )

    def _process_result(
        self, timed_out: bool, exit_code: int, duration_ms: int, stderr: str, output_file: Path
    ) -> ScanResult:
        """Turn raw backend output into a ScanResult (shared by both backends)."""
        if timed_out:
            return ScanResult(
                scanner=self.name,
                success=False,
                findings=[],
                error="Scan timed out",
                duration_ms=duration_ms,
            )

        if not output_file.exists():
            # A zero exit with no report means a clean scan; otherwise failure.
            if exit_code == 0:
                return ScanResult(
                    scanner=self.name,
                    success=True,
                    findings=[],
                    duration_ms=duration_ms,
                )
            return ScanResult(
                scanner=self.name,
                success=False,
                findings=[],
                error=stderr or "Scan failed",
                duration_ms=duration_ms,
            )

        try:
            report_text = output_file.read_text().strip()
            if not report_text:
                # Empty report file: treat as a successful scan with no leaks.
                return ScanResult(
                    scanner=self.name,
                    success=True,
                    findings=[],
                    raw_output_path=output_file,
                    duration_ms=duration_ms,
                )
            parsed = self.parse(report_text)
        except (json.JSONDecodeError, KeyError) as exc:
            return ScanResult(
                scanner=self.name,
                success=False,
                findings=[],
                raw_output_path=output_file,
                error=f"Parse error: {exc}",
                duration_ms=duration_ms,
            )

        return ScanResult(
            scanner=self.name,
            success=True,
            findings=parsed,
            raw_output_path=output_file,
            duration_ms=duration_ms,
        )

    def parse(self, raw_output: str) -> list[Finding]:
        """Parse a Gitleaks JSON report into Finding objects."""
        data = json.loads(raw_output)
        if not isinstance(data, list):
            return []
        return [self._parse_leak(entry) for entry in data]

    def _parse_leak(self, leak: dict[str, Any]) -> Finding:
        """Convert one leak record into a Finding, redacting the matched text."""
        matched = leak.get("Match", "")
        # Show at most a 10-char prefix of the match; shorter matches are
        # fully redacted rather than echoed back.
        preview = matched[:10] + "..." if len(matched) > 10 else "[REDACTED]"

        return Finding(
            scanner=self.name,
            title=f"Secret detected: {leak.get('RuleID', 'unknown')}",
            severity=Severity.HIGH,  # secrets are always treated as high severity
            description=f"Potential secret found: {preview}",
            file_path=leak.get("File"),
            line=leak.get("StartLine"),
            rule_id=leak.get("RuleID"),
            extra={
                "commit": leak.get("Commit", ""),
                "author": leak.get("Author", ""),
                "entropy": str(leak.get("Entropy", "")),
            },
        )