kekkai_cli-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. kekkai/__init__.py +7 -0
  2. kekkai/cli.py +1038 -0
  3. kekkai/config.py +403 -0
  4. kekkai/dojo.py +419 -0
  5. kekkai/dojo_import.py +213 -0
  6. kekkai/github/__init__.py +16 -0
  7. kekkai/github/commenter.py +198 -0
  8. kekkai/github/models.py +56 -0
  9. kekkai/github/sanitizer.py +112 -0
  10. kekkai/installer/__init__.py +39 -0
  11. kekkai/installer/errors.py +23 -0
  12. kekkai/installer/extract.py +161 -0
  13. kekkai/installer/manager.py +252 -0
  14. kekkai/installer/manifest.py +189 -0
  15. kekkai/installer/verify.py +86 -0
  16. kekkai/manifest.py +77 -0
  17. kekkai/output.py +218 -0
  18. kekkai/paths.py +46 -0
  19. kekkai/policy.py +326 -0
  20. kekkai/runner.py +70 -0
  21. kekkai/scanners/__init__.py +67 -0
  22. kekkai/scanners/backends/__init__.py +14 -0
  23. kekkai/scanners/backends/base.py +73 -0
  24. kekkai/scanners/backends/docker.py +178 -0
  25. kekkai/scanners/backends/native.py +240 -0
  26. kekkai/scanners/base.py +110 -0
  27. kekkai/scanners/container.py +144 -0
  28. kekkai/scanners/falco.py +237 -0
  29. kekkai/scanners/gitleaks.py +237 -0
  30. kekkai/scanners/semgrep.py +227 -0
  31. kekkai/scanners/trivy.py +246 -0
  32. kekkai/scanners/url_policy.py +163 -0
  33. kekkai/scanners/zap.py +340 -0
  34. kekkai/threatflow/__init__.py +94 -0
  35. kekkai/threatflow/artifacts.py +476 -0
  36. kekkai/threatflow/chunking.py +361 -0
  37. kekkai/threatflow/core.py +438 -0
  38. kekkai/threatflow/mermaid.py +374 -0
  39. kekkai/threatflow/model_adapter.py +491 -0
  40. kekkai/threatflow/prompts.py +277 -0
  41. kekkai/threatflow/redaction.py +228 -0
  42. kekkai/threatflow/sanitizer.py +643 -0
  43. kekkai/triage/__init__.py +33 -0
  44. kekkai/triage/app.py +168 -0
  45. kekkai/triage/audit.py +203 -0
  46. kekkai/triage/ignore.py +269 -0
  47. kekkai/triage/models.py +185 -0
  48. kekkai/triage/screens.py +341 -0
  49. kekkai/triage/widgets.py +169 -0
  50. kekkai_cli-1.0.0.dist-info/METADATA +135 -0
  51. kekkai_cli-1.0.0.dist-info/RECORD +90 -0
  52. kekkai_cli-1.0.0.dist-info/WHEEL +5 -0
  53. kekkai_cli-1.0.0.dist-info/entry_points.txt +3 -0
  54. kekkai_cli-1.0.0.dist-info/top_level.txt +3 -0
  55. kekkai_core/__init__.py +3 -0
  56. kekkai_core/ci/__init__.py +11 -0
  57. kekkai_core/ci/benchmarks.py +354 -0
  58. kekkai_core/ci/metadata.py +104 -0
  59. kekkai_core/ci/validators.py +92 -0
  60. kekkai_core/docker/__init__.py +17 -0
  61. kekkai_core/docker/metadata.py +153 -0
  62. kekkai_core/docker/sbom.py +173 -0
  63. kekkai_core/docker/security.py +158 -0
  64. kekkai_core/docker/signing.py +135 -0
  65. kekkai_core/redaction.py +84 -0
  66. kekkai_core/slsa/__init__.py +13 -0
  67. kekkai_core/slsa/verify.py +121 -0
  68. kekkai_core/windows/__init__.py +29 -0
  69. kekkai_core/windows/chocolatey.py +335 -0
  70. kekkai_core/windows/installer.py +256 -0
  71. kekkai_core/windows/scoop.py +165 -0
  72. kekkai_core/windows/validators.py +220 -0
  73. portal/__init__.py +19 -0
  74. portal/api.py +155 -0
  75. portal/auth.py +103 -0
  76. portal/enterprise/__init__.py +32 -0
  77. portal/enterprise/audit.py +435 -0
  78. portal/enterprise/licensing.py +342 -0
  79. portal/enterprise/rbac.py +276 -0
  80. portal/enterprise/saml.py +595 -0
  81. portal/ops/__init__.py +53 -0
  82. portal/ops/backup.py +553 -0
  83. portal/ops/log_shipper.py +469 -0
  84. portal/ops/monitoring.py +517 -0
  85. portal/ops/restore.py +469 -0
  86. portal/ops/secrets.py +408 -0
  87. portal/ops/upgrade.py +591 -0
  88. portal/tenants.py +340 -0
  89. portal/uploads.py +259 -0
  90. portal/web.py +384 -0
kekkai_core/ci/metadata.py
@@ -0,0 +1,104 @@
+ """Metadata extraction utilities for CI/CD distribution triggers."""
+
+ import hashlib
+ import re
+ from pathlib import Path
+
+
+ def extract_version_from_tag(tag: str) -> str:
+     """
+     Extract semantic version from Git tag.
+
+     Args:
+         tag: Git tag string (e.g., "v0.0.1", "v0.0.1-rc1")
+
+     Returns:
+         Version string without 'v' prefix (e.g., "0.0.1", "0.0.1-rc1")
+
+     Raises:
+         ValueError: If tag format is invalid
+     """
+     if not tag:
+         raise ValueError("Tag cannot be empty")
+
+     # Remove 'v' prefix if present
+     version = tag[1:] if tag.startswith("v") else tag
+
+     # Validate basic semver pattern (with optional pre-release and build metadata)
+     pattern = r"^\d+\.\d+\.\d+(-[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?(\+[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?$"
+     if not re.match(pattern, version):
+         raise ValueError(f"Invalid tag format: {tag}. Expected format: v0.0.1 or v0.0.1-rc1")
+
+     return version
+
+
+ def calculate_sha256(file_path: Path) -> str:
+     """
+     Calculate SHA256 checksum of a file.
+
+     Args:
+         file_path: Path to file to checksum
+
+     Returns:
+         SHA256 hex digest string
+
+     Raises:
+         FileNotFoundError: If file doesn't exist
+         OSError: If file cannot be read
+     """
+     if not file_path.exists():
+         raise FileNotFoundError(f"File not found: {file_path}")
+
+     sha256_hash = hashlib.sha256()
+     with file_path.open("rb") as f:
+         # Read in 64KB chunks for memory efficiency
+         for chunk in iter(lambda: f.read(65536), b""):
+             sha256_hash.update(chunk)
+
+     return sha256_hash.hexdigest()
+
+
+ def extract_tarball_url(repo: str, version: str) -> str:
+     """
+     Generate GitHub release tarball URL.
+
+     Args:
+         repo: Repository name (e.g., "kademoslabs/kekkai")
+         version: Version string (e.g., "0.0.1")
+
+     Returns:
+         GitHub release tarball URL
+     """
+     # Remove 'v' prefix if present for URL consistency
+     clean_version = version[1:] if version.startswith("v") else version
+     return f"https://github.com/{repo}/archive/refs/tags/v{clean_version}.tar.gz"
+
+
+ def format_dispatch_payload(
+     event_type: str,
+     version: str,
+     sha256: str | None = None,
+ ) -> dict[str, object]:
+     """
+     Format repository_dispatch payload for distribution updates.
+
+     Args:
+         event_type: Dispatch event type (e.g., "kekkai-release")
+         version: Version string
+         sha256: Optional SHA256 checksum
+
+     Returns:
+         JSON-serializable dispatch payload
+     """
+     payload: dict[str, object] = {
+         "event_type": event_type,
+         "client_payload": {
+             "version": version,
+         },
+     }
+
+     if sha256:
+         assert isinstance(payload["client_payload"], dict)  # nosec B101
+         payload["client_payload"]["sha256"] = sha256
+
+     return payload
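
For context, a minimal sketch of how these four helpers might chain together in a release job; the tag, repository slug, and wheel path below are illustrative assumptions, not values from the package:

    from pathlib import Path

    from kekkai_core.ci.metadata import (
        calculate_sha256,
        extract_tarball_url,
        extract_version_from_tag,
        format_dispatch_payload,
    )

    # Turn the pushed Git tag into a bare semver string
    version = extract_version_from_tag("v1.0.0")  # -> "1.0.0"

    # Derive the source tarball URL for downstream package managers
    url = extract_tarball_url("kademoslabs/kekkai", version)
    # -> "https://github.com/kademoslabs/kekkai/archive/refs/tags/v1.0.0.tar.gz"

    # Checksum the built artifact and embed it in the dispatch payload
    digest = calculate_sha256(Path("dist/kekkai_cli-1.0.0-py3-none-any.whl"))
    payload = format_dispatch_payload("kekkai-release", version, sha256=digest)
    # {"event_type": "kekkai-release",
    #  "client_payload": {"version": "1.0.0", "sha256": "<hex digest>"}}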
kekkai_core/ci/validators.py
@@ -0,0 +1,92 @@
+ """Validation utilities for CI/CD distribution triggers."""
+
+ import re
+ from pathlib import Path
+
+
+ def validate_semver(version: str) -> bool:
+     """
+     Validate semantic versioning format.
+
+     Args:
+         version: Version string to validate
+
+     Returns:
+         True if valid semver, False otherwise
+
+     Examples:
+         >>> validate_semver("0.0.1")
+         True
+         >>> validate_semver("0.0.1-rc1")
+         True
+         >>> validate_semver("v0.0.1")
+         False
+         >>> validate_semver("1.2")
+         False
+     """
+     # Strict semver: MAJOR.MINOR.PATCH[-PRERELEASE][+BUILD]
+     pattern = r"^\d+\.\d+\.\d+(-[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?(\+[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?$"
+     return bool(re.match(pattern, version))
+
+
+ def verify_checksum(file_path: Path, expected_sha256: str) -> bool:
+     """
+     Verify file SHA256 checksum matches expected value.
+
+     Args:
+         file_path: Path to file to verify
+         expected_sha256: Expected SHA256 hex digest
+
+     Returns:
+         True if checksums match, False otherwise
+
+     Raises:
+         FileNotFoundError: If file doesn't exist
+     """
+     from kekkai_core.ci.metadata import calculate_sha256
+
+     actual_sha256 = calculate_sha256(file_path)
+     return actual_sha256.lower() == expected_sha256.lower()
+
+
+ def validate_repo_format(repo: str) -> bool:
+     """
+     Validate GitHub repository format.
+
+     Args:
+         repo: Repository string (e.g., "kademoslabs/kekkai")
+
+     Returns:
+         True if valid format, False otherwise
+
+     Examples:
+         >>> validate_repo_format("kademoslabs/kekkai")
+         True
+         >>> validate_repo_format("kademoslabs")
+         False
+         >>> validate_repo_format("kademoslabs/kekkai/extra")
+         False
+     """
+     pattern = r"^[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+$"
+     return bool(re.match(pattern, repo))
+
+
+ def validate_github_token(token: str) -> bool:
+     """
+     Validate GitHub token format (basic check).
+
+     Args:
+         token: GitHub token string
+
+     Returns:
+         True if format looks valid, False otherwise
+
+     Note:
+         This only validates format, not token validity or permissions.
+     """
+     if not token:
+         return False
+
+     # GitHub tokens are typically 40+ characters; classic PATs start with
+     # ghp_ and fine-grained tokens with github_pat_. Accept any plausible
+     # length rather than pinning exact prefixes.
+     return len(token) >= 20
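
Taken together, these checks suggest a pre-flight gate before firing a repository_dispatch. A small sketch under the assumption that the repo, version, and token arrive via environment variables (the variable names are hypothetical):

    import os
    import sys

    from kekkai_core.ci.validators import (
        validate_github_token,
        validate_repo_format,
        validate_semver,
    )

    repo = os.environ.get("TARGET_REPO", "")
    version = os.environ.get("RELEASE_VERSION", "")
    token = os.environ.get("GITHUB_TOKEN", "")

    # Fail fast on malformed inputs before any network call is made
    if not (
        validate_repo_format(repo)
        and validate_semver(version)
        and validate_github_token(token)
    ):
        sys.exit("refusing to dispatch: malformed repo, version, or token")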
kekkai_core/docker/__init__.py
@@ -0,0 +1,17 @@
+ """Docker security utilities for scanning, signing, and SBOM generation."""
+
+ from kekkai_core.docker.metadata import extract_image_metadata, parse_manifest
+ from kekkai_core.docker.sbom import generate_sbom, validate_sbom_format
+ from kekkai_core.docker.security import filter_vulnerabilities, run_trivy_scan
+ from kekkai_core.docker.signing import sign_image, verify_signature
+
+ __all__ = [
+     "run_trivy_scan",
+     "filter_vulnerabilities",
+     "sign_image",
+     "verify_signature",
+     "generate_sbom",
+     "validate_sbom_format",
+     "extract_image_metadata",
+     "parse_manifest",
+ ]
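
With these re-exports in place, downstream code can presumably import the scanning, signing, and SBOM helpers from the package root rather than the individual submodules:

    from kekkai_core.docker import generate_sbom, run_trivy_scan, sign_image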
kekkai_core/docker/metadata.py
@@ -0,0 +1,153 @@
+ """Docker image metadata extraction and validation."""
+
+ import json
+ import subprocess
+ from typing import Any
+
+
+ class DockerMetadataError(Exception):
+     """Raised when metadata extraction fails."""
+
+
+ def extract_image_metadata(image: str) -> dict[str, Any]:
+     """
+     Extract metadata from Docker image.
+
+     Args:
+         image: Docker image (e.g., 'kademoslabs/kekkai:latest')
+
+     Returns:
+         Image metadata as dictionary
+
+     Raises:
+         DockerMetadataError: If extraction fails
+     """
+     cmd = ["docker", "inspect", image]
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             check=True,
+             timeout=30,
+         )
+
+         metadata = json.loads(result.stdout)
+
+         if not metadata or not isinstance(metadata, list):
+             raise DockerMetadataError(f"Invalid metadata format for image: {image}")
+
+         # The guard above guarantees a non-empty list
+         return metadata[0]
+
+     except subprocess.CalledProcessError as e:
+         raise DockerMetadataError(f"Failed to extract metadata: {e.stderr}") from e
+     except subprocess.TimeoutExpired as e:
+         raise DockerMetadataError("Metadata extraction timed out after 30s") from e
+     except json.JSONDecodeError as e:
+         raise DockerMetadataError(f"Failed to parse metadata: {e}") from e
+
+
+ def get_oci_labels(metadata: dict[str, Any]) -> dict[str, str]:
+     """
+     Extract OCI labels from image metadata.
+
+     Args:
+         metadata: Image metadata dictionary
+
+     Returns:
+         Dictionary of OCI labels
+     """
+     config = metadata.get("Config", {})
+     labels = config.get("Labels") or {}
+
+     # Filter for OCI labels (org.opencontainers.image.*)
+     oci_labels = {
+         key: value for key, value in labels.items() if key.startswith("org.opencontainers.image.")
+     }
+
+     return oci_labels
+
+
+ def parse_manifest(image: str) -> dict[str, Any]:
+     """
+     Parse Docker image manifest.
+
+     Args:
+         image: Docker image (e.g., 'kademoslabs/kekkai:latest')
+
+     Returns:
+         Manifest as dictionary
+
+     Raises:
+         DockerMetadataError: If parsing fails
+     """
+     cmd = ["docker", "manifest", "inspect", image]
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             check=True,
+             timeout=30,
+         )
+
+         manifest: dict[str, Any] = json.loads(result.stdout)
+         return manifest
+
+     except subprocess.CalledProcessError as e:
+         raise DockerMetadataError(f"Failed to parse manifest: {e.stderr}") from e
+     except subprocess.TimeoutExpired as e:
+         raise DockerMetadataError("Manifest parsing timed out after 30s") from e
+     except json.JSONDecodeError as e:
+         raise DockerMetadataError(f"Failed to parse manifest JSON: {e}") from e
+
+
+ def get_supported_architectures(manifest: dict[str, Any]) -> list[str]:
+     """
+     Extract supported architectures from manifest.
+
+     Args:
+         manifest: Image manifest dictionary
+
+     Returns:
+         List of supported architectures (e.g., ['amd64', 'arm64'])
+     """
+     architectures: list[str] = []
+
+     # Multi-arch manifests have a "manifests" array
+     manifests = manifest.get("manifests", [])
+
+     if manifests:
+         for m in manifests:
+             platform = m.get("platform", {})
+             arch = platform.get("architecture", "")
+             if arch:
+                 architectures.append(arch)
+     else:
+         # Single-arch image
+         platform = manifest.get("platform", {})
+         arch = platform.get("architecture", "")
+         if arch:
+             architectures.append(arch)
+
+     return architectures
+
+
+ def verify_multi_arch_support(
+     manifest: dict[str, Any],
+     required_archs: list[str],
+ ) -> bool:
+     """
+     Verify image supports required architectures.
+
+     Args:
+         manifest: Image manifest dictionary
+         required_archs: List of required architectures
+
+     Returns:
+         True if all required architectures are supported
+     """
+     supported = get_supported_architectures(manifest)
+     return all(arch in supported for arch in required_archs)
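
A plausible use of this module is a pre-publish architecture check; a short sketch, with the image name and required architectures as illustrative assumptions:

    from kekkai_core.docker.metadata import (
        extract_image_metadata,
        get_oci_labels,
        parse_manifest,
        verify_multi_arch_support,
    )

    image = "kademoslabs/kekkai:latest"

    # verify_multi_arch_support walks the manifest's "manifests" array
    manifest = parse_manifest(image)
    if not verify_multi_arch_support(manifest, ["amd64", "arm64"]):
        raise SystemExit("image is missing a required architecture")

    # OCI labels come from `docker inspect` Config.Labels
    labels = get_oci_labels(extract_image_metadata(image))
    print(labels.get("org.opencontainers.image.version", "unversioned"))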
kekkai_core/docker/sbom.py
@@ -0,0 +1,173 @@
+ """SBOM (Software Bill of Materials) generation for Docker images."""
+
+ import json
+ import subprocess
+ from pathlib import Path
+ from typing import Any, Literal
+
+ SBOMFormat = Literal["spdx", "spdx-json", "cyclonedx", "cyclonedx-json"]
+
+
+ class SBOMError(Exception):
+     """Raised when SBOM generation fails."""
+
+
+ def generate_sbom(
+     image: str,
+     output_format: SBOMFormat = "spdx-json",
+     output_file: Path | None = None,
+ ) -> dict[str, Any]:
+     """
+     Generate SBOM for Docker image using Trivy.
+
+     Args:
+         image: Docker image to analyze (e.g., 'kademoslabs/kekkai:latest')
+         output_format: SBOM format (spdx-json, cyclonedx-json, etc.)
+         output_file: Path to write SBOM (optional)
+
+     Returns:
+         SBOM as dictionary
+
+     Raises:
+         SBOMError: If SBOM generation fails
+     """
+     # Map our format to Trivy's format argument
+     format_map = {
+         "spdx": "spdx",
+         "spdx-json": "spdx-json",
+         "cyclonedx": "cyclonedx",
+         "cyclonedx-json": "cyclonedx-json",
+     }
+
+     trivy_format = format_map.get(output_format, "spdx-json")
+
+     cmd = ["trivy", "image", "--format", trivy_format]
+
+     if output_file:
+         cmd.extend(["--output", str(output_file)])
+
+     cmd.append(image)
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             check=True,
+             timeout=300,
+         )
+
+         # Parse JSON output
+         if output_format.endswith("-json"):
+             return json.loads(result.stdout) if result.stdout else {}
+         else:
+             # For non-JSON formats, return raw output
+             return {"sbom": result.stdout}
+
+     except subprocess.CalledProcessError as e:
+         raise SBOMError(f"SBOM generation failed: {e.stderr}") from e
+     except subprocess.TimeoutExpired as e:
+         raise SBOMError("SBOM generation timed out after 300s") from e
+     except json.JSONDecodeError as e:
+         raise SBOMError(f"Failed to parse SBOM output: {e}") from e
+
+
+ def validate_sbom_format(sbom_data: dict[str, Any], expected_format: SBOMFormat) -> bool:
+     """
+     Validate SBOM data structure matches expected format.
+
+     Args:
+         sbom_data: SBOM dictionary
+         expected_format: Expected SBOM format
+
+     Returns:
+         True if SBOM structure is valid
+     """
+     if expected_format == "spdx-json":
+         # SPDX must have these fields
+         required_fields = ["spdxVersion", "dataLicense", "name", "documentNamespace"]
+         return all(field in sbom_data for field in required_fields)
+
+     elif expected_format == "cyclonedx-json":
+         # CycloneDX must have these fields
+         required_fields = ["bomFormat", "specVersion", "version"]
+         return all(field in sbom_data for field in required_fields)
+
+     return False
+
+
+ def extract_dependencies(sbom_data: dict[str, Any], sbom_format: SBOMFormat) -> list[str]:
+     """
+     Extract package dependencies from SBOM.
+
+     Args:
+         sbom_data: SBOM dictionary
+         sbom_format: SBOM format
+
+     Returns:
+         List of package names
+     """
+     dependencies: list[str] = []
+
+     if sbom_format == "spdx-json":
+         # SPDX packages
+         packages = sbom_data.get("packages", [])
+         for pkg in packages:
+             name = pkg.get("name", "")
+             if name:
+                 dependencies.append(name)
+
+     elif sbom_format == "cyclonedx-json":
+         # CycloneDX components
+         components = sbom_data.get("components", [])
+         for comp in components:
+             name = comp.get("name", "")
+             if name:
+                 dependencies.append(name)
+
+     return dependencies
+
+
+ def attach_sbom_to_image(
+     image: str,
+     sbom_file: Path,
+ ) -> bool:
+     """
+     Attach SBOM to Docker image using Cosign.
+
+     Args:
+         image: Docker image (e.g., 'kademoslabs/kekkai:latest')
+         sbom_file: Path to SBOM file
+
+     Returns:
+         True if attachment succeeded
+
+     Raises:
+         SBOMError: If attachment fails
+     """
+     if not sbom_file.exists():
+         raise SBOMError(f"SBOM file not found: {sbom_file}")
+
+     cmd = [
+         "cosign",
+         "attach",
+         "sbom",
+         "--sbom",
+         str(sbom_file),
+         image,
+     ]
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             check=True,
+             timeout=120,
+         )
+         return result.returncode == 0
+
+     except subprocess.CalledProcessError as e:
+         raise SBOMError(f"SBOM attachment failed: {e.stderr}") from e
+     except subprocess.TimeoutExpired as e:
+         raise SBOMError("SBOM attachment timed out after 120s") from e
kekkai_core/docker/security.py
@@ -0,0 +1,158 @@
+ """Docker image security scanning with Trivy."""
+
+ import json
+ import subprocess
+ from pathlib import Path
+ from typing import Any, Literal, cast
+
+ SeverityLevel = Literal["CRITICAL", "HIGH", "MEDIUM", "LOW", "UNKNOWN"]
+
+
+ class TrivyScanError(Exception):
+     """Raised when Trivy scan fails."""
+
+
+ def run_trivy_scan(
+     image: str,
+     output_format: Literal["json", "sarif", "table"] = "json",
+     severity: list[SeverityLevel] | None = None,
+     output_file: Path | None = None,
+ ) -> dict[str, Any]:
+     """
+     Run Trivy security scan on Docker image.
+
+     Args:
+         image: Docker image to scan (e.g., 'kademoslabs/kekkai:latest')
+         output_format: Output format (json, sarif, table)
+         severity: List of severity levels to include (default: all)
+         output_file: Path to write scan results (optional)
+
+     Returns:
+         Scan results as dictionary
+
+     Raises:
+         TrivyScanError: If scan fails
+     """
+     cmd = ["trivy", "image", "--format", output_format]
+
+     if severity:
+         cmd.extend(["--severity", ",".join(severity)])
+
+     if output_file:
+         cmd.extend(["--output", str(output_file)])
+
+     cmd.append(image)
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             check=True,
+             timeout=300,
+         )
+
+         if output_format in ("json", "sarif"):
+             return json.loads(result.stdout) if result.stdout else {}
+         else:
+             # For table format, return raw output
+             return {"output": result.stdout}
+
+     except subprocess.CalledProcessError as e:
+         raise TrivyScanError(f"Trivy scan failed: {e.stderr}") from e
+     except subprocess.TimeoutExpired as e:
+         raise TrivyScanError("Trivy scan timed out after 300s") from e
+     except json.JSONDecodeError as e:
+         raise TrivyScanError(f"Failed to parse Trivy output: {e}") from e
+     except Exception as e:
+         raise TrivyScanError(f"Trivy scan failed: {e}") from e
+
+
+ def filter_vulnerabilities(
+     scan_results: dict[str, Any],
+     severity_threshold: SeverityLevel = "HIGH",
+ ) -> list[dict[str, Any]]:
+     """
+     Filter vulnerabilities by severity threshold.
+
+     Args:
+         scan_results: Trivy scan results (JSON format)
+         severity_threshold: Minimum severity to include
+
+     Returns:
+         List of vulnerabilities meeting threshold
+     """
+     severity_order: dict[SeverityLevel, int] = {
+         "CRITICAL": 4,
+         "HIGH": 3,
+         "MEDIUM": 2,
+         "LOW": 1,
+         "UNKNOWN": 0,
+     }
+
+     threshold_level = severity_order.get(severity_threshold, 0)
+     filtered: list[dict[str, Any]] = []
+
+     # Trivy JSON format has "Results" array
+     results = scan_results.get("Results", [])
+
+     for result in results:
+         vulnerabilities = result.get("Vulnerabilities", [])
+         for vuln in vulnerabilities:
+             severity = vuln.get("Severity", "UNKNOWN")
+             severity_value = severity_order.get(severity, 0)
+             if severity_value >= threshold_level:
+                 filtered.append(vuln)
+
+     return filtered
+
+
+ def count_vulnerabilities_by_severity(
+     scan_results: dict[str, Any],
+ ) -> dict[SeverityLevel, int]:
+     """
+     Count vulnerabilities by severity level.
+
+     Args:
+         scan_results: Trivy scan results (JSON format)
+
+     Returns:
+         Dictionary mapping severity to count
+     """
+     counts: dict[SeverityLevel, int] = {
+         "CRITICAL": 0,
+         "HIGH": 0,
+         "MEDIUM": 0,
+         "LOW": 0,
+         "UNKNOWN": 0,
+     }
+
+     results = scan_results.get("Results", [])
+
+     for result in results:
+         vulnerabilities = result.get("Vulnerabilities", [])
+         for vuln in vulnerabilities:
+             severity = vuln.get("Severity", "UNKNOWN")
+             if severity in counts:
+                 severity_key = cast(SeverityLevel, severity)
+                 counts[severity_key] += 1
+
+     return counts
+
+
+ def has_critical_vulnerabilities(
+     scan_results: dict[str, Any],
+     severity_threshold: SeverityLevel = "HIGH",
+ ) -> bool:
+     """
+     Check if scan results contain vulnerabilities at or above threshold.
+
+     Args:
+         scan_results: Trivy scan results (JSON format)
+         severity_threshold: Minimum severity to check
+
+     Returns:
+         True if vulnerabilities found at or above threshold
+     """
+     filtered = filter_vulnerabilities(scan_results, severity_threshold)
+     return len(filtered) > 0
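
These three functions compose naturally into a CI vulnerability gate; a brief sketch, with the image name and threshold as illustrative choices:

    from kekkai_core.docker.security import (
        count_vulnerabilities_by_severity,
        has_critical_vulnerabilities,
        run_trivy_scan,
    )

    image = "kademoslabs/kekkai:latest"

    # Scan only for the severities the gate cares about
    results = run_trivy_scan(image, severity=["CRITICAL", "HIGH"])

    counts = count_vulnerabilities_by_severity(results)
    print(f"CRITICAL={counts['CRITICAL']} HIGH={counts['HIGH']}")

    if has_critical_vulnerabilities(results, severity_threshold="HIGH"):
        raise SystemExit("vulnerability gate failed: HIGH or CRITICAL findings")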