kekkai-cli 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kekkai/__init__.py +7 -0
- kekkai/cli.py +1038 -0
- kekkai/config.py +403 -0
- kekkai/dojo.py +419 -0
- kekkai/dojo_import.py +213 -0
- kekkai/github/__init__.py +16 -0
- kekkai/github/commenter.py +198 -0
- kekkai/github/models.py +56 -0
- kekkai/github/sanitizer.py +112 -0
- kekkai/installer/__init__.py +39 -0
- kekkai/installer/errors.py +23 -0
- kekkai/installer/extract.py +161 -0
- kekkai/installer/manager.py +252 -0
- kekkai/installer/manifest.py +189 -0
- kekkai/installer/verify.py +86 -0
- kekkai/manifest.py +77 -0
- kekkai/output.py +218 -0
- kekkai/paths.py +46 -0
- kekkai/policy.py +326 -0
- kekkai/runner.py +70 -0
- kekkai/scanners/__init__.py +67 -0
- kekkai/scanners/backends/__init__.py +14 -0
- kekkai/scanners/backends/base.py +73 -0
- kekkai/scanners/backends/docker.py +178 -0
- kekkai/scanners/backends/native.py +240 -0
- kekkai/scanners/base.py +110 -0
- kekkai/scanners/container.py +144 -0
- kekkai/scanners/falco.py +237 -0
- kekkai/scanners/gitleaks.py +237 -0
- kekkai/scanners/semgrep.py +227 -0
- kekkai/scanners/trivy.py +246 -0
- kekkai/scanners/url_policy.py +163 -0
- kekkai/scanners/zap.py +340 -0
- kekkai/threatflow/__init__.py +94 -0
- kekkai/threatflow/artifacts.py +476 -0
- kekkai/threatflow/chunking.py +361 -0
- kekkai/threatflow/core.py +438 -0
- kekkai/threatflow/mermaid.py +374 -0
- kekkai/threatflow/model_adapter.py +491 -0
- kekkai/threatflow/prompts.py +277 -0
- kekkai/threatflow/redaction.py +228 -0
- kekkai/threatflow/sanitizer.py +643 -0
- kekkai/triage/__init__.py +33 -0
- kekkai/triage/app.py +168 -0
- kekkai/triage/audit.py +203 -0
- kekkai/triage/ignore.py +269 -0
- kekkai/triage/models.py +185 -0
- kekkai/triage/screens.py +341 -0
- kekkai/triage/widgets.py +169 -0
- kekkai_cli-1.0.0.dist-info/METADATA +135 -0
- kekkai_cli-1.0.0.dist-info/RECORD +90 -0
- kekkai_cli-1.0.0.dist-info/WHEEL +5 -0
- kekkai_cli-1.0.0.dist-info/entry_points.txt +3 -0
- kekkai_cli-1.0.0.dist-info/top_level.txt +3 -0
- kekkai_core/__init__.py +3 -0
- kekkai_core/ci/__init__.py +11 -0
- kekkai_core/ci/benchmarks.py +354 -0
- kekkai_core/ci/metadata.py +104 -0
- kekkai_core/ci/validators.py +92 -0
- kekkai_core/docker/__init__.py +17 -0
- kekkai_core/docker/metadata.py +153 -0
- kekkai_core/docker/sbom.py +173 -0
- kekkai_core/docker/security.py +158 -0
- kekkai_core/docker/signing.py +135 -0
- kekkai_core/redaction.py +84 -0
- kekkai_core/slsa/__init__.py +13 -0
- kekkai_core/slsa/verify.py +121 -0
- kekkai_core/windows/__init__.py +29 -0
- kekkai_core/windows/chocolatey.py +335 -0
- kekkai_core/windows/installer.py +256 -0
- kekkai_core/windows/scoop.py +165 -0
- kekkai_core/windows/validators.py +220 -0
- portal/__init__.py +19 -0
- portal/api.py +155 -0
- portal/auth.py +103 -0
- portal/enterprise/__init__.py +32 -0
- portal/enterprise/audit.py +435 -0
- portal/enterprise/licensing.py +342 -0
- portal/enterprise/rbac.py +276 -0
- portal/enterprise/saml.py +595 -0
- portal/ops/__init__.py +53 -0
- portal/ops/backup.py +553 -0
- portal/ops/log_shipper.py +469 -0
- portal/ops/monitoring.py +517 -0
- portal/ops/restore.py +469 -0
- portal/ops/secrets.py +408 -0
- portal/ops/upgrade.py +591 -0
- portal/tenants.py +340 -0
- portal/uploads.py +259 -0
- portal/web.py +384 -0
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
"""Scoop manifest generation and validation for Windows distribution."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def generate_scoop_manifest(
    version: str,
    sha256: str,
    whl_url: str,
    python_version: str = "3.12",
) -> dict[str, Any]:
    """Build the Scoop manifest dictionary for the Kekkai package.

    Args:
        version: Package version (e.g., "0.0.1")
        sha256: SHA256 checksum of the wheel file
        whl_url: URL to wheel file (typically GitHub release)
        python_version: Minimum Python version required

    Returns:
        Scoop manifest as dictionary

    Raises:
        ValueError: If version format is invalid or URLs are not HTTPS
    """
    # Reject anything that is not basic semver (optional dotted prerelease suffix).
    if re.match(r"^\d+\.\d+\.\d+(-[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?$", version) is None:
        raise ValueError(f"Invalid version format: {version}")

    # Plain-HTTP download links are rejected outright.
    if not whl_url.startswith("https://"):
        raise ValueError(f"URL must use HTTPS: {whl_url}")

    # A SHA256 digest is exactly 64 hex characters.
    if re.match(r"^[a-fA-F0-9]{64}$", sha256) is None:
        raise ValueError(f"Invalid SHA256 format: {sha256}")

    # PowerShell install script: verify interpreter version, then pip-install the wheel.
    install_script = [
        "# Validate Python version",
        (
            "$pythonVersion = python --version 2>&1 | "
            'Select-String -Pattern "Python (\\d+\\.\\d+)"'
        ),
        "$version = [version]$pythonVersion.Matches.Groups[1].Value",
        f'if ($version -lt [version]"{python_version}") {{',
        f' Write-Error "Python {python_version}+ required, found $version"',
        " exit 1",
        "}",
        "",
        "# Install Kekkai wheel",
        f'python -m pip install --force-reinstall --no-deps "{whl_url}"',
        "if ($LASTEXITCODE -ne 0) {",
        ' Write-Error "pip install failed"',
        " exit 1",
        "}",
    ]

    manifest: dict[str, Any] = {
        "version": version,
        "description": "Kekkai - Local-first AppSec orchestration and compliance checker",
        "homepage": "https://github.com/kademoslabs/kekkai",
        "license": "MIT",
        "depends": "python",
        "url": whl_url,
        "hash": sha256,
        "installer": {"script": install_script},
        "uninstaller": {
            "script": [
                "python -m pip uninstall -y kekkai",
            ]
        },
        "checkver": {
            "github": "https://github.com/kademoslabs/kekkai",
        },
        "autoupdate": {
            "url": "https://github.com/kademoslabs/kekkai/releases/download/v$version/kekkai-$version-py3-none-any.whl",
        },
        "notes": [
            "Kekkai has been installed successfully!",
            "Run 'kekkai --help' to get started.",
            "For documentation, visit: https://github.com/kademoslabs/kekkai",
        ],
    }

    return manifest
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def validate_scoop_manifest(manifest: dict[str, Any]) -> bool:
    """Validate Scoop manifest structure and required fields.

    Args:
        manifest: Scoop manifest dictionary

    Returns:
        True if manifest is valid

    Raises:
        ValueError: If manifest is invalid with detailed error message
    """
    # Every manifest must carry these top-level keys.
    for required in ("version", "description", "homepage", "license", "url", "hash"):
        if required not in manifest:
            raise ValueError(f"Missing required field: {required}")

    # Basic semver with an optional dotted prerelease suffix.
    version = manifest["version"]
    if re.match(r"^\d+\.\d+\.\d+(-[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?$", version) is None:
        raise ValueError(f"Invalid version format: {version}")

    # Download URL must be HTTPS.
    url = manifest["url"]
    if not url.startswith("https://"):
        raise ValueError(f"URL must use HTTPS: {url}")

    # A SHA256 digest is exactly 64 hex characters.
    sha256 = manifest["hash"]
    if re.match(r"^[a-fA-F0-9]{64}$", sha256) is None:
        raise ValueError(f"Invalid SHA256 format: {sha256}")

    # installer/uninstaller are optional, but when present each needs a script.
    for section in ("installer", "uninstaller"):
        if section in manifest and "script" not in manifest[section]:
            raise ValueError(f"{section} must contain 'script' field")

    # Homepage may be plain HTTP or HTTPS.
    homepage = manifest["homepage"]
    if not homepage.startswith(("https://", "http://")):
        raise ValueError(f"Invalid homepage URL: {homepage}")

    return True
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def format_scoop_manifest_json(manifest: dict[str, Any]) -> str:
    """Render a Scoop manifest as human-readable JSON.

    Args:
        manifest: Scoop manifest dictionary

    Returns:
        JSON string with 2-space indentation
    """
    # ensure_ascii=False keeps any non-ASCII text readable in the emitted manifest.
    return json.dumps(manifest, ensure_ascii=False, indent=2)
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def generate_scoop_checksum_file(version: str, sha256: str) -> str:
    """Produce the checksums.txt content used for Scoop verification.

    Args:
        version: Package version
        sha256: SHA256 checksum

    Returns:
        Formatted checksum file content
    """
    wheel_name = f"kekkai-{version}-py3-none-any.whl"
    return f"{wheel_name}: {sha256}\n"
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
"""Windows-specific validation utilities."""
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
import subprocess
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def validate_python_version(
    required_version: str = "3.12",
) -> tuple[bool, str]:
    """
    Validate that the running Python interpreter meets a minimum version.

    Args:
        required_version: Minimum Python version (e.g., "3.12")

    Returns:
        Tuple of (is_valid, message)
    """
    version_info = sys.version_info
    current_version = f"{version_info.major}.{version_info.minor}"

    # Require at least "major.minor"; extra components beyond minor are ignored.
    req_parts = required_version.split(".")
    if len(req_parts) < 2:
        return False, f"Invalid required version format: {required_version}"

    # Keep the try narrow: only the int() conversions can actually fail here
    # (the original wrapped infallible sys.version_info access in the same try).
    try:
        req_major = int(req_parts[0])
        req_minor = int(req_parts[1])
    except ValueError as e:
        return False, f"Failed to validate Python version: {e}"

    # Tuple comparison expresses (major, minor) ordering directly and
    # replaces the original's error-prone two-clause boolean.
    if (version_info.major, version_info.minor) >= (req_major, req_minor):
        return True, f"Python {current_version} meets requirement >= {required_version}"
    return (
        False,
        f"Python {required_version}+ required, found {current_version}",
    )
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def validate_windows_path(executable: str) -> tuple[bool, str | None]:
|
|
50
|
+
"""
|
|
51
|
+
Validate that an executable is in Windows PATH.
|
|
52
|
+
|
|
53
|
+
Args:
|
|
54
|
+
executable: Executable name (e.g., "python", "docker")
|
|
55
|
+
|
|
56
|
+
Returns:
|
|
57
|
+
Tuple of (is_found, path_or_none)
|
|
58
|
+
"""
|
|
59
|
+
try:
|
|
60
|
+
# For non-Windows systems, use 'which' or 'where' based on platform
|
|
61
|
+
if sys.platform.startswith("win"):
|
|
62
|
+
cmd = ["where", executable]
|
|
63
|
+
else:
|
|
64
|
+
cmd = ["which", executable]
|
|
65
|
+
|
|
66
|
+
result = subprocess.run( # noqa: S603
|
|
67
|
+
cmd,
|
|
68
|
+
capture_output=True,
|
|
69
|
+
text=True,
|
|
70
|
+
check=False,
|
|
71
|
+
timeout=5,
|
|
72
|
+
)
|
|
73
|
+
|
|
74
|
+
if result.returncode == 0 and result.stdout.strip():
|
|
75
|
+
path = result.stdout.strip().split("\n")[0] # Take first match
|
|
76
|
+
return True, path
|
|
77
|
+
else:
|
|
78
|
+
return False, None
|
|
79
|
+
|
|
80
|
+
except (subprocess.TimeoutExpired, FileNotFoundError):
|
|
81
|
+
return False, None
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def validate_pip_available() -> tuple[bool, str]:
    """
    Check that pip can be invoked as ``python -m pip``.

    Returns:
        Tuple of (is_available, message)
    """
    try:
        proc = subprocess.run(  # noqa: S603
            [sys.executable, "-m", "pip", "--version"],
            capture_output=True,
            text=True,
            check=False,
            timeout=10,
        )
    except (subprocess.TimeoutExpired, FileNotFoundError) as e:
        return False, f"Failed to check pip: {e}"

    if proc.returncode != 0:
        return False, "pip is not available or not working correctly"

    # Include pip's own version banner in the success message.
    return True, f"pip is available: {proc.stdout.strip()}"
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def validate_scoop_format(manifest_path: Path) -> tuple[bool, list[str]]:
    """
    Validate a Scoop manifest JSON file on disk.

    Args:
        manifest_path: Path to Scoop manifest JSON file

    Returns:
        Tuple of (is_valid, list_of_errors)
    """
    import json

    errors: list[str] = []

    # The file must exist before anything else can be checked.
    if not manifest_path.exists():
        errors.append(f"Manifest file not found: {manifest_path}")
        return False, errors

    try:
        raw = manifest_path.read_text(encoding="utf-8")
    except Exception as e:
        errors.append(f"Failed to read manifest: {e}")
        return False, errors

    try:
        manifest = json.loads(raw)
    except json.JSONDecodeError as e:
        errors.append(f"Invalid JSON: {e}")
        return False, errors

    # Required top-level keys; later checks accumulate rather than bail out.
    for required in ("version", "description", "homepage", "license", "url", "hash"):
        if required not in manifest:
            errors.append(f"Missing required field: {required}")

    if "version" in manifest:
        version = manifest["version"]
        if re.match(
            r"^\d+\.\d+\.\d+(-[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?$",
            version,
        ) is None:
            errors.append(f"Invalid version format: {version}")

    if "url" in manifest:
        url = manifest["url"]
        if not url.startswith("https://"):
            errors.append(f"URL must use HTTPS: {url}")

    if "hash" in manifest:
        sha256 = manifest["hash"]
        if re.match(r"^[a-fA-F0-9]{64}$", sha256) is None:
            errors.append(f"Invalid SHA256 format: {sha256}")

    # installer/uninstaller are optional, but when present each needs a script.
    for section in ("installer", "uninstaller"):
        if section in manifest and "script" not in manifest[section]:
            errors.append(f"{section} must contain 'script' field")

    return not errors, errors
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def validate_chocolatey_nuspec(nuspec_path: Path) -> tuple[bool, list[str]]:
    """
    Validate Chocolatey .nuspec file format and structure.

    Handles the XML namespace that standard nuspec files declare
    (the original lookup via ``root.find("metadata")`` failed on any
    namespaced document, i.e. on every real Chocolatey package).
    Un-namespaced files remain accepted.

    Args:
        nuspec_path: Path to .nuspec XML file

    Returns:
        Tuple of (is_valid, list_of_errors)
    """
    import xml.etree.ElementTree as ET  # nosec B405 - validates local trusted nuspec files

    errors: list[str] = []

    # Check file exists
    if not nuspec_path.exists():
        errors.append(f"Nuspec file not found: {nuspec_path}")
        return False, errors

    # Parse XML
    try:
        tree = ET.parse(nuspec_path)  # noqa: S314 # nosec B314 - Local trusted file validation only
        root = tree.getroot()
    except ET.ParseError as e:
        errors.append(f"Invalid XML: {e}")
        return False, errors

    def _local_name(tag: object) -> str:
        # ElementTree encodes namespaces as "{uri}local"; strip the uri part.
        # Non-string tags (comments, processing instructions) are skipped.
        return tag.rsplit("}", 1)[-1] if isinstance(tag, str) else ""

    # Locate <metadata> regardless of any declared namespace.
    metadata = next(
        (child for child in root if _local_name(child.tag) == "metadata"),
        None,
    )
    if metadata is None:
        errors.append("Missing <metadata> element")
        return False, errors

    # Validate required fields, again namespace-agnostically.
    required_fields = ["id", "version", "authors", "description"]
    present = {_local_name(child.tag) for child in metadata}
    for field in required_fields:
        if field not in present:
            errors.append(f"Missing required field: {field}")

    return len(errors) == 0, errors
|
portal/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"""Kekkai Hosted Portal - DefectDojo-backed multi-tenant security dashboard."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
__all__ = [
|
|
6
|
+
"AuthMethod",
|
|
7
|
+
"AuthResult",
|
|
8
|
+
"SAMLTenantConfig",
|
|
9
|
+
"Tenant",
|
|
10
|
+
"TenantStore",
|
|
11
|
+
"UploadResult",
|
|
12
|
+
"authenticate_request",
|
|
13
|
+
"process_upload",
|
|
14
|
+
"validate_upload",
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
from .auth import AuthResult, authenticate_request
|
|
18
|
+
from .tenants import AuthMethod, SAMLTenantConfig, Tenant, TenantStore
|
|
19
|
+
from .uploads import UploadResult, process_upload, validate_upload
|
portal/api.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
"""Portal API endpoints for programmatic access.
|
|
2
|
+
|
|
3
|
+
Provides REST API endpoints that expose the same data visible in the UI.
|
|
4
|
+
All endpoints require authentication and enforce tenant isolation.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
import os
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
from .tenants import Tenant
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass(frozen=True)
class UploadInfo:
    """Information about an upload."""

    # Unique identifier for the upload (file stem on disk in list_uploads).
    upload_id: str
    # File name of the stored upload artifact.
    filename: str
    # Upload time serialized as a string (presumably epoch seconds — confirm against producer).
    timestamp: str
    # Hash of the uploaded file's content (algorithm not visible here — verify against producer).
    file_hash: str
    # Size of the uploaded file in bytes.
    size_bytes: int
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass(frozen=True)
class TenantStats:
    """Statistics for a tenant."""

    # Number of upload records found for the tenant.
    total_uploads: int
    # Combined size of all upload records in bytes.
    total_size_bytes: int
    # Most recent upload time as a string, or None when no uploads exist.
    last_upload_time: str | None
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def get_tenant_info(tenant: Tenant) -> dict[str, Any]:
    """Build the API-facing metadata dictionary for a tenant.

    Args:
        tenant: The authenticated tenant

    Returns:
        Dictionary containing tenant metadata
    """
    # Plain attributes are copied verbatim, in a fixed key order.
    simple_attrs = (
        "id",
        "name",
        "dojo_product_id",
        "dojo_engagement_id",
        "enabled",
        "max_upload_size_mb",
    )
    info: dict[str, Any] = {attr: getattr(tenant, attr) for attr in simple_attrs}
    # auth_method is an enum; expose its string value only.
    info["auth_method"] = tenant.auth_method.value
    info["default_role"] = tenant.default_role
    return info
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def list_uploads(tenant: Tenant, limit: int = 50) -> list[dict[str, Any]]:
    """List the most recent upload records for a tenant.

    Args:
        tenant: The authenticated tenant
        limit: Maximum number of uploads to return

    Returns:
        List of upload metadata dictionaries, newest first
    """
    base_dir = Path(os.environ.get("PORTAL_UPLOAD_DIR", "/var/lib/kekkai-portal/uploads"))
    tenant_dir = base_dir / tenant.id

    if not tenant_dir.exists():
        return []

    records: list[dict[str, Any]] = []

    try:
        # Newest-first by modification time, capped at `limit` entries.
        candidates = sorted(
            tenant_dir.glob("*.json"),
            key=lambda path: path.stat().st_mtime,
            reverse=True,
        )
        for entry in candidates[:limit]:
            info = entry.stat()
            records.append(
                {
                    "upload_id": entry.stem,
                    "filename": entry.name,
                    "timestamp": str(int(info.st_mtime)),
                    "size_bytes": info.st_size,
                }
            )
    except (OSError, PermissionError) as e:
        # Best effort: a partially-built list is still returned.
        logger.warning("Failed to list uploads for tenant %s: %s", tenant.id, e)

    return records
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def get_tenant_stats(tenant: Tenant) -> dict[str, Any]:
    """Summarize upload counts and sizes for a tenant.

    Args:
        tenant: The authenticated tenant

    Returns:
        Dictionary containing tenant statistics
    """
    base_dir = Path(os.environ.get("PORTAL_UPLOAD_DIR", "/var/lib/kekkai-portal/uploads"))
    tenant_dir = base_dir / tenant.id

    count = 0
    size_total = 0
    newest_mtime: int | None = None

    if tenant_dir.exists():
        try:
            for entry in tenant_dir.glob("*.json"):
                info = entry.stat()
                count += 1
                size_total += info.st_size
                # Track the most recent modification time seen so far.
                if newest_mtime is None or info.st_mtime > newest_mtime:
                    newest_mtime = int(info.st_mtime)
        except (OSError, PermissionError) as e:
            # Best effort: partial totals are still reported.
            logger.warning("Failed to get stats for tenant %s: %s", tenant.id, e)

    return {
        "total_uploads": count,
        "total_size_bytes": size_total,
        "last_upload_time": str(newest_mtime) if newest_mtime else None,
    }
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def serialize_api_response(data: dict[str, Any]) -> bytes:
    """Encode an API response payload as pretty-printed JSON bytes.

    Args:
        data: Response data dictionary

    Returns:
        JSON-encoded bytes
    """
    text = json.dumps(data, indent=2)
    return text.encode("utf-8")
|
portal/auth.py
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
"""Authentication middleware for portal API.
|
|
2
|
+
|
|
3
|
+
Security controls:
|
|
4
|
+
- ASVS V16.3.2: Log failed authorization attempts
|
|
5
|
+
- Constant-time API key comparison to prevent timing attacks
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import logging
|
|
11
|
+
import re
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from typing import TYPE_CHECKING
|
|
14
|
+
|
|
15
|
+
from kekkai_core import redact
|
|
16
|
+
|
|
17
|
+
from .tenants import Tenant, TenantStore
|
|
18
|
+
|
|
19
|
+
if TYPE_CHECKING:
|
|
20
|
+
from collections.abc import Mapping
|
|
21
|
+
|
|
22
|
+
logger = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
BEARER_PATTERN = re.compile(r"^Bearer\s+(\S+)$", re.IGNORECASE)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass(frozen=True)
class AuthResult:
    """Result of authentication attempt."""

    # True when the request presented a valid key for an enabled tenant.
    authenticated: bool
    # The resolved tenant on success; None on failure.
    tenant: Tenant | None = None
    # Human-readable failure reason; None on success.
    error: str | None = None
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def authenticate_request(
    headers: Mapping[str, str],
    tenant_store: TenantStore,
    client_ip: str = "unknown",
) -> AuthResult:
    """Authenticate a request using a Bearer token.

    Args:
        headers: Request headers (case-insensitive lookup)
        tenant_store: Tenant storage for API key verification
        client_ip: Client IP for logging failed attempts

    Returns:
        AuthResult with tenant if authenticated, error otherwise
    """
    # Each guard below logs a distinct failure reason for security
    # monitoring (ASVS V16.3.2) before returning.
    auth_header = _get_header(headers, "Authorization")
    if not auth_header:
        _log_auth_failure(client_ip, "missing_header")
        return AuthResult(authenticated=False, error="Missing Authorization header")

    bearer_match = BEARER_PATTERN.match(auth_header)
    if bearer_match is None:
        _log_auth_failure(client_ip, "invalid_format")
        return AuthResult(authenticated=False, error="Invalid Authorization format")

    api_key = bearer_match.group(1)
    if not api_key:
        _log_auth_failure(client_ip, "empty_token")
        return AuthResult(authenticated=False, error="Empty API token")

    tenant = tenant_store.get_by_api_key(api_key)
    if not tenant:
        # Only a short prefix of the key is ever logged.
        _log_auth_failure(client_ip, "invalid_token", api_key_prefix=api_key[:8])
        return AuthResult(authenticated=False, error="Invalid API key")

    if not tenant.enabled:
        _log_auth_failure(client_ip, "tenant_disabled", tenant_id=tenant.id)
        return AuthResult(authenticated=False, error="Tenant is disabled")

    logger.info(
        "auth.success client_ip=%s tenant_id=%s",
        redact(client_ip),
        tenant.id,
    )
    return AuthResult(authenticated=True, tenant=tenant)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _get_header(headers: Mapping[str, str], name: str) -> str | None:
    """Return the value for *name* from *headers*, matched case-insensitively."""
    wanted = name.lower()
    return next(
        (value for key, value in headers.items() if key.lower() == wanted),
        None,
    )
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _log_auth_failure(
    client_ip: str,
    reason: str,
    tenant_id: str | None = None,
    api_key_prefix: str | None = None,
) -> None:
    """Log authentication failure for security monitoring (ASVS V16.3.2)."""
    # The client IP is passed through redact() before it reaches the log.
    fields = [f"auth.failure reason={reason}", f"client_ip={redact(client_ip)}"]
    if tenant_id:
        fields.append(f"tenant_id={tenant_id}")
    if api_key_prefix:
        fields.append(f"api_key_prefix={api_key_prefix}...")
    logger.warning(" ".join(fields))
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"""Enterprise features for Kekkai Portal.
|
|
2
|
+
|
|
3
|
+
Provides:
|
|
4
|
+
- RBAC (Role-Based Access Control)
|
|
5
|
+
- SAML 2.0 SSO integration
|
|
6
|
+
- Audit logging
|
|
7
|
+
- Enterprise license gating
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
from .audit import AuditEvent, AuditEventType, AuditLog
|
|
13
|
+
from .licensing import EnterpriseLicense, LicenseStatus, LicenseValidator
|
|
14
|
+
from .rbac import AuthorizationResult, Permission, RBACManager, Role
|
|
15
|
+
from .saml import SAMLAssertion, SAMLConfig, SAMLError, SAMLProcessor
|
|
16
|
+
|
|
17
|
+
__all__ = [
|
|
18
|
+
"AuditEvent",
|
|
19
|
+
"AuditEventType",
|
|
20
|
+
"AuditLog",
|
|
21
|
+
"AuthorizationResult",
|
|
22
|
+
"EnterpriseLicense",
|
|
23
|
+
"LicenseStatus",
|
|
24
|
+
"LicenseValidator",
|
|
25
|
+
"Permission",
|
|
26
|
+
"RBACManager",
|
|
27
|
+
"Role",
|
|
28
|
+
"SAMLAssertion",
|
|
29
|
+
"SAMLConfig",
|
|
30
|
+
"SAMLError",
|
|
31
|
+
"SAMLProcessor",
|
|
32
|
+
]
|