kekkai-cli 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kekkai/__init__.py +7 -0
- kekkai/cli.py +1038 -0
- kekkai/config.py +403 -0
- kekkai/dojo.py +419 -0
- kekkai/dojo_import.py +213 -0
- kekkai/github/__init__.py +16 -0
- kekkai/github/commenter.py +198 -0
- kekkai/github/models.py +56 -0
- kekkai/github/sanitizer.py +112 -0
- kekkai/installer/__init__.py +39 -0
- kekkai/installer/errors.py +23 -0
- kekkai/installer/extract.py +161 -0
- kekkai/installer/manager.py +252 -0
- kekkai/installer/manifest.py +189 -0
- kekkai/installer/verify.py +86 -0
- kekkai/manifest.py +77 -0
- kekkai/output.py +218 -0
- kekkai/paths.py +46 -0
- kekkai/policy.py +326 -0
- kekkai/runner.py +70 -0
- kekkai/scanners/__init__.py +67 -0
- kekkai/scanners/backends/__init__.py +14 -0
- kekkai/scanners/backends/base.py +73 -0
- kekkai/scanners/backends/docker.py +178 -0
- kekkai/scanners/backends/native.py +240 -0
- kekkai/scanners/base.py +110 -0
- kekkai/scanners/container.py +144 -0
- kekkai/scanners/falco.py +237 -0
- kekkai/scanners/gitleaks.py +237 -0
- kekkai/scanners/semgrep.py +227 -0
- kekkai/scanners/trivy.py +246 -0
- kekkai/scanners/url_policy.py +163 -0
- kekkai/scanners/zap.py +340 -0
- kekkai/threatflow/__init__.py +94 -0
- kekkai/threatflow/artifacts.py +476 -0
- kekkai/threatflow/chunking.py +361 -0
- kekkai/threatflow/core.py +438 -0
- kekkai/threatflow/mermaid.py +374 -0
- kekkai/threatflow/model_adapter.py +491 -0
- kekkai/threatflow/prompts.py +277 -0
- kekkai/threatflow/redaction.py +228 -0
- kekkai/threatflow/sanitizer.py +643 -0
- kekkai/triage/__init__.py +33 -0
- kekkai/triage/app.py +168 -0
- kekkai/triage/audit.py +203 -0
- kekkai/triage/ignore.py +269 -0
- kekkai/triage/models.py +185 -0
- kekkai/triage/screens.py +341 -0
- kekkai/triage/widgets.py +169 -0
- kekkai_cli-1.0.0.dist-info/METADATA +135 -0
- kekkai_cli-1.0.0.dist-info/RECORD +90 -0
- kekkai_cli-1.0.0.dist-info/WHEEL +5 -0
- kekkai_cli-1.0.0.dist-info/entry_points.txt +3 -0
- kekkai_cli-1.0.0.dist-info/top_level.txt +3 -0
- kekkai_core/__init__.py +3 -0
- kekkai_core/ci/__init__.py +11 -0
- kekkai_core/ci/benchmarks.py +354 -0
- kekkai_core/ci/metadata.py +104 -0
- kekkai_core/ci/validators.py +92 -0
- kekkai_core/docker/__init__.py +17 -0
- kekkai_core/docker/metadata.py +153 -0
- kekkai_core/docker/sbom.py +173 -0
- kekkai_core/docker/security.py +158 -0
- kekkai_core/docker/signing.py +135 -0
- kekkai_core/redaction.py +84 -0
- kekkai_core/slsa/__init__.py +13 -0
- kekkai_core/slsa/verify.py +121 -0
- kekkai_core/windows/__init__.py +29 -0
- kekkai_core/windows/chocolatey.py +335 -0
- kekkai_core/windows/installer.py +256 -0
- kekkai_core/windows/scoop.py +165 -0
- kekkai_core/windows/validators.py +220 -0
- portal/__init__.py +19 -0
- portal/api.py +155 -0
- portal/auth.py +103 -0
- portal/enterprise/__init__.py +32 -0
- portal/enterprise/audit.py +435 -0
- portal/enterprise/licensing.py +342 -0
- portal/enterprise/rbac.py +276 -0
- portal/enterprise/saml.py +595 -0
- portal/ops/__init__.py +53 -0
- portal/ops/backup.py +553 -0
- portal/ops/log_shipper.py +469 -0
- portal/ops/monitoring.py +517 -0
- portal/ops/restore.py +469 -0
- portal/ops/secrets.py +408 -0
- portal/ops/upgrade.py +591 -0
- portal/tenants.py +340 -0
- portal/uploads.py +259 -0
- portal/web.py +384 -0
kekkai/dojo.py
ADDED
|
@@ -0,0 +1,419 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import contextlib
|
|
4
|
+
import json
|
|
5
|
+
import secrets
|
|
6
|
+
import shutil
|
|
7
|
+
import socket
|
|
8
|
+
import string
|
|
9
|
+
import subprocess # nosec B404
|
|
10
|
+
import time
|
|
11
|
+
import webbrowser
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any
|
|
15
|
+
from urllib.error import HTTPError, URLError
|
|
16
|
+
from urllib.request import Request, urlopen
|
|
17
|
+
|
|
18
|
+
from .paths import app_base_dir, ensure_dir
|
|
19
|
+
|
|
20
|
+
# Default published HTTP port for the DefectDojo nginx frontend.
DEFAULT_PORT = 8080
# Default published HTTPS port for the nginx frontend.
DEFAULT_TLS_PORT = 8443
# Docker Compose project name used for all dojo containers/volumes.
DEFAULT_PROJECT_NAME = "kekkai-dojo"
# Default image tags for the DefectDojo django and nginx images.
DEFAULT_DJANGO_VERSION = "latest"
DEFAULT_NGINX_VERSION = "latest"
# Compose profile that gates every service in the generated compose file.
DOJO_PROFILE = "dojo"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass(frozen=True)
class ServiceStatus:
    """Snapshot of one Docker Compose service as reported by ``compose ps``."""

    # Service (or container) name; "unknown" when neither field is present.
    name: str
    # Lifecycle state string, e.g. "running" or "exited".
    state: str
    # Health-check status when the service defines one, else None.
    health: str | None
    # Exit code for stopped containers; None when not reported.
    exit_code: int | None
    # Stringified published-port info (the "Publishers" field), if any.
    ports: str | None
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def compose_dir(override: str | None = None) -> Path:
|
|
38
|
+
if override:
|
|
39
|
+
return Path(override).expanduser().resolve()
|
|
40
|
+
return app_base_dir() / "dojo"
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def compose_command() -> list[str]:
    """Locate a working Compose invocation.

    Prefers the ``docker compose`` plugin (probed via ``version``);
    falls back to the legacy ``docker-compose`` binary.

    Raises:
        RuntimeError: when neither entry point is available.
    """
    docker_bin = shutil.which("docker")
    if docker_bin:
        probe = subprocess.run(  # noqa: S603 # nosec B603
            [docker_bin, "compose", "version"], capture_output=True, text=True
        )
        if probe.returncode == 0:
            return [docker_bin, "compose"]
    legacy = shutil.which("docker-compose")
    if legacy:
        return [legacy]
    raise RuntimeError("Docker Compose not found; install docker and docker compose")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def check_port_available(port: int, host: str = "127.0.0.1") -> bool:
    """Return True when *port* can currently be bound on *host*."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # SO_REUSEADDR avoids false negatives from sockets in TIME_WAIT.
        probe.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            probe.bind((host, port))
        except OSError:
            return False
        return True
    finally:
        probe.close()
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def load_env_file(path: Path) -> dict[str, str]:
    """Parse a dotenv-style file into a dict.

    Blank lines, ``#`` comments, and lines without ``=`` are skipped;
    keys and values are whitespace-trimmed. A missing file yields {}.
    """
    if not path.exists():
        return {}
    result: dict[str, str] = {}
    for raw in path.read_text().splitlines():
        entry = raw.strip()
        if entry and not entry.startswith("#") and "=" in entry:
            key, _, value = entry.partition("=")
            result[key.strip()] = value.strip()
    return result
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def write_env_file(path: Path, env: dict[str, str]) -> None:
    """Serialize *env* to *path* as KEY=VALUE lines sorted by key.

    Always ends with a newline (an empty mapping writes just "\\n").
    """
    body = "\n".join(f"{key}={value}" for key, value in sorted(env.items()))
    path.write_text(body + "\n")
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def ensure_env(path: Path, port: int, tls_port: int) -> dict[str, str]:
    """Load the dojo .env file and fill in any missing settings.

    Secrets (admin password, DB password, Django secret key, AES key) are
    generated only when absent, so re-running is idempotent. The database
    URL is derived from the (possibly pre-existing) DB settings. Returns
    the merged environment mapping.
    """
    env = load_env_file(path)

    static_defaults = {
        "DD_ADMIN_USER": "admin",
        "DD_ADMIN_MAIL": "admin@defectdojo.local",
        "DD_ADMIN_FIRST_NAME": "Admin",
        "DD_ADMIN_LAST_NAME": "User",
        "DD_DATABASE_NAME": "defectdojo",
        "DD_DATABASE_USER": "defectdojo",
        "DD_DATABASE_HOST": "postgres",
        "DD_DATABASE_PORT": "5432",
        "DD_CELERY_BROKER_URL": "redis://valkey:6379/0",
        "DD_INITIALIZE": "true",
        "DD_ALLOWED_HOSTS": "*",
        "DD_DATABASE_READINESS_TIMEOUT": "30",
        "DD_DJANGO_METRICS_ENABLED": "False",
        "DD_CELERY_WORKER_CONCURRENCY": "1",
        "DD_CELERY_WORKER_PREFETCH_MULTIPLIER": "1",
        "DD_PORT": str(port),
        "DD_TLS_PORT": str(tls_port),
        "DJANGO_VERSION": DEFAULT_DJANGO_VERSION,
        "NGINX_VERSION": DEFAULT_NGINX_VERSION,
    }
    for key, value in static_defaults.items():
        env.setdefault(key, value)

    # Randomly generated secrets, created once and then persisted.
    env.setdefault("DD_ADMIN_PASSWORD", _random_string(20))
    env.setdefault("DD_DATABASE_PASSWORD", _random_string(24))
    env.setdefault("DD_SECRET_KEY", _random_string(50))
    env.setdefault("DD_CREDENTIAL_AES_256_KEY", _random_string(32))

    # Must come after the DB settings above so the URL reflects them.
    env.setdefault(
        "DD_DATABASE_URL",
        f"postgresql://{env['DD_DATABASE_USER']}:{env['DD_DATABASE_PASSWORD']}@"
        f"{env['DD_DATABASE_HOST']}:{env['DD_DATABASE_PORT']}/{env['DD_DATABASE_NAME']}",
    )
    return env
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def build_compose_yaml() -> str:
    """Return the docker-compose YAML for a minimal DefectDojo stack.

    Services: nginx (frontend), uwsgi (Django app), celerybeat/celeryworker
    (background tasks), initializer (one-shot DB migration/admin setup),
    postgres, and valkey (Redis-compatible broker). Every service is gated
    behind the "dojo" compose profile, and all tunables are read from the
    generated .env file via ``${VAR:-default}`` interpolation.
    """
    return (
        'version: "3.9"\n'
        "services:\n"
        "  nginx:\n"
        "    image: defectdojo/defectdojo-nginx:${NGINX_VERSION:-latest}\n"
        '    profiles: ["dojo"]\n'
        "    depends_on:\n"
        "      uwsgi:\n"
        "        condition: service_started\n"
        "    environment:\n"
        '      NGINX_METRICS_ENABLED: "false"\n'
        '      DD_UWSGI_HOST: "uwsgi"\n'
        '      DD_UWSGI_PORT: "3031"\n'
        "    volumes:\n"
        "      - defectdojo_media:/usr/share/nginx/html/media\n"
        "    ports:\n"
        "      - target: 8080\n"
        "        published: ${DD_PORT:-8080}\n"
        "        protocol: tcp\n"
        "        mode: host\n"
        "      - target: 8443\n"
        "        published: ${DD_TLS_PORT:-8443}\n"
        "        protocol: tcp\n"
        "        mode: host\n"
        "    healthcheck:\n"
        '      test: ["CMD", "wget", "-q", "-O", "-", "http://localhost:8080/"]\n'
        "      interval: 10s\n"
        "      timeout: 3s\n"
        "      retries: 15\n"
        "  uwsgi:\n"
        "    image: defectdojo/defectdojo-django:${DJANGO_VERSION:-latest}\n"
        '    profiles: ["dojo"]\n'
        "    depends_on:\n"
        "      initializer:\n"
        "        condition: service_completed_successfully\n"
        "      postgres:\n"
        "        condition: service_healthy\n"
        "      valkey:\n"
        "        condition: service_started\n"
        '    entrypoint: ["/wait-for-it.sh", '
        '"${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}", '
        '"-t", "30", "--", "/entrypoint-uwsgi.sh"]\n'
        "    environment:\n"
        '      DD_DEBUG: "False"\n'
        "      DD_DJANGO_METRICS_ENABLED: ${DD_DJANGO_METRICS_ENABLED:-False}\n"
        "      DD_ALLOWED_HOSTS: ${DD_ALLOWED_HOSTS:-*}\n"
        "      DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo}\n"
        "      DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://valkey:6379/0}\n"
        "      DD_SECRET_KEY: ${DD_SECRET_KEY:-change-me}\n"
        "      DD_CREDENTIAL_AES_256_KEY: ${DD_CREDENTIAL_AES_256_KEY:-change-me}\n"
        "      DD_DATABASE_READINESS_TIMEOUT: ${DD_DATABASE_READINESS_TIMEOUT:-30}\n"
        "    volumes:\n"
        "      - defectdojo_media:${DD_MEDIA_ROOT:-/app/media}\n"
        "  celerybeat:\n"
        "    image: defectdojo/defectdojo-django:${DJANGO_VERSION:-latest}\n"
        '    profiles: ["dojo"]\n'
        "    depends_on:\n"
        "      initializer:\n"
        "        condition: service_completed_successfully\n"
        "      postgres:\n"
        "        condition: service_healthy\n"
        "      valkey:\n"
        "        condition: service_started\n"
        '    entrypoint: ["/wait-for-it.sh", '
        '"${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}", '
        '"-t", "30", "--", "/entrypoint-celery-beat.sh"]\n'
        "    environment:\n"
        "      DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo}\n"
        "      DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://valkey:6379/0}\n"
        "      DD_SECRET_KEY: ${DD_SECRET_KEY:-change-me}\n"
        "      DD_CREDENTIAL_AES_256_KEY: ${DD_CREDENTIAL_AES_256_KEY:-change-me}\n"
        "      DD_DATABASE_READINESS_TIMEOUT: ${DD_DATABASE_READINESS_TIMEOUT:-30}\n"
        "  celeryworker:\n"
        "    image: defectdojo/defectdojo-django:${DJANGO_VERSION:-latest}\n"
        '    profiles: ["dojo"]\n'
        "    depends_on:\n"
        "      initializer:\n"
        "        condition: service_completed_successfully\n"
        "      postgres:\n"
        "        condition: service_healthy\n"
        "      valkey:\n"
        "        condition: service_started\n"
        '    entrypoint: ["/wait-for-it.sh", '
        '"${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}", '
        '"-t", "30", "--", "/entrypoint-celery-worker.sh"]\n'
        "    environment:\n"
        "      DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo}\n"
        "      DD_CELERY_BROKER_URL: ${DD_CELERY_BROKER_URL:-redis://valkey:6379/0}\n"
        "      DD_SECRET_KEY: ${DD_SECRET_KEY:-change-me}\n"
        "      DD_CREDENTIAL_AES_256_KEY: ${DD_CREDENTIAL_AES_256_KEY:-change-me}\n"
        "      DD_DATABASE_READINESS_TIMEOUT: ${DD_DATABASE_READINESS_TIMEOUT:-30}\n"
        "      DD_CELERY_WORKER_CONCURRENCY: ${DD_CELERY_WORKER_CONCURRENCY:-1}\n"
        "      DD_CELERY_WORKER_PREFETCH_MULTIPLIER: ${DD_CELERY_WORKER_PREFETCH_MULTIPLIER:-1}\n"
        "    volumes:\n"
        "      - defectdojo_media:${DD_MEDIA_ROOT:-/app/media}\n"
        "  initializer:\n"
        "    image: defectdojo/defectdojo-django:${DJANGO_VERSION:-latest}\n"
        '    profiles: ["dojo"]\n'
        "    depends_on:\n"
        "      postgres:\n"
        "        condition: service_healthy\n"
        '    entrypoint: ["/wait-for-it.sh", '
        '"${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}", '
        '"--", "/entrypoint-initializer.sh"]\n'
        "    environment:\n"
        "      DD_DATABASE_URL: ${DD_DATABASE_URL:-postgresql://defectdojo:defectdojo@postgres:5432/defectdojo}\n"
        "      DD_ADMIN_USER: ${DD_ADMIN_USER:-admin}\n"
        "      DD_ADMIN_MAIL: ${DD_ADMIN_MAIL:-admin@defectdojo.local}\n"
        "      DD_ADMIN_FIRST_NAME: ${DD_ADMIN_FIRST_NAME:-Admin}\n"
        "      DD_ADMIN_LAST_NAME: ${DD_ADMIN_LAST_NAME:-User}\n"
        "      DD_ADMIN_PASSWORD: ${DD_ADMIN_PASSWORD:-admin}\n"
        "      DD_INITIALIZE: ${DD_INITIALIZE:-true}\n"
        "      DD_SECRET_KEY: ${DD_SECRET_KEY:-change-me}\n"
        "      DD_CREDENTIAL_AES_256_KEY: ${DD_CREDENTIAL_AES_256_KEY:-change-me}\n"
        "      DD_DATABASE_READINESS_TIMEOUT: ${DD_DATABASE_READINESS_TIMEOUT:-30}\n"
        "  postgres:\n"
        "    image: postgres:18.1-alpine\n"
        '    profiles: ["dojo"]\n'
        "    environment:\n"
        "      POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo}\n"
        "      POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo}\n"
        "      POSTGRES_PASSWORD: ${DD_DATABASE_PASSWORD:-defectdojo}\n"
        '    command: ["postgres", "-c", "shared_buffers=256MB", "-c", '
        '"work_mem=16MB", "-c", "maintenance_work_mem=128MB", '
        '"-c", "max_connections=50"]\n'
        "    volumes:\n"
        "      - defectdojo_postgres:/var/lib/postgresql/data\n"
        "    healthcheck:\n"
        '      test: ["CMD-SHELL", '
        '"pg_isready -U ${DD_DATABASE_USER:-defectdojo} -d '
        '${DD_DATABASE_NAME:-defectdojo}"]\n'
        "      interval: 10s\n"
        "      timeout: 5s\n"
        "      retries: 10\n"
        "  valkey:\n"
        "    image: valkey/valkey:7.2.11-alpine\n"
        '    profiles: ["dojo"]\n'
        "    volumes:\n"
        "      - defectdojo_redis:/data\n"
        "volumes:\n"
        "  defectdojo_postgres: {}\n"
        "  defectdojo_media: {}\n"
        "  defectdojo_redis: {}\n"
    )
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def ensure_compose_files(
    compose_path: Path, env_path: Path, port: int, tls_port: int
) -> dict[str, str]:
    """Materialize the compose file and .env on disk.

    Creates the parent directory if needed, merges defaults into the env
    file, rewrites it, regenerates the compose YAML, and returns the
    effective env mapping.
    """
    ensure_dir(compose_path.parent)
    merged = ensure_env(env_path, port=port, tls_port=tls_port)
    write_env_file(env_path, merged)
    compose_path.write_text(build_compose_yaml())
    return merged
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
def compose_up(
    *,
    compose_root: Path,
    project_name: str,
    port: int,
    tls_port: int,
    wait: bool,
    open_browser: bool,
) -> dict[str, str]:
    """Start the DefectDojo stack with Docker Compose.

    Verifies both published ports are free, (re)writes the compose/env
    files, then runs ``compose up -d`` under the dojo profile. Optionally
    blocks until the UI answers and opens a browser tab.

    Returns:
        The effective env mapping written to .env.

    Raises:
        RuntimeError: when a port is taken or the compose command fails.
    """
    for candidate in (port, tls_port):
        if not check_port_available(candidate):
            raise RuntimeError(f"Port {candidate} is already in use")

    compose_file = compose_root / "docker-compose.yml"
    env = ensure_compose_files(compose_file, compose_root / ".env", port, tls_port)

    base = compose_command() + [
        "--project-name",
        project_name,
        "--file",
        str(compose_file),
        "--profile",
        DOJO_PROFILE,
    ]
    result = subprocess.run(  # noqa: S603 # nosec B603
        base + ["up", "-d"], capture_output=True, text=True
    )
    if result.returncode != 0:
        raise RuntimeError(result.stderr.strip() or "Failed to start DefectDojo")

    if wait:
        wait_for_ui(port, timeout=300)
    if open_browser:
        open_ui(port)
    return env
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
def compose_down(*, compose_root: Path, project_name: str) -> None:
    """Stop and remove the DefectDojo stack, including orphan containers.

    Raises:
        RuntimeError: when the compose command fails.
    """
    base = compose_command() + [
        "--project-name",
        project_name,
        "--file",
        str(compose_root / "docker-compose.yml"),
        "--profile",
        DOJO_PROFILE,
    ]
    result = subprocess.run(  # noqa: S603 # nosec B603
        base + ["down", "--remove-orphans"], capture_output=True, text=True
    )
    if result.returncode != 0:
        raise RuntimeError(result.stderr.strip() or "Failed to stop DefectDojo")
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
def compose_status(*, compose_root: Path, project_name: str) -> list[ServiceStatus]:
    """Return per-service status via ``compose ps --format json``.

    Raises:
        RuntimeError: when the compose command fails.
    """
    base = compose_command() + [
        "--project-name",
        project_name,
        "--file",
        str(compose_root / "docker-compose.yml"),
        "--profile",
        DOJO_PROFILE,
    ]
    result = subprocess.run(  # noqa: S603 # nosec B603
        base + ["ps", "--format", "json"], capture_output=True, text=True
    )
    if result.returncode != 0:
        raise RuntimeError(result.stderr.strip() or "Failed to read status")
    return parse_compose_ps(result.stdout)
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def parse_compose_ps(output: str) -> list[ServiceStatus]:
    """Parse ``docker compose ps --format json`` output into statuses.

    Accepts a single JSON document (object or array) as well as the
    newline-delimited JSON objects emitted by Compose v2.

    Raises:
        ValueError: when the payload is not a JSON object/array of services.
    """
    if not output.strip():
        return []
    try:
        data: Any = json.loads(output)
    except json.JSONDecodeError:
        # NDJSON fallback: one JSON object per non-blank line.
        data = [json.loads(chunk) for chunk in output.splitlines() if chunk.strip()]
    if isinstance(data, dict):
        data = [data]
    if not isinstance(data, list):
        raise ValueError("Invalid compose ps json")

    return [
        ServiceStatus(
            name=str(entry.get("Service") or entry.get("Name") or "unknown"),
            state=str(entry.get("State") or "unknown"),
            health=_optional_str(entry.get("Health")),
            exit_code=_optional_int(entry.get("ExitCode")),
            ports=_optional_str(entry.get("Publishers")),
        )
        for entry in data
        if isinstance(entry, dict)
    ]
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
def wait_for_ui(port: int, timeout: int = 300) -> None:
    """Poll the DefectDojo UI until it answers, or fail after *timeout* s.

    A response code of 200, 302, or 401 counts as ready (401 means the app
    is up and demanding authentication).

    Bug fix: ``urlopen`` raises HTTPError for non-2xx codes, so a 401 never
    reached the in-body status check in the original — a login-protected UI
    would always "time out". Ready codes are now also recognized inside the
    HTTPError handler.

    Raises:
        RuntimeError: when the UI does not become ready before the deadline.
    """
    url = f"http://localhost:{port}/"
    ready_codes = {200, 302, 401}
    deadline = time.monotonic() + timeout
    last_error: str | None = None
    while time.monotonic() < deadline:
        try:
            req = Request(url, method="GET")  # noqa: S310 # nosec B310
            with urlopen(req, timeout=5) as resp:  # noqa: S310 # nosec B310
                if resp.status in ready_codes:
                    return
                last_error = f"HTTP {resp.status}"
        except HTTPError as exc:
            # 4xx/5xx arrive here, not in the branch above.
            if exc.code in ready_codes:
                return
            last_error = f"HTTP {exc.code}"
        except URLError as exc:
            last_error = str(exc)
        time.sleep(2)
    raise RuntimeError(f"DefectDojo UI did not become ready in time ({last_error})")
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def open_ui(port: int) -> None:
    """Print the local DefectDojo URL and best-effort open it in a browser.

    Browser launch failures (e.g. headless hosts) are silently ignored.
    """
    target = f"http://localhost:{port}/"
    print(f"Opening {target}")
    with contextlib.suppress(Exception):
        webbrowser.open(target)
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
def _random_string(length: int) -> str:
|
|
399
|
+
alphabet = string.ascii_letters + string.digits
|
|
400
|
+
return "".join(secrets.choice(alphabet) for _ in range(length))
|
|
401
|
+
|
|
402
|
+
|
|
403
|
+
def _optional_str(value: object) -> str | None:
|
|
404
|
+
if value is None:
|
|
405
|
+
return None
|
|
406
|
+
return str(value)
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
def _optional_int(value: object) -> int | None:
|
|
410
|
+
if value is None:
|
|
411
|
+
return None
|
|
412
|
+
if isinstance(value, int):
|
|
413
|
+
return value
|
|
414
|
+
if isinstance(value, str):
|
|
415
|
+
try:
|
|
416
|
+
return int(value)
|
|
417
|
+
except ValueError:
|
|
418
|
+
return None
|
|
419
|
+
return None
|
kekkai/dojo_import.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import urllib.parse
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Any
|
|
7
|
+
from urllib.error import HTTPError, URLError
|
|
8
|
+
from urllib.request import Request, urlopen
|
|
9
|
+
|
|
10
|
+
from .scanners.base import ScanResult
|
|
11
|
+
|
|
12
|
+
# Socket timeout (seconds) applied to every DefectDojo API request.
DEFAULT_TIMEOUT = 30


@dataclass(frozen=True)
class DojoConfig:
    """Connection settings for a DefectDojo instance."""

    # Base URL of the server, e.g. "http://localhost:8080" (trailing "/" ok).
    base_url: str
    # API v2 token, sent as "Authorization: Token <key>".
    api_key: str
    # Product to group imported scans under (created on demand).
    product_name: str = "Kekkai Scans"
    # Engagement within the product (created on demand).
    engagement_name: str = "Default Engagement"
    # NOTE(review): not consulted by DojoClient in this module — TLS
    # verification follows urlopen defaults; confirm before relying on it.
    verify_ssl: bool = True


@dataclass(frozen=True)
class ImportResult:
    """Outcome of importing one scan report into DefectDojo."""

    # True when the import-scan API call succeeded.
    success: bool
    # DefectDojo test id returned by the API, when available.
    test_id: int | None = None
    # Finding counts taken from the API's "statistics" payload.
    findings_created: int = 0
    findings_closed: int = 0
    # Human-readable failure reason when success is False.
    error: str | None = None


class DojoClient:
    """Minimal DefectDojo API v2 client built on urllib only.

    Supports JSON request bodies and hand-rolled multipart/form-data
    uploads for the import-scan endpoint.
    """

    def __init__(self, config: DojoConfig, timeout: int = DEFAULT_TIMEOUT) -> None:
        self._config = config
        self._timeout = timeout
        # Normalize so endpoint joining never produces a double slash.
        self._base_url = config.base_url.rstrip("/")

    def _request(
        self,
        method: str,
        endpoint: str,
        data: dict[str, Any] | None = None,
        files: dict[str, tuple[str, bytes, str]] | None = None,
    ) -> dict[str, Any]:
        """Issue one API call and return the decoded JSON response body.

        With *files*, the payload is multipart/form-data and *data* becomes
        form fields; otherwise *data* (if any) is sent as a JSON body.

        Raises:
            RuntimeError: on HTTP error responses or connection failures.
        """
        url = f"{self._base_url}/api/v2/{endpoint}"
        headers = {
            "Authorization": f"Token {self._config.api_key}",
        }

        body: bytes | None = None
        if files:
            boundary = "----KekkaiFormBoundary"
            headers["Content-Type"] = f"multipart/form-data; boundary={boundary}"
            body = self._build_multipart(data or {}, files, boundary)
        elif data:
            headers["Content-Type"] = "application/json"
            body = json.dumps(data).encode()

        req = Request(url, data=body, headers=headers, method=method)  # noqa: S310 # nosec B310

        try:
            with urlopen(req, timeout=self._timeout) as resp:  # noqa: S310 # nosec B310
                # Bug fix: the previous `if resp.read` tested the bound
                # method itself (always truthy) and then parsed even empty
                # bodies, making json.loads fail on e.g. 204 responses.
                payload = resp.read()
                return json.loads(payload.decode()) if payload else {}
        except HTTPError as exc:
            error_body = exc.read().decode() if exc.fp else str(exc)
            raise RuntimeError(f"Dojo API error {exc.code}: {error_body}") from exc
        except URLError as exc:
            raise RuntimeError(f"Dojo connection error: {exc.reason}") from exc

    def _build_multipart(
        self,
        data: dict[str, Any],
        files: dict[str, tuple[str, bytes, str]],
        boundary: str,
    ) -> bytes:
        """Encode *data* fields and *files* as a multipart/form-data body."""
        lines: list[bytes] = []
        for key, value in data.items():
            lines.append(f"--{boundary}".encode())
            lines.append(f'Content-Disposition: form-data; name="{key}"'.encode())
            lines.append(b"")
            lines.append(str(value).encode())

        for field_name, (filename, content, content_type) in files.items():
            lines.append(f"--{boundary}".encode())
            # Bug fix: the unpacked filename was previously unused and a
            # hard-coded placeholder was sent in its place.
            disp = f'Content-Disposition: form-data; name="{field_name}"; filename="{filename}"'
            lines.append(disp.encode())
            lines.append(f"Content-Type: {content_type}".encode())
            lines.append(b"")
            lines.append(content)

        lines.append(f"--{boundary}--".encode())
        lines.append(b"")
        return b"\r\n".join(lines)

    def get_or_create_product(self, name: str) -> int:
        """Return the id of product *name*, creating it if absent."""
        resp = self._request("GET", f"products/?name={urllib.parse.quote(name)}")
        results = resp.get("results", [])
        if results:
            return int(results[0]["id"])

        resp = self._request(
            "POST",
            "products/",
            data={
                "name": name,
                "description": "Created by Kekkai CLI",
                "prod_type": 1,
            },
        )
        return int(resp["id"])

    def get_or_create_engagement(self, product_id: int, name: str) -> int:
        """Return the id of engagement *name* under *product_id*, creating it if absent."""
        resp = self._request(
            "GET",
            f"engagements/?product={product_id}&name={urllib.parse.quote(name)}",
        )
        results = resp.get("results", [])
        if results:
            return int(results[0]["id"])

        resp = self._request(
            "POST",
            "engagements/",
            data={
                "name": name,
                "product": product_id,
                "target_start": "2024-01-01",
                "target_end": "2099-12-31",
                "engagement_type": "CI/CD",
                "status": "In Progress",
            },
        )
        return int(resp["id"])

    def import_scan(
        self,
        scan_result: ScanResult,
        scan_type: str,
        engagement_id: int,
        run_id: str,
        commit_sha: str | None = None,
    ) -> ImportResult:
        """Upload a scanner's raw output file via the import-scan endpoint.

        Failures are reported in the returned ImportResult rather than
        raised, so a single bad scan does not abort a batch import.
        """
        if not scan_result.raw_output_path or not scan_result.raw_output_path.exists():
            return ImportResult(
                success=False,
                error="No raw output file to import",
            )

        file_content = scan_result.raw_output_path.read_bytes()
        filename = scan_result.raw_output_path.name

        data = {
            "engagement": engagement_id,
            "scan_type": scan_type,
            "active": True,
            "verified": False,
            "minimum_severity": "Info",
            "close_old_findings": True,
            "push_to_jira": False,
            "version": run_id,
        }
        if commit_sha:
            data["commit_hash"] = commit_sha

        try:
            resp = self._request(
                "POST",
                "import-scan/",
                data=data,
                files={"file": (filename, file_content, "application/json")},
            )
            return ImportResult(
                success=True,
                test_id=resp.get("test"),
                findings_created=resp.get("statistics", {}).get("created", 0),
                findings_closed=resp.get("statistics", {}).get("closed", 0),
            )
        except RuntimeError as exc:
            return ImportResult(success=False, error=str(exc))
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def import_results_to_dojo(
    config: DojoConfig,
    results: list[ScanResult],
    scanners: dict[str, Any],
    run_id: str,
    commit_sha: str | None = None,
) -> list[ImportResult]:
    """Push every scan result to DefectDojo, one import per scanner run.

    The product and engagement named in *config* are created on demand.
    Results whose scanner is not registered in *scanners* are reported as
    failed ImportResults instead of raising, so the batch always completes.
    """
    client = DojoClient(config)
    product_id = client.get_or_create_product(config.product_name)
    engagement_id = client.get_or_create_engagement(product_id, config.engagement_name)

    outcomes: list[ImportResult] = []
    for scan in results:
        handler = scanners.get(scan.scanner)
        if not handler:
            outcomes.append(
                ImportResult(success=False, error=f"Unknown scanner: {scan.scanner}")
            )
            continue

        outcomes.append(
            client.import_scan(
                scan_result=scan,
                scan_type=getattr(handler, "scan_type", scan.scanner),
                engagement_id=engagement_id,
                run_id=run_id,
                commit_sha=commit_sha,
            )
        )
    return outcomes
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"""GitHub integration for Kekkai PR comments."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from .commenter import post_pr_comments
|
|
6
|
+
from .models import GitHubConfig, PRComment, PRCommentResult
|
|
7
|
+
from .sanitizer import escape_markdown, redact_secrets
|
|
8
|
+
|
|
9
|
+
# Public API of the kekkai.github package; kept alphabetically sorted.
__all__ = [
    "GitHubConfig",
    "PRComment",
    "PRCommentResult",
    "escape_markdown",
    "post_pr_comments",
    "redact_secrets",
]
|