kekkai_cli-1.0.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- kekkai/__init__.py +7 -0
- kekkai/cli.py +1038 -0
- kekkai/config.py +403 -0
- kekkai/dojo.py +419 -0
- kekkai/dojo_import.py +213 -0
- kekkai/github/__init__.py +16 -0
- kekkai/github/commenter.py +198 -0
- kekkai/github/models.py +56 -0
- kekkai/github/sanitizer.py +112 -0
- kekkai/installer/__init__.py +39 -0
- kekkai/installer/errors.py +23 -0
- kekkai/installer/extract.py +161 -0
- kekkai/installer/manager.py +252 -0
- kekkai/installer/manifest.py +189 -0
- kekkai/installer/verify.py +86 -0
- kekkai/manifest.py +77 -0
- kekkai/output.py +218 -0
- kekkai/paths.py +46 -0
- kekkai/policy.py +326 -0
- kekkai/runner.py +70 -0
- kekkai/scanners/__init__.py +67 -0
- kekkai/scanners/backends/__init__.py +14 -0
- kekkai/scanners/backends/base.py +73 -0
- kekkai/scanners/backends/docker.py +178 -0
- kekkai/scanners/backends/native.py +240 -0
- kekkai/scanners/base.py +110 -0
- kekkai/scanners/container.py +144 -0
- kekkai/scanners/falco.py +237 -0
- kekkai/scanners/gitleaks.py +237 -0
- kekkai/scanners/semgrep.py +227 -0
- kekkai/scanners/trivy.py +246 -0
- kekkai/scanners/url_policy.py +163 -0
- kekkai/scanners/zap.py +340 -0
- kekkai/threatflow/__init__.py +94 -0
- kekkai/threatflow/artifacts.py +476 -0
- kekkai/threatflow/chunking.py +361 -0
- kekkai/threatflow/core.py +438 -0
- kekkai/threatflow/mermaid.py +374 -0
- kekkai/threatflow/model_adapter.py +491 -0
- kekkai/threatflow/prompts.py +277 -0
- kekkai/threatflow/redaction.py +228 -0
- kekkai/threatflow/sanitizer.py +643 -0
- kekkai/triage/__init__.py +33 -0
- kekkai/triage/app.py +168 -0
- kekkai/triage/audit.py +203 -0
- kekkai/triage/ignore.py +269 -0
- kekkai/triage/models.py +185 -0
- kekkai/triage/screens.py +341 -0
- kekkai/triage/widgets.py +169 -0
- kekkai_cli-1.0.0.dist-info/METADATA +135 -0
- kekkai_cli-1.0.0.dist-info/RECORD +90 -0
- kekkai_cli-1.0.0.dist-info/WHEEL +5 -0
- kekkai_cli-1.0.0.dist-info/entry_points.txt +3 -0
- kekkai_cli-1.0.0.dist-info/top_level.txt +3 -0
- kekkai_core/__init__.py +3 -0
- kekkai_core/ci/__init__.py +11 -0
- kekkai_core/ci/benchmarks.py +354 -0
- kekkai_core/ci/metadata.py +104 -0
- kekkai_core/ci/validators.py +92 -0
- kekkai_core/docker/__init__.py +17 -0
- kekkai_core/docker/metadata.py +153 -0
- kekkai_core/docker/sbom.py +173 -0
- kekkai_core/docker/security.py +158 -0
- kekkai_core/docker/signing.py +135 -0
- kekkai_core/redaction.py +84 -0
- kekkai_core/slsa/__init__.py +13 -0
- kekkai_core/slsa/verify.py +121 -0
- kekkai_core/windows/__init__.py +29 -0
- kekkai_core/windows/chocolatey.py +335 -0
- kekkai_core/windows/installer.py +256 -0
- kekkai_core/windows/scoop.py +165 -0
- kekkai_core/windows/validators.py +220 -0
- portal/__init__.py +19 -0
- portal/api.py +155 -0
- portal/auth.py +103 -0
- portal/enterprise/__init__.py +32 -0
- portal/enterprise/audit.py +435 -0
- portal/enterprise/licensing.py +342 -0
- portal/enterprise/rbac.py +276 -0
- portal/enterprise/saml.py +595 -0
- portal/ops/__init__.py +53 -0
- portal/ops/backup.py +553 -0
- portal/ops/log_shipper.py +469 -0
- portal/ops/monitoring.py +517 -0
- portal/ops/restore.py +469 -0
- portal/ops/secrets.py +408 -0
- portal/ops/upgrade.py +591 -0
- portal/tenants.py +340 -0
- portal/uploads.py +259 -0
- portal/web.py +384 -0
kekkai/config.py
ADDED
@@ -0,0 +1,403 @@
from __future__ import annotations

import os
import tomllib
from collections.abc import Mapping, Sequence
from dataclasses import dataclass, field
from pathlib import Path

from .paths import app_base_dir

DEFAULT_TIMEOUT_SECONDS = 900
DEFAULT_ENV_ALLOWLIST = [
    "PATH",
    "HOME",
    "USER",
    "SHELL",
    "LANG",
    "LC_ALL",
    "LC_CTYPE",
]
DEFAULT_SCANNERS = ["trivy", "semgrep", "gitleaks"]


@dataclass(frozen=True)
class PipelineStep:
    name: str
    args: list[str]


@dataclass(frozen=True)
class DojoSettings:
    enabled: bool = False
    base_url: str = "http://localhost:8080"
    api_key: str = ""
    product_name: str = "Kekkai Scans"
    engagement_name: str = "Default Engagement"


@dataclass(frozen=True)
class ZapSettings:
    """ZAP DAST scanner settings.

    ZAP requires explicit target URL and enforces URL policy by default.
    Private IPs are blocked unless explicitly allowed.
    """

    enabled: bool = False
    target_url: str | None = None
    allow_private_ips: bool = False  # Default: block private/internal IPs
    allowed_domains: list[str] = field(default_factory=list)
    timeout_seconds: int = 900


@dataclass(frozen=True)
class FalcoSettings:
    """Falco runtime security settings.

    EXPERIMENTAL: Linux-only. Requires explicit opt-in.
    """

    enabled: bool = False  # Must be explicitly enabled
    rules_file: str | None = None
    timeout_seconds: int = 300


@dataclass(frozen=True)
class PolicySettings:
    """Policy enforcement settings for CI mode.

    Configures which severity levels trigger failures and threshold limits.
    """

    fail_on_critical: bool = True
    fail_on_high: bool = True
    fail_on_medium: bool = False
    fail_on_low: bool = False
    fail_on_info: bool = False
    max_critical: int = 0
    max_high: int = 0
    max_medium: int = -1  # -1 = no limit
    max_low: int = -1
    max_info: int = -1
    max_total: int = -1


@dataclass(frozen=True)
class ThreatFlowSettings:
    """ThreatFlow threat modeling settings.

    Configures LLM backend and security controls.
    """

    enabled: bool = False
    model_mode: str = "local"  # local, openai, anthropic, mock
    model_path: str | None = None  # For local models
    api_key: str | None = None  # For remote APIs (should use env var)
    api_base: str | None = None  # Custom API endpoint
    model_name: str | None = None  # Specific model to use
    max_files: int = 500
    timeout_seconds: int = 300
    redact_secrets: bool = True
    sanitize_content: bool = True
    warn_on_injection: bool = True


@dataclass(frozen=True)
class Config:
    repo_path: Path
    run_base_dir: Path
    timeout_seconds: int
    env_allowlist: list[str]
    pipeline: list[PipelineStep]
    scanners: list[str] | None = None
    dojo: DojoSettings | None = None
    zap: ZapSettings | None = None
    falco: FalcoSettings | None = None
    policy: PolicySettings | None = None
    threatflow: ThreatFlowSettings | None = None


@dataclass(frozen=True)
class ConfigOverrides:
    repo_path: Path | None = None
    run_base_dir: Path | None = None
    timeout_seconds: int | None = None
    env_allowlist: list[str] | None = None


def default_config(base_dir: Path) -> dict[str, object]:
    return {
        "repo_path": ".",
        "run_base_dir": str(base_dir / "runs"),
        "timeout_seconds": DEFAULT_TIMEOUT_SECONDS,
        "env_allowlist": list(DEFAULT_ENV_ALLOWLIST),
        "pipeline": [],
    }


def default_config_text(base_dir: Path) -> str:
    env_allowlist = ", ".join(f'"{item}"' for item in DEFAULT_ENV_ALLOWLIST)
    # Use forward slashes for TOML compatibility on Windows (backslashes are escape chars)
    run_base_dir = str(base_dir / "runs").replace("\\", "/")
    return (
        "# Kekkai config\n"
        "# Values can be overridden via env (KEKKAI_*) or CLI flags.\n\n"
        f'repo_path = "."\n'
        f'run_base_dir = "{run_base_dir}"\n'
        f"timeout_seconds = {DEFAULT_TIMEOUT_SECONDS}\n"
        f"env_allowlist = [{env_allowlist}]\n\n"
        "# [[pipeline]]\n"
        '# name = "example"\n'
        '# args = ["echo", "hello"]\n'
    )


def load_config(
    path: Path,
    env: Mapping[str, str] | None = None,
    overrides: ConfigOverrides | None = None,
    base_dir: Path | None = None,
) -> Config:
    env = env or os.environ
    overrides = overrides or ConfigOverrides()
    base_dir = base_dir or app_base_dir()

    values: dict[str, object] = default_config(base_dir)
    values.update(_load_from_file(path))
    values.update(_load_from_env(env))
    values.update(_load_from_overrides(overrides))

    return _coerce_config(values)
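
# Illustrative sketch (not part of the published config.py): load_config() above
# layers its sources in a fixed order: built-in defaults, then the TOML file, then
# KEKKAI_* environment variables, then explicit ConfigOverrides (typically CLI
# flags). The temporary paths and values below are assumptions for the example.

import tempfile
from pathlib import Path

from kekkai.config import ConfigOverrides, load_config

with tempfile.TemporaryDirectory() as tmp:
    cfg_path = Path(tmp) / "kekkai.toml"
    cfg_path.write_text("timeout_seconds = 600\n")
    cfg = load_config(
        cfg_path,
        env={"KEKKAI_TIMEOUT_SECONDS": "300"},           # env var beats the file...
        overrides=ConfigOverrides(timeout_seconds=120),  # ...and overrides beat the env var
        base_dir=Path(tmp),
    )
    assert cfg.timeout_seconds == 120
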
def _load_from_file(path: Path) -> dict[str, object]:
    if not path.exists():
        return {}
    data = tomllib.loads(path.read_text())
    if isinstance(data, dict) and "kekkai" in data and isinstance(data["kekkai"], dict):
        data = data["kekkai"]
    if not isinstance(data, dict):
        raise ValueError("config file must contain a table")
    return dict(data)
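
# Illustrative sketch (not part of the published config.py): _load_from_file()
# accepts either top-level keys or the same keys wrapped in a [kekkai] table, so
# both documents below load identically. Paths are assumptions for the example.

import tempfile
from pathlib import Path

from kekkai.config import _load_from_file

documents = ['repo_path = "./app"\n', '[kekkai]\nrepo_path = "./app"\n']
with tempfile.TemporaryDirectory() as tmp:
    for i, text in enumerate(documents):
        p = Path(tmp) / f"cfg{i}.toml"
        p.write_text(text)
        assert _load_from_file(p)["repo_path"] == "./app"
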
def _load_from_env(env: Mapping[str, str]) -> dict[str, object]:
    result: dict[str, object] = {}
    if value := env.get("KEKKAI_REPO_PATH"):
        result["repo_path"] = value
    if value := env.get("KEKKAI_RUN_BASE_DIR"):
        result["run_base_dir"] = value
    if value := env.get("KEKKAI_TIMEOUT_SECONDS"):
        result["timeout_seconds"] = value
    if value := env.get("KEKKAI_ENV_ALLOWLIST"):
        result["env_allowlist"] = value
    return result


def _load_from_overrides(overrides: ConfigOverrides) -> dict[str, object]:
    result: dict[str, object] = {}
    if overrides.repo_path is not None:
        result["repo_path"] = str(overrides.repo_path)
    if overrides.run_base_dir is not None:
        result["run_base_dir"] = str(overrides.run_base_dir)
    if overrides.timeout_seconds is not None:
        result["timeout_seconds"] = overrides.timeout_seconds
    if overrides.env_allowlist is not None:
        result["env_allowlist"] = overrides.env_allowlist
    return result


def _coerce_config(values: Mapping[str, object]) -> Config:
    repo_path = _expect_str(values.get("repo_path"), "repo_path")
    run_base_dir = _expect_str(values.get("run_base_dir"), "run_base_dir")
    timeout_seconds = _expect_int(values.get("timeout_seconds"), "timeout_seconds")
    env_allowlist = _expect_str_list(values.get("env_allowlist"), "env_allowlist")
    pipeline = _parse_pipeline(values.get("pipeline", []))
    scanners = _parse_scanners(values.get("scanners"))
    dojo = _parse_dojo(values.get("dojo"))
    zap = _parse_zap(values.get("zap"))
    falco = _parse_falco(values.get("falco"))
    policy = _parse_policy(values.get("policy"))
    threatflow = _parse_threatflow(values.get("threatflow"))

    return Config(
        repo_path=Path(repo_path),
        run_base_dir=Path(run_base_dir).expanduser(),
        timeout_seconds=timeout_seconds,
        env_allowlist=env_allowlist,
        pipeline=pipeline,
        scanners=scanners,
        dojo=dojo,
        zap=zap,
        falco=falco,
        policy=policy,
        threatflow=threatflow,
    )


def _parse_scanners(value: object) -> list[str] | None:
    if value is None:
        return None
    if isinstance(value, str):
        return [s.strip() for s in value.split(",") if s.strip()]
    if isinstance(value, list):
        return [str(s) for s in value]
    return None


def _parse_dojo(value: object) -> DojoSettings | None:
    if value is None:
        return None
    if not isinstance(value, dict):
        return None
    return DojoSettings(
        enabled=bool(value.get("enabled", False)),
        base_url=str(value.get("base_url", "http://localhost:8080")),
        api_key=str(value.get("api_key", "")),
        product_name=str(value.get("product_name", "Kekkai Scans")),
        engagement_name=str(value.get("engagement_name", "Default Engagement")),
    )


def _parse_zap(value: object) -> ZapSettings | None:
    """Parse ZAP settings from config.

    ZAP is disabled by default and requires explicit target URL.
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        return None

    allowed_domains = value.get("allowed_domains", [])
    if isinstance(allowed_domains, str):
        allowed_domains = [d.strip() for d in allowed_domains.split(",") if d.strip()]
    elif not isinstance(allowed_domains, list):
        allowed_domains = []

    return ZapSettings(
        enabled=bool(value.get("enabled", False)),
        target_url=value.get("target_url") if value.get("target_url") else None,
        allow_private_ips=bool(value.get("allow_private_ips", False)),
        allowed_domains=list(allowed_domains),
        timeout_seconds=int(value.get("timeout_seconds", 900)),
    )


def _parse_falco(value: object) -> FalcoSettings | None:
    """Parse Falco settings from config.

    Falco is disabled by default (Linux-only, experimental).
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        return None
    return FalcoSettings(
        enabled=bool(value.get("enabled", False)),
        rules_file=value.get("rules_file") if value.get("rules_file") else None,
        timeout_seconds=int(value.get("timeout_seconds", 300)),
    )


def _expect_str(value: object, name: str) -> str:
    if isinstance(value, str):
        return value
    raise ValueError(f"{name} must be a string")


def _expect_int(value: object, name: str) -> int:
    if isinstance(value, int):
        return value
    if isinstance(value, str) and value.isdigit():
        return int(value)
    raise ValueError(f"{name} must be an integer")


def _expect_str_list(value: object, name: str) -> list[str]:
    if isinstance(value, str):
        return [item.strip() for item in value.split(",") if item.strip()]
    if isinstance(value, Sequence) and not isinstance(value, str | bytes):
        items: list[str] = []
        for item in value:
            if not isinstance(item, str):
                raise ValueError(f"{name} must be a list of strings")
            items.append(item)
        return items
    raise ValueError(f"{name} must be a list of strings")


def _parse_pipeline(value: object) -> list[PipelineStep]:
    if value is None:
        return []
    if not isinstance(value, list):
        raise ValueError("pipeline must be a list")
    steps: list[PipelineStep] = []
    for item in value:
        if not isinstance(item, dict):
            raise ValueError("pipeline entries must be tables")
        name = _expect_str(item.get("name"), "pipeline.name")
        args = item.get("args")
        if not isinstance(args, list) or not all(isinstance(arg, str) for arg in args):
            raise ValueError("pipeline.args must be a list of strings")
        steps.append(PipelineStep(name=name, args=list(args)))
    return steps
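
# Illustrative sketch (not part of the published config.py): a [[pipeline]] entry
# from the TOML file, parsed by the helper above into PipelineStep values. The
# step name and args are assumptions for the example.

import tomllib

from kekkai.config import _parse_pipeline

doc = tomllib.loads('[[pipeline]]\nname = "example"\nargs = ["echo", "hello"]\n')
steps = _parse_pipeline(doc["pipeline"])
assert steps[0].name == "example"
assert steps[0].args == ["echo", "hello"]
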
def _parse_policy(value: object) -> PolicySettings | None:
    """Parse policy settings from config.

    Policy settings control CI mode behavior and thresholds.
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        return None

    def _get_int(key: str, default: int) -> int:
        v = value.get(key, default)
        if isinstance(v, int):
            return v
        if isinstance(v, str) and v.lstrip("-").isdigit():
            return int(v)
        return default

    return PolicySettings(
        fail_on_critical=bool(value.get("fail_on_critical", True)),
        fail_on_high=bool(value.get("fail_on_high", True)),
        fail_on_medium=bool(value.get("fail_on_medium", False)),
        fail_on_low=bool(value.get("fail_on_low", False)),
        fail_on_info=bool(value.get("fail_on_info", False)),
        max_critical=_get_int("max_critical", 0),
        max_high=_get_int("max_high", 0),
        max_medium=_get_int("max_medium", -1),
        max_low=_get_int("max_low", -1),
        max_info=_get_int("max_info", -1),
        max_total=_get_int("max_total", -1),
    )
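
# Illustrative sketch (not part of the published config.py): unset policy keys fall
# back to the PolicySettings defaults, integer thresholds also accept strings, and
# -1 keeps a severity unlimited. Values here are assumptions for the example.

from kekkai.config import _parse_policy

policy = _parse_policy({"fail_on_medium": True, "max_high": "5"})
assert policy is not None
assert policy.fail_on_medium is True
assert policy.max_high == 5      # "5" coerced by the nested _get_int helper
assert policy.max_medium == -1   # default: no limit on medium findings
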
def _parse_threatflow(value: object) -> ThreatFlowSettings | None:
    """Parse ThreatFlow settings from config.

    ThreatFlow is disabled by default and uses local model by default when enabled.
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        return None

    return ThreatFlowSettings(
        enabled=bool(value.get("enabled", False)),
        model_mode=str(value.get("model_mode", "local")),
        model_path=value.get("model_path") if value.get("model_path") else None,
        api_key=value.get("api_key") if value.get("api_key") else None,
        api_base=value.get("api_base") if value.get("api_base") else None,
        model_name=value.get("model_name") if value.get("model_name") else None,
        max_files=int(value.get("max_files", 500)),
        timeout_seconds=int(value.get("timeout_seconds", 300)),
        redact_secrets=bool(value.get("redact_secrets", True)),
        sanitize_content=bool(value.get("sanitize_content", True)),
        warn_on_injection=bool(value.get("warn_on_injection", True)),
    )
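
# Illustrative sketch (not part of the published config.py): an end-to-end load of
# a config that enables several of the optional tables parsed above. The target URL,
# domains, and thresholds are assumptions for the example only.

import tempfile
from pathlib import Path

from kekkai.config import load_config

EXAMPLE_TOML = """
repo_path = "."
timeout_seconds = 600
scanners = ["trivy", "gitleaks"]

[zap]
enabled = true
target_url = "https://staging.example.com"
allowed_domains = ["staging.example.com"]

[policy]
fail_on_medium = true
max_total = 25

[threatflow]
enabled = true
model_mode = "local"
"""

with tempfile.TemporaryDirectory() as tmp:
    path = Path(tmp) / "kekkai.toml"
    path.write_text(EXAMPLE_TOML)
    cfg = load_config(path, base_dir=Path(tmp))
    assert cfg.zap is not None and cfg.zap.enabled and not cfg.zap.allow_private_ips
    assert cfg.policy is not None and cfg.policy.max_total == 25
    assert cfg.scanners == ["trivy", "gitleaks"]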