kekkai-cli 1.1.0-py3-none-any.whl → 1.1.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kekkai/cli.py +124 -33
- kekkai/dojo_import.py +9 -1
- kekkai/output.py +1 -1
- kekkai/report/unified.py +226 -0
- kekkai/triage/__init__.py +54 -1
- kekkai/triage/loader.py +196 -0
- {kekkai_cli-1.1.0.dist-info → kekkai_cli-1.1.1.dist-info}/METADATA +33 -13
- {kekkai_cli-1.1.0.dist-info → kekkai_cli-1.1.1.dist-info}/RECORD +11 -27
- {kekkai_cli-1.1.0.dist-info → kekkai_cli-1.1.1.dist-info}/entry_points.txt +0 -1
- {kekkai_cli-1.1.0.dist-info → kekkai_cli-1.1.1.dist-info}/top_level.txt +0 -1
- portal/__init__.py +0 -19
- portal/api.py +0 -155
- portal/auth.py +0 -103
- portal/enterprise/__init__.py +0 -45
- portal/enterprise/audit.py +0 -435
- portal/enterprise/licensing.py +0 -408
- portal/enterprise/rbac.py +0 -276
- portal/enterprise/saml.py +0 -595
- portal/ops/__init__.py +0 -53
- portal/ops/backup.py +0 -553
- portal/ops/log_shipper.py +0 -469
- portal/ops/monitoring.py +0 -517
- portal/ops/restore.py +0 -469
- portal/ops/secrets.py +0 -408
- portal/ops/upgrade.py +0 -591
- portal/tenants.py +0 -340
- portal/uploads.py +0 -259
- portal/web.py +0 -393
- {kekkai_cli-1.1.0.dist-info → kekkai_cli-1.1.1.dist-info}/WHEEL +0 -0
portal/enterprise/__init__.py
DELETED

@@ -1,45 +0,0 @@
-"""Enterprise features for Kekkai Portal.
-
-Provides:
-- RBAC (Role-Based Access Control)
-- SAML 2.0 SSO integration
-- Audit logging
-- Enterprise license gating (ECDSA asymmetric signing)
-"""
-
-from __future__ import annotations
-
-from .audit import AuditEvent, AuditEventType, AuditLog
-from .licensing import (
-    EnterpriseLicense,
-    LicenseCheckResult,
-    LicenseSigner,
-    LicenseStatus,
-    LicenseValidator,
-    generate_keypair,
-)
-from .rbac import AuthorizationResult, Permission, RBACManager, Role
-from .saml import SAMLAssertion, SAMLConfig, SAMLError, SAMLProcessor
-
-ENTERPRISE_AVAILABLE = True
-
-__all__ = [
-    "ENTERPRISE_AVAILABLE",
-    "AuditEvent",
-    "AuditEventType",
-    "AuditLog",
-    "AuthorizationResult",
-    "EnterpriseLicense",
-    "LicenseCheckResult",
-    "LicenseSigner",
-    "LicenseStatus",
-    "LicenseValidator",
-    "Permission",
-    "RBACManager",
-    "Role",
-    "SAMLAssertion",
-    "SAMLConfig",
-    "SAMLError",
-    "SAMLProcessor",
-    "generate_keypair",
-]
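The deleted package unconditionally exported ENTERPRISE_AVAILABLE = True, which suggests downstream code feature-gates on a guarded import. A minimal sketch of that consumer-side pattern follows; it is an assumption for illustration, not code present in this diff:

# Hypothetical consumer of the removed package. In kekkai-cli 1.1.1 the
# portal.enterprise package no longer ships, so the ImportError branch
# is what 1.1.1 users would hit.
try:
    from portal.enterprise import ENTERPRISE_AVAILABLE, RBACManager
except ImportError:
    ENTERPRISE_AVAILABLE = False
    RBACManager = None

if ENTERPRISE_AVAILABLE:
    ...  # enable enterprise-only code paths (RBAC, SAML, audit)
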
portal/enterprise/audit.py
DELETED

@@ -1,435 +0,0 @@
-"""Audit logging for enterprise portal.
-
-Security controls:
-- ASVS V16.3.1: Log auth events
-- Log integrity protection (append-only, hash chain)
-- Structured JSON format
-- Redaction of sensitive fields
-"""
-
-from __future__ import annotations
-
-import hashlib
-import json
-import logging
-import os
-import threading
-import time
-from dataclasses import asdict, dataclass, field
-from datetime import UTC, datetime
-from enum import Enum
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from kekkai_core import redact
-
-if TYPE_CHECKING:
-    pass
-
-logger = logging.getLogger(__name__)
-
-SENSITIVE_FIELDS = frozenset(
-    {
-        "password",
-        "api_key",
-        "token",
-        "secret",
-        "authorization",
-        "cookie",
-        "session_id",
-        "credentials",
-    }
-)
-
-
-class AuditEventType(Enum):
-    """Types of auditable events."""
-
-    # Authentication events
-    AUTH_LOGIN_SUCCESS = "auth.login.success"
-    AUTH_LOGIN_FAILURE = "auth.login.failure"
-    AUTH_LOGOUT = "auth.logout"
-    AUTH_SESSION_EXPIRED = "auth.session.expired"
-    AUTH_SAML_ASSERTION = "auth.saml.assertion"
-    AUTH_SAML_REPLAY_BLOCKED = "auth.saml.replay_blocked"
-
-    # Authorization events
-    AUTHZ_DENIED = "authz.denied"
-    AUTHZ_CROSS_TENANT = "authz.cross_tenant"
-
-    # Admin actions
-    ADMIN_USER_CREATED = "admin.user.created"
-    ADMIN_USER_UPDATED = "admin.user.updated"
-    ADMIN_USER_DELETED = "admin.user.deleted"
-    ADMIN_ROLE_CHANGED = "admin.role.changed"
-    ADMIN_TENANT_CREATED = "admin.tenant.created"
-    ADMIN_TENANT_UPDATED = "admin.tenant.updated"
-    ADMIN_TENANT_DELETED = "admin.tenant.deleted"
-    ADMIN_API_KEY_ROTATED = "admin.api_key.rotated"
-    ADMIN_SAML_CONFIG_UPDATED = "admin.saml_config.updated"
-
-    # Data access events
-    DATA_UPLOAD = "data.upload"
-    DATA_EXPORT = "data.export"
-    DATA_DELETE = "data.delete"
-
-    # System events
-    SYSTEM_LICENSE_CHECK = "system.license.check"
-    SYSTEM_LICENSE_EXPIRED = "system.license.expired"
-
-
-@dataclass
-class AuditEvent:
-    """Represents an auditable event."""
-
-    event_type: AuditEventType
-    timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
-    actor_id: str | None = None
-    actor_email: str | None = None
-    tenant_id: str | None = None
-    resource_type: str | None = None
-    resource_id: str | None = None
-    action: str | None = None
-    outcome: str = "success"
-    client_ip: str | None = None
-    user_agent: str | None = None
-    details: dict[str, Any] = field(default_factory=dict)
-    event_id: str = field(
-        default_factory=lambda: f"{int(time.time() * 1000)}-{os.urandom(4).hex()}"
-    )
-
-    def to_dict(self) -> dict[str, Any]:
-        """Convert to dictionary for serialization."""
-        data = asdict(self)
-        data["event_type"] = self.event_type.value
-        data["timestamp"] = self.timestamp.isoformat()
-        data["details"] = _redact_sensitive(self.details)
-        if self.client_ip:
-            data["client_ip"] = redact(self.client_ip)
-        return data
-
-    def to_json(self) -> str:
-        """Serialize to JSON string."""
-        return json.dumps(self.to_dict(), separators=(",", ":"))
-
-
-class AuditLog:
-    """Append-only audit log with integrity protection."""
-
-    def __init__(
-        self,
-        log_path: Path | None = None,
-        enable_hash_chain: bool = True,
-    ) -> None:
-        self._log_path = log_path
-        self._enable_hash_chain = enable_hash_chain
-        self._last_hash: str | None = None
-        self._lock = threading.Lock()
-        self._load_last_hash()
-
-    def _load_last_hash(self) -> None:
-        """Load the last hash from existing log for chain continuity."""
-        if not self._log_path or not self._log_path.exists():
-            self._last_hash = "0" * 64
-            return
-
-        try:
-            with open(self._log_path, "rb") as f:
-                f.seek(0, 2)
-                size = f.tell()
-                if size == 0:
-                    self._last_hash = "0" * 64
-                    return
-
-                chunk_size = min(4096, size)
-                f.seek(-chunk_size, 2)
-                last_chunk = f.read()
-                lines = last_chunk.split(b"\n")
-                for line in reversed(lines):
-                    if line.strip():
-                        try:
-                            entry = json.loads(line)
-                            self._last_hash = entry.get("_hash", "0" * 64)
-                            return
-                        except json.JSONDecodeError:
-                            continue
-        except OSError as e:
-            logger.warning("Failed to load last hash: %s", e)
-
-        self._last_hash = "0" * 64
-
-    def _compute_hash(self, event_json: str) -> str:
-        """Compute hash for integrity chain."""
-        data = f"{self._last_hash}:{event_json}"
-        return hashlib.sha256(data.encode()).hexdigest()
-
-    def log(self, event: AuditEvent) -> str:
-        """Log an audit event.
-
-        Returns:
-            The event ID
-        """
-        with self._lock:
-            event_data = event.to_dict()
-            event_json = json.dumps(event_data, separators=(",", ":"))
-
-            if self._enable_hash_chain:
-                event_hash = self._compute_hash(event_json)
-                event_data["_hash"] = event_hash
-                event_data["_prev_hash"] = self._last_hash
-                self._last_hash = event_hash
-                event_json = json.dumps(event_data, separators=(",", ":"))
-
-            if self._log_path:
-                self._write_to_file(event_json)
-
-            logger.info("audit.event %s", event_json)
-            return event.event_id
-
-    def _write_to_file(self, event_json: str) -> None:
-        """Write event to log file (append-only)."""
-        if not self._log_path:
-            return
-        try:
-            self._log_path.parent.mkdir(parents=True, exist_ok=True)
-            with open(self._log_path, "a", encoding="utf-8") as f:
-                f.write(event_json + "\n")
-                f.flush()
-                os.fsync(f.fileno())
-        except OSError as e:
-            logger.error("Failed to write audit log: %s", e)
-
-    def log_auth_success(
-        self,
-        user_id: str,
-        tenant_id: str,
-        client_ip: str | None = None,
-        auth_method: str = "api_key",
-        **details: Any,
-    ) -> str:
-        """Log successful authentication."""
-        return self.log(
-            AuditEvent(
-                event_type=AuditEventType.AUTH_LOGIN_SUCCESS,
-                actor_id=user_id,
-                tenant_id=tenant_id,
-                client_ip=client_ip,
-                action="login",
-                outcome="success",
-                details={"auth_method": auth_method, **details},
-            )
-        )
-
-    def log_auth_failure(
-        self,
-        reason: str,
-        client_ip: str | None = None,
-        attempted_user: str | None = None,
-        **details: Any,
-    ) -> str:
-        """Log failed authentication attempt."""
-        return self.log(
-            AuditEvent(
-                event_type=AuditEventType.AUTH_LOGIN_FAILURE,
-                actor_id=attempted_user,
-                client_ip=client_ip,
-                action="login",
-                outcome="failure",
-                details={"reason": reason, **details},
-            )
-        )
-
-    def log_authz_denied(
-        self,
-        user_id: str,
-        tenant_id: str,
-        permission: str,
-        resource_type: str | None = None,
-        resource_id: str | None = None,
-        client_ip: str | None = None,
-        **details: Any,
-    ) -> str:
-        """Log authorization denial (ASVS V16.3.2)."""
-        return self.log(
-            AuditEvent(
-                event_type=AuditEventType.AUTHZ_DENIED,
-                actor_id=user_id,
-                tenant_id=tenant_id,
-                resource_type=resource_type,
-                resource_id=resource_id,
-                client_ip=client_ip,
-                action=permission,
-                outcome="denied",
-                details=details,
-            )
-        )
-
-    def log_admin_action(
-        self,
-        event_type: AuditEventType,
-        admin_id: str,
-        tenant_id: str,
-        resource_type: str,
-        resource_id: str,
-        action: str,
-        client_ip: str | None = None,
-        **details: Any,
-    ) -> str:
-        """Log an administrative action."""
-        return self.log(
-            AuditEvent(
-                event_type=event_type,
-                actor_id=admin_id,
-                tenant_id=tenant_id,
-                resource_type=resource_type,
-                resource_id=resource_id,
-                action=action,
-                client_ip=client_ip,
-                details=details,
-            )
-        )
-
-    def log_saml_replay_blocked(
-        self,
-        assertion_id: str,
-        client_ip: str | None = None,
-        **details: Any,
-    ) -> str:
-        """Log blocked SAML replay attempt."""
-        return self.log(
-            AuditEvent(
-                event_type=AuditEventType.AUTH_SAML_REPLAY_BLOCKED,
-                client_ip=client_ip,
-                action="saml_replay",
-                outcome="blocked",
-                details={"assertion_id": assertion_id, **details},
-            )
-        )
-
-    def verify_integrity(self, start_line: int = 0) -> tuple[bool, int, str | None]:
-        """Verify the integrity of the audit log.
-
-        Returns:
-            Tuple of (is_valid, lines_checked, error_message)
-        """
-        if not self._log_path or not self._log_path.exists():
-            return True, 0, None
-
-        try:
-            with open(self._log_path, encoding="utf-8") as f:
-                lines = f.readlines()
-
-            if not lines:
-                return True, 0, None
-
-            prev_hash = "0" * 64
-            for i, line in enumerate(lines[start_line:], start=start_line):
-                if not line.strip():
-                    continue
-
-                try:
-                    entry = json.loads(line)
-                except json.JSONDecodeError as e:
-                    return False, i, f"Invalid JSON at line {i}: {e}"
-
-                if "_hash" not in entry:
-                    continue
-
-                stored_prev = entry.get("_prev_hash", "0" * 64)
-                if stored_prev != prev_hash:
-                    return False, i, f"Hash chain broken at line {i}"
-
-                entry_copy = {k: v for k, v in entry.items() if not k.startswith("_")}
-                event_json = json.dumps(entry_copy, separators=(",", ":"))
-                expected_hash = hashlib.sha256(f"{prev_hash}:{event_json}".encode()).hexdigest()
-
-                if entry["_hash"] != expected_hash:
-                    return False, i, f"Hash mismatch at line {i}"
-
-                prev_hash = entry["_hash"]
-
-            return True, len(lines), None
-
-        except OSError as e:
-            return False, 0, f"Failed to read log: {e}"
-
-    def read_events(
-        self,
-        start_time: datetime | None = None,
-        end_time: datetime | None = None,
-        event_types: list[AuditEventType] | None = None,
-        tenant_id: str | None = None,
-        limit: int = 1000,
-    ) -> list[dict[str, Any]]:
-        """Read audit events with optional filtering."""
-        if not self._log_path or not self._log_path.exists():
-            return []
-
-        events = []
-        type_values = {et.value for et in event_types} if event_types else None
-
-        try:
-            with open(self._log_path, encoding="utf-8") as f:
-                for line in f:
-                    if not line.strip():
-                        continue
-
-                    try:
-                        entry = json.loads(line)
-                    except json.JSONDecodeError:
-                        continue
-
-                    if type_values and entry.get("event_type") not in type_values:
-                        continue
-
-                    if tenant_id and entry.get("tenant_id") != tenant_id:
-                        continue
-
-                    if start_time or end_time:
-                        ts_str = entry.get("timestamp")
-                        if ts_str:
-                            ts = datetime.fromisoformat(ts_str)
-                            if start_time and ts < start_time:
-                                continue
-                            if end_time and ts > end_time:
-                                continue
-
-                    events.append(entry)
-                    if len(events) >= limit:
-                        break
-
-        except OSError as e:
-            logger.error("Failed to read audit log: %s", e)
-
-        return events
-
-
-def _redact_sensitive(data: dict[str, Any]) -> dict[str, Any]:
-    """Recursively redact sensitive fields in a dictionary."""
-    result: dict[str, Any] = {}
-    for key, value in data.items():
-        lower_key = key.lower()
-        if any(s in lower_key for s in SENSITIVE_FIELDS):
-            result[key] = "[REDACTED]"
-        elif isinstance(value, dict):
-            result[key] = _redact_sensitive(value)
-        elif isinstance(value, list):
-            redacted_list: list[Any] = [
-                _redact_sensitive(v) if isinstance(v, dict) else v for v in value
-            ]
-            result[key] = redacted_list
-        else:
-            result[key] = value
-    return result
-
-
-def create_audit_log(log_dir: Path | None = None) -> AuditLog:
-    """Create an audit log instance."""
-    log_path: Path | None
-    if log_dir:
-        log_path = log_dir / "audit.jsonl"
-    else:
-        default_dir = os.environ.get("PORTAL_AUDIT_DIR")
-        log_path = Path(default_dir) / "audit.jsonl" if default_dir else None
-
-    return AuditLog(log_path=log_path)
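For reference, a minimal usage sketch of the deleted AuditLog API, derived only from the definitions in the hunk above; the log path below is hypothetical, and this only runs against kekkai-cli <= 1.1.0 where portal.enterprise still ships:

from pathlib import Path

from portal.enterprise.audit import AuditLog

# Hypothetical location; create_audit_log() would normally derive the path
# from the PORTAL_AUDIT_DIR environment variable.
log = AuditLog(log_path=Path("/var/log/portal/audit.jsonl"))

# Each JSONL entry embeds _prev_hash and _hash = sha256("{prev_hash}:{event_json}"),
# so editing or truncating any earlier line breaks the chain.
log.log_auth_success(user_id="u-1", tenant_id="t-1", client_ip="203.0.113.5")
log.log_auth_failure(reason="bad_api_key", client_ip="203.0.113.6")

ok, lines_checked, err = log.verify_integrity()
assert ok, err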