tweek-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tweek/__init__.py +16 -0
- tweek/cli.py +3390 -0
- tweek/cli_helpers.py +193 -0
- tweek/config/__init__.py +13 -0
- tweek/config/allowed_dirs.yaml +23 -0
- tweek/config/manager.py +1064 -0
- tweek/config/patterns.yaml +751 -0
- tweek/config/tiers.yaml +129 -0
- tweek/diagnostics.py +589 -0
- tweek/hooks/__init__.py +1 -0
- tweek/hooks/pre_tool_use.py +861 -0
- tweek/integrations/__init__.py +3 -0
- tweek/integrations/moltbot.py +243 -0
- tweek/licensing.py +398 -0
- tweek/logging/__init__.py +9 -0
- tweek/logging/bundle.py +350 -0
- tweek/logging/json_logger.py +150 -0
- tweek/logging/security_log.py +745 -0
- tweek/mcp/__init__.py +24 -0
- tweek/mcp/approval.py +456 -0
- tweek/mcp/approval_cli.py +356 -0
- tweek/mcp/clients/__init__.py +37 -0
- tweek/mcp/clients/chatgpt.py +112 -0
- tweek/mcp/clients/claude_desktop.py +203 -0
- tweek/mcp/clients/gemini.py +178 -0
- tweek/mcp/proxy.py +667 -0
- tweek/mcp/screening.py +175 -0
- tweek/mcp/server.py +317 -0
- tweek/platform/__init__.py +131 -0
- tweek/plugins/__init__.py +835 -0
- tweek/plugins/base.py +1080 -0
- tweek/plugins/compliance/__init__.py +30 -0
- tweek/plugins/compliance/gdpr.py +333 -0
- tweek/plugins/compliance/gov.py +324 -0
- tweek/plugins/compliance/hipaa.py +285 -0
- tweek/plugins/compliance/legal.py +322 -0
- tweek/plugins/compliance/pci.py +361 -0
- tweek/plugins/compliance/soc2.py +275 -0
- tweek/plugins/detectors/__init__.py +30 -0
- tweek/plugins/detectors/continue_dev.py +206 -0
- tweek/plugins/detectors/copilot.py +254 -0
- tweek/plugins/detectors/cursor.py +192 -0
- tweek/plugins/detectors/moltbot.py +205 -0
- tweek/plugins/detectors/windsurf.py +214 -0
- tweek/plugins/git_discovery.py +395 -0
- tweek/plugins/git_installer.py +491 -0
- tweek/plugins/git_lockfile.py +338 -0
- tweek/plugins/git_registry.py +503 -0
- tweek/plugins/git_security.py +482 -0
- tweek/plugins/providers/__init__.py +30 -0
- tweek/plugins/providers/anthropic.py +181 -0
- tweek/plugins/providers/azure_openai.py +289 -0
- tweek/plugins/providers/bedrock.py +248 -0
- tweek/plugins/providers/google.py +197 -0
- tweek/plugins/providers/openai.py +230 -0
- tweek/plugins/scope.py +130 -0
- tweek/plugins/screening/__init__.py +26 -0
- tweek/plugins/screening/llm_reviewer.py +149 -0
- tweek/plugins/screening/pattern_matcher.py +273 -0
- tweek/plugins/screening/rate_limiter.py +174 -0
- tweek/plugins/screening/session_analyzer.py +159 -0
- tweek/proxy/__init__.py +302 -0
- tweek/proxy/addon.py +223 -0
- tweek/proxy/interceptor.py +313 -0
- tweek/proxy/server.py +315 -0
- tweek/sandbox/__init__.py +71 -0
- tweek/sandbox/executor.py +382 -0
- tweek/sandbox/linux.py +278 -0
- tweek/sandbox/profile_generator.py +323 -0
- tweek/screening/__init__.py +13 -0
- tweek/screening/context.py +81 -0
- tweek/security/__init__.py +22 -0
- tweek/security/llm_reviewer.py +348 -0
- tweek/security/rate_limiter.py +682 -0
- tweek/security/secret_scanner.py +506 -0
- tweek/security/session_analyzer.py +600 -0
- tweek/vault/__init__.py +40 -0
- tweek/vault/cross_platform.py +251 -0
- tweek/vault/keychain.py +288 -0
- tweek-0.1.0.dist-info/METADATA +335 -0
- tweek-0.1.0.dist-info/RECORD +85 -0
- tweek-0.1.0.dist-info/WHEEL +5 -0
- tweek-0.1.0.dist-info/entry_points.txt +25 -0
- tweek-0.1.0.dist-info/licenses/LICENSE +190 -0
- tweek-0.1.0.dist-info/top_level.txt +1 -0
tweek/logging/bundle.py
ADDED
@@ -0,0 +1,350 @@
+#!/usr/bin/env python3
+"""
+Tweek Diagnostic Bundle Collector
+
+Collects logs, configs, and system info into a zip file for support.
+Sensitive data is redacted before inclusion.
+
+Usage:
+    tweek logs bundle                     # Create bundle in current dir
+    tweek logs bundle -o /tmp/bundle.zip  # Specify output path
+    tweek logs bundle --days 7            # Only last 7 days of events
+    tweek logs bundle --dry-run           # Show what would be collected
+"""
+
+import json
+import platform
+import shutil
+import sqlite3
+import sys
+import zipfile
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+TWEEK_DIR = Path.home() / ".tweek"
+
+
+class BundleCollector:
+    """
+    Collects diagnostic data into a zip bundle for support.
+
+    Automatically redacts sensitive data (API keys, secrets, credentials)
+    before including any file in the bundle.
+    """
+
+    # Files that are NEVER included
+    EXCLUDED_FILES = {
+        "license.key",
+        "credential_registry.json",
+    }
+
+    # Directories that are NEVER included
+    EXCLUDED_DIRS = {
+        "certs",  # CA private keys
+    }
+
+    def __init__(self, redact: bool = True, days: Optional[int] = None):
+        self.redact = redact
+        self.days = days
+        self._redactor = None
+        self._collected: List[Dict[str, Any]] = []
+
+    def _get_redactor(self):
+        """Lazy-load the redactor."""
+        if self._redactor is None:
+            from tweek.logging.security_log import LogRedactor
+            self._redactor = LogRedactor(enabled=self.redact)
+        return self._redactor
+
+    def collect_security_db(self) -> Optional[Path]:
+        """Copy the security events database."""
+        db_path = TWEEK_DIR / "security.db"
+        if not db_path.exists():
+            self._collected.append({"file": "security.db", "status": "not found"})
+            return None
+
+        self._collected.append({
+            "file": "security.db",
+            "status": "included",
+            "size": db_path.stat().st_size,
+        })
+        return db_path
+
+    def collect_approvals_db(self) -> Optional[Path]:
+        """Copy the MCP approvals database."""
+        db_path = TWEEK_DIR / "approvals.db"
+        if not db_path.exists():
+            self._collected.append({"file": "approvals.db", "status": "not found"})
+            return None
+
+        self._collected.append({
+            "file": "approvals.db",
+            "status": "included",
+            "size": db_path.stat().st_size,
+        })
+        return db_path
+
+    def collect_proxy_log(self) -> Optional[Path]:
+        """Copy the HTTP proxy log."""
+        log_path = TWEEK_DIR / "proxy" / "proxy.log"
+        if not log_path.exists():
+            self._collected.append({"file": "proxy/proxy.log", "status": "not found"})
+            return None
+
+        self._collected.append({
+            "file": "proxy/proxy.log",
+            "status": "included",
+            "size": log_path.stat().st_size,
+        })
+        return log_path
+
+    def collect_json_log(self) -> Optional[Path]:
+        """Copy the JSON event log."""
+        log_path = TWEEK_DIR / "security_events.jsonl"
+        if not log_path.exists():
+            self._collected.append({"file": "security_events.jsonl", "status": "not found"})
+            return None
+
+        self._collected.append({
+            "file": "security_events.jsonl",
+            "status": "included",
+            "size": log_path.stat().st_size,
+        })
+        return log_path
+
+    def collect_config(self, scope: str = "user") -> Optional[str]:
+        """Collect and redact a config file.
+
+        Returns redacted YAML content as string, or None if not found.
+        """
+        if scope == "user":
+            config_path = TWEEK_DIR / "config.yaml"
+            bundle_name = "config_user.yaml"
+        else:
+            config_path = Path.cwd() / ".tweek" / "config.yaml"
+            bundle_name = "config_project.yaml"
+
+        if not config_path.exists():
+            self._collected.append({"file": bundle_name, "status": "not found"})
+            return None
+
+        content = config_path.read_text()
+        if self.redact:
+            redactor = self._get_redactor()
+            content = redactor.redact_string(content)
+
+        self._collected.append({
+            "file": bundle_name,
+            "status": "included (redacted)" if self.redact else "included",
+        })
+        return content
+
+    def collect_doctor_output(self) -> str:
+        """Run tweek doctor programmatically and capture output."""
+        try:
+            from tweek.diagnostics import run_health_checks, get_health_verdict
+
+            checks = run_health_checks()
+            verdict = get_health_verdict(checks)
+
+            lines = [
+                f"Tweek Doctor Report",
+                f"Generated: {datetime.utcnow().isoformat()}Z",
+                f"Overall: {verdict}",
+                "",
+            ]
+            for check in checks:
+                status = check.status.value if hasattr(check.status, "value") else str(check.status)
+                lines.append(f"[{status:>7}] {check.name}: {check.message}")
+                if check.fix_hint:
+                    lines.append(f"    Fix: {check.fix_hint}")
+
+            self._collected.append({"file": "doctor_output.txt", "status": "generated"})
+            return "\n".join(lines)
+
+        except Exception as e:
+            self._collected.append({"file": "doctor_output.txt", "status": f"error: {e}"})
+            return f"Failed to run doctor: {e}"
+
+    def collect_system_info(self) -> Dict[str, Any]:
+        """Collect platform and version information."""
+        info = {
+            "timestamp": datetime.utcnow().isoformat() + "Z",
+            "platform": {
+                "system": platform.system(),
+                "release": platform.release(),
+                "version": platform.version(),
+                "machine": platform.machine(),
+                "python_version": platform.python_version(),
+            },
+            "tweek": {},
+        }
+
+        # Tweek version
+        try:
+            from tweek import __version__
+            info["tweek"]["version"] = __version__
+        except (ImportError, AttributeError):
+            info["tweek"]["version"] = "unknown"
+
+        # License tier (not the key)
+        try:
+            from tweek.licensing import get_license
+            license_mgr = get_license()
+            info["tweek"]["license_tier"] = license_mgr.tier.value
+        except Exception:
+            info["tweek"]["license_tier"] = "unknown"
+
+        # Platform capabilities
+        try:
+            from tweek.platform import get_capabilities
+            caps = get_capabilities()
+            info["tweek"]["capabilities"] = {
+                "sandbox": caps.sandbox_available,
+                "vault_backend": caps.vault_backend,
+            }
+        except Exception:
+            pass
+
+        # MCP availability
+        try:
+            from mcp.server import Server
+            info["tweek"]["mcp_available"] = True
+        except ImportError:
+            info["tweek"]["mcp_available"] = False
+
+        # Data directory stats
+        try:
+            if TWEEK_DIR.exists():
+                info["tweek"]["data_dir_exists"] = True
+                files = list(TWEEK_DIR.iterdir())
+                info["tweek"]["data_files"] = [
+                    f.name for f in files
+                    if f.name not in self.EXCLUDED_FILES
+                    and f.name not in self.EXCLUDED_DIRS
+                ]
+        except Exception:
+            pass
+
+        self._collected.append({"file": "system_info.json", "status": "generated"})
+        return info
+
+    def create_bundle(self, output_path: Path) -> Path:
+        """
+        Create the diagnostic bundle zip file.
+
+        Args:
+            output_path: Path for the output zip file
+
+        Returns:
+            Path to the created zip file
+        """
+        self._collected = []
+
+        with zipfile.ZipFile(output_path, "w", zipfile.ZIP_DEFLATED) as zf:
+            # Security database
+            db_path = self.collect_security_db()
+            if db_path:
+                if self.days:
+                    # Export only recent events to a temp copy
+                    self._add_filtered_db(zf, db_path, "security.db")
+                else:
+                    zf.write(db_path, "security.db")
+
+            # Approvals database
+            approvals_path = self.collect_approvals_db()
+            if approvals_path:
+                zf.write(approvals_path, "approvals.db")
+
+            # Proxy log
+            proxy_log = self.collect_proxy_log()
+            if proxy_log:
+                zf.write(proxy_log, "proxy.log")
+
+            # JSON event log
+            json_log = self.collect_json_log()
+            if json_log:
+                zf.write(json_log, "security_events.jsonl")
+
+            # Configs (redacted)
+            for scope, name in [("user", "config_user.yaml"), ("project", "config_project.yaml")]:
+                content = self.collect_config(scope)
+                if content:
+                    zf.writestr(name, content)
+
+            # Doctor output
+            doctor = self.collect_doctor_output()
+            zf.writestr("doctor_output.txt", doctor)
+
+            # System info
+            sys_info = self.collect_system_info()
+            zf.writestr("system_info.json", json.dumps(sys_info, indent=2))
+
+            # Manifest
+            manifest = {
+                "bundle_version": "1.0",
+                "created_at": datetime.utcnow().isoformat() + "Z",
+                "redacted": self.redact,
+                "days_filter": self.days,
+                "files": self._collected,
+            }
+            zf.writestr("manifest.json", json.dumps(manifest, indent=2))
+
+        return output_path
+
+    def get_dry_run_report(self) -> List[Dict[str, Any]]:
+        """Generate a dry-run report showing what would be collected."""
+        self._collected = []
+
+        self.collect_security_db()
+        self.collect_approvals_db()
+        self.collect_proxy_log()
+        self.collect_json_log()
+        self.collect_config("user")
+        self.collect_config("project")
+        self._collected.append({"file": "doctor_output.txt", "status": "will generate"})
+        self._collected.append({"file": "system_info.json", "status": "will generate"})
+        self._collected.append({"file": "manifest.json", "status": "will generate"})
+
+        return self._collected
+
+    def _add_filtered_db(self, zf: zipfile.ZipFile, db_path: Path, archive_name: str):
+        """Add a filtered copy of the security database (only recent events)."""
+        import tempfile
+        tmp_db = Path(tempfile.mktemp(suffix=".db"))
+        try:
+            # Create a new DB with only recent events
+            src = sqlite3.connect(str(db_path))
+            dst = sqlite3.connect(str(tmp_db))
+
+            # Copy schema
+            for row in src.execute(
+                "SELECT sql FROM sqlite_master WHERE type='table' AND name='security_events'"
+            ):
+                if row[0]:
+                    dst.execute(row[0])
+
+            # Copy filtered data
+            days_filter = f"-{self.days} days"
+            rows = src.execute(
+                "SELECT * FROM security_events WHERE timestamp > datetime('now', ?)",
+                (days_filter,),
+            ).fetchall()
+
+            if rows:
+                placeholders = ",".join("?" * len(rows[0]))
+                for row in rows:
+                    dst.execute(
+                        f"INSERT INTO security_events VALUES ({placeholders})",
+                        tuple(row),
+                    )
+
+            dst.commit()
+            src.close()
+            dst.close()
+
+            zf.write(tmp_db, archive_name)
+        finally:
+            if tmp_db.exists():
+                tmp_db.unlink()
tweek/logging/json_logger.py
ADDED
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+"""
+Tweek JSON Event Logger
+
+Structured NDJSON (newline-delimited JSON) logging for security events.
+Writes to ~/.tweek/security_events.jsonl with automatic rotation.
+
+This supplements the primary SQLite logger with a format suitable for
+ingestion into log aggregation systems (ELK, Splunk, Datadog, etc.).
+
+Enable via config: logging.json_events: true
+"""
+
+import json
+import logging
+import os
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+logger = logging.getLogger(__name__)
+
+# Defaults
+DEFAULT_LOG_PATH = Path.home() / ".tweek" / "security_events.jsonl"
+MAX_FILE_SIZE_BYTES = 10 * 1024 * 1024  # 10 MB
+MAX_ROTATED_FILES = 5
+
+
+class JsonEventLogger:
+    """
+    Writes SecurityEvents as newline-delimited JSON (NDJSON).
+
+    Each line is a self-contained JSON object with:
+    - ISO 8601 timestamp
+    - All SecurityEvent fields
+    - Correlation ID for linking related events
+    - Source (hooks/mcp/mcp_proxy/http_proxy)
+    """
+
+    def __init__(
+        self,
+        log_path: Optional[Path] = None,
+        enabled: bool = False,
+        max_size_bytes: int = MAX_FILE_SIZE_BYTES,
+        max_rotated: int = MAX_ROTATED_FILES,
+    ):
+        self.log_path = log_path or DEFAULT_LOG_PATH
+        self.enabled = enabled
+        self.max_size_bytes = max_size_bytes
+        self.max_rotated = max_rotated
+
+    def write_event(
+        self,
+        event: "SecurityEvent",
+        redacted_command: Optional[str] = None,
+        redacted_reason: Optional[str] = None,
+        redacted_metadata: Optional[Dict[str, Any]] = None,
+    ):
+        """Write a single event as a JSON line.
+
+        Uses pre-redacted values from the SecurityLogger to avoid
+        double-redaction overhead.
+        """
+        if not self.enabled:
+            return
+
+        record = {
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+            "event_type": event.event_type.value,
+            "tool_name": event.tool_name,
+            "command": redacted_command,
+            "tier": event.tier,
+            "pattern_name": event.pattern_name,
+            "pattern_severity": event.pattern_severity,
+            "decision": event.decision,
+            "decision_reason": redacted_reason,
+            "user_response": event.user_response,
+            "session_id": event.session_id,
+            "working_directory": event.working_directory,
+            "correlation_id": event.correlation_id,
+            "source": event.source,
+            "metadata": redacted_metadata,
+        }
+
+        # Strip None values for cleaner output
+        record = {k: v for k, v in record.items() if v is not None}
+
+        try:
+            self._rotate_if_needed()
+            self.log_path.parent.mkdir(parents=True, exist_ok=True)
+            with open(self.log_path, "a") as f:
+                f.write(json.dumps(record, default=str) + "\n")
+        except Exception as e:
+            logger.debug(f"Failed to write JSON event: {e}")
+
+    def _rotate_if_needed(self):
+        """Rotate log file if it exceeds the maximum size."""
+        if not self.log_path.exists():
+            return
+
+        try:
+            size = self.log_path.stat().st_size
+        except OSError:
+            return
+
+        if size < self.max_size_bytes:
+            return
+
+        # Rotate: .jsonl -> .jsonl.1, .jsonl.1 -> .jsonl.2, etc.
+        for i in range(self.max_rotated, 0, -1):
+            src = Path(f"{self.log_path}.{i}")
+            dst = Path(f"{self.log_path}.{i + 1}")
+            if i == self.max_rotated and src.exists():
+                src.unlink()  # Delete oldest
+            elif src.exists():
+                src.rename(dst)
+
+        # Move current to .1
+        self.log_path.rename(Path(f"{self.log_path}.1"))
+
+
+# Singleton instance
+_json_logger: Optional[JsonEventLogger] = None
+
+
+def get_json_logger() -> Optional[JsonEventLogger]:
+    """Get the singleton JSON event logger.
+
+    Reads the enabled flag from Tweek config on first access.
+    Returns None if JSON logging is not configured.
+    """
+    global _json_logger
+    if _json_logger is None:
+        enabled = _read_json_logging_config()
+        _json_logger = JsonEventLogger(enabled=enabled)
+    return _json_logger
+
+
+def _read_json_logging_config() -> bool:
+    """Check if JSON event logging is enabled in config."""
+    try:
+        import yaml
+        config_path = Path.home() / ".tweek" / "config.yaml"
+        if config_path.exists():
+            with open(config_path) as f:
+                config = yaml.safe_load(f) or {}
+            return config.get("logging", {}).get("json_events", False)
+    except Exception:
+        pass
+    return False