@rafter-security/cli 0.6.6 → 0.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -10
- package/dist/commands/agent/audit-skill.js +22 -20
- package/dist/commands/agent/audit.js +27 -0
- package/dist/commands/agent/components.js +800 -0
- package/dist/commands/agent/config.js +2 -1
- package/dist/commands/agent/disable.js +47 -0
- package/dist/commands/agent/enable.js +50 -0
- package/dist/commands/agent/exec.js +2 -0
- package/dist/commands/agent/index.js +6 -0
- package/dist/commands/agent/init.js +162 -163
- package/dist/commands/agent/install-hook.js +15 -14
- package/dist/commands/agent/list.js +72 -0
- package/dist/commands/agent/scan.js +4 -3
- package/dist/commands/agent/verify.js +1 -1
- package/dist/commands/backend/run.js +12 -3
- package/dist/commands/backend/scan-status.js +3 -2
- package/dist/commands/brief.js +22 -2
- package/dist/commands/ci/init.js +25 -21
- package/dist/commands/completion.js +4 -3
- package/dist/commands/docs/index.js +18 -0
- package/dist/commands/docs/list.js +37 -0
- package/dist/commands/docs/show.js +64 -0
- package/dist/commands/mcp/server.js +84 -0
- package/dist/commands/report.js +42 -41
- package/dist/commands/scan/index.js +7 -5
- package/dist/commands/skill/index.js +14 -0
- package/dist/commands/skill/install.js +89 -0
- package/dist/commands/skill/list.js +79 -0
- package/dist/commands/skill/registry.js +273 -0
- package/dist/commands/skill/remote.js +333 -0
- package/dist/commands/skill/review.js +975 -0
- package/dist/commands/skill/uninstall.js +65 -0
- package/dist/core/audit-logger.js +262 -21
- package/dist/core/config-manager.js +3 -0
- package/dist/core/docs-loader.js +148 -0
- package/dist/core/policy-loader.js +72 -1
- package/dist/core/risk-rules.js +16 -3
- package/dist/index.js +19 -9
- package/dist/scanners/gitleaks.js +6 -2
- package/package.json +1 -1
- package/resources/skills/rafter/SKILL.md +77 -97
- package/resources/skills/rafter/docs/backend.md +106 -0
- package/resources/skills/rafter/docs/cli-reference.md +199 -0
- package/resources/skills/rafter/docs/finding-triage.md +79 -0
- package/resources/skills/rafter/docs/guardrails.md +91 -0
- package/resources/skills/rafter/docs/shift-left.md +64 -0
- package/resources/skills/rafter-agent-security/SKILL.md +1 -1
- package/resources/skills/rafter-code-review/SKILL.md +91 -0
- package/resources/skills/rafter-code-review/docs/api.md +90 -0
- package/resources/skills/rafter-code-review/docs/asvs.md +120 -0
- package/resources/skills/rafter-code-review/docs/cwe-top25.md +78 -0
- package/resources/skills/rafter-code-review/docs/investigation-playbook.md +101 -0
- package/resources/skills/rafter-code-review/docs/llm.md +87 -0
- package/resources/skills/rafter-code-review/docs/web-app.md +84 -0
- package/resources/skills/rafter-secure-design/SKILL.md +103 -0
- package/resources/skills/rafter-secure-design/docs/api-design.md +97 -0
- package/resources/skills/rafter-secure-design/docs/auth.md +67 -0
- package/resources/skills/rafter-secure-design/docs/data-storage.md +90 -0
- package/resources/skills/rafter-secure-design/docs/dependencies.md +101 -0
- package/resources/skills/rafter-secure-design/docs/deployment.md +104 -0
- package/resources/skills/rafter-secure-design/docs/ingestion.md +98 -0
- package/resources/skills/rafter-secure-design/docs/standards-pointers.md +102 -0
- package/resources/skills/rafter-secure-design/docs/threat-modeling.md +128 -0
- package/resources/skills/rafter-skill-review/SKILL.md +106 -0
- package/resources/skills/rafter-skill-review/docs/authorship-provenance.md +82 -0
- package/resources/skills/rafter-skill-review/docs/changelog-review.md +99 -0
- package/resources/skills/rafter-skill-review/docs/data-practices.md +88 -0
- package/resources/skills/rafter-skill-review/docs/malware-indicators.md +79 -0
- package/resources/skills/rafter-skill-review/docs/prompt-injection.md +85 -0
- package/resources/skills/rafter-skill-review/docs/telemetry.md +78 -0
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { Command } from "commander";
|
|
2
|
+
import fs from "fs";
|
|
3
|
+
import { resolveSkill, listBundledSkills, skillDestPath, deleteSkillAt, recordSkillState, SKILL_PLATFORMS, } from "./registry.js";
|
|
4
|
+
import { fmt } from "../../utils/formatter.js";
|
|
5
|
+
/**
 * `rafter skill uninstall <name>` — remove a rafter-authored skill from one or
 * more platforms. Missing files are reported rather than treated as errors.
 *
 * Exit codes:
 *   0 — uninstall succeeded (or skill was already absent)
 *   1 — unknown skill or unknown platform
 */
export function createUninstallCommand() {
    return new Command("uninstall")
        .description("Uninstall a rafter-authored skill from one or more platforms")
        .argument("<name>", "Skill name (e.g. rafter, rafter-secure-design)")
        .option("--platform <platform...>", `Target platform(s). One or more of: ${SKILL_PLATFORMS.join(", ")}. Default: all installed.`)
        .action((name, opts) => {
        const skill = resolveSkill(name);
        if (!skill) {
            console.error(fmt.error(`Unknown skill: ${name}`));
            console.error(fmt.info(`Available: ${listBundledSkills().map((s) => s.name).join(", ") || "(none)"}`));
            process.exit(1);
        }
        const explicit = Array.isArray(opts.platform) && opts.platform.length > 0;
        let targets;
        if (explicit) {
            // Validate every requested platform up front — exit before touching anything.
            targets = opts.platform.map((raw) => {
                const candidate = raw.trim();
                if (!SKILL_PLATFORMS.includes(candidate)) {
                    console.error(fmt.error(`Unknown platform: ${raw}. Known: ${SKILL_PLATFORMS.join(", ")}`));
                    process.exit(1);
                }
                return candidate;
            });
        }
        else {
            // Default: remove from every platform where the file currently exists.
            targets = SKILL_PLATFORMS.filter((p) => fs.existsSync(skillDestPath(p, skill.name)));
            if (targets.length === 0) {
                console.log(fmt.info(`${skill.name} is not installed on any known platform — no changes`));
                process.exit(0);
            }
        }
        let exitCode = 0;
        for (const platform of targets) {
            const destPath = skillDestPath(platform, skill.name);
            try {
                // Delete first, then clear the recorded install state for this platform.
                const existed = deleteSkillAt(destPath);
                recordSkillState(platform, skill.name, false, null);
                console.log(existed
                    ? fmt.success(`Uninstalled ${skill.name} from ${platform} (${destPath})`)
                    : fmt.info(`${skill.name} was not installed on ${platform} — no changes`));
            }
            catch (e) {
                // A failure on one platform does not stop the others.
                console.error(fmt.error(`Failed to uninstall ${skill.name} from ${platform}: ${e}`));
                exitCode = 1;
            }
        }
        process.exit(exitCode);
    });
}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { randomBytes } from "crypto";
|
|
1
|
+
import { createHash, randomBytes } from "crypto";
|
|
2
2
|
import dns from "dns/promises";
|
|
3
3
|
import fs from "fs";
|
|
4
4
|
import net from "net";
|
|
@@ -6,6 +6,7 @@ import path from "path";
|
|
|
6
6
|
import { getAuditLogPath } from "./config-defaults.js";
|
|
7
7
|
import { ConfigManager } from "./config-manager.js";
|
|
8
8
|
import { assessCommandRisk } from "./risk-rules.js";
|
|
9
|
+
import { RegexScanner } from "../scanners/regex-scanner.js";
|
|
9
10
|
/**
|
|
10
11
|
* Validate a webhook URL to prevent SSRF attacks.
|
|
11
12
|
* Rejects non-HTTP(S) schemes and URLs that resolve to private/internal IPs.
|
|
@@ -99,6 +100,97 @@ function isPrivateIp(ip) {
|
|
|
99
100
|
}
|
|
100
101
|
return false;
|
|
101
102
|
}
|
|
103
|
+
/**
 * Return the sha256 hex digest of the last non-empty line of the file
 * (including its trailing newline), or null if the file is empty or absent.
 * Only the tail of the file is read, so this stays cheap on large logs.
 *
 * @param {string} filePath - Log file to inspect.
 * @returns {string | null} Hex digest, or null when there is no line to hash.
 */
export function readLastLineHash(filePath) {
    if (!fs.existsSync(filePath)) {
        return null;
    }
    const { size } = fs.statSync(filePath);
    if (size === 0) {
        return null;
    }
    // Read at most the last 64KB — an audit line tops out well under this.
    const tailLength = Math.min(size, 65536);
    const handle = fs.openSync(filePath, "r");
    try {
        const tail = Buffer.alloc(tailLength);
        fs.readSync(handle, tail, 0, tailLength, size - tailLength);
        // Scan backwards for the last line containing non-whitespace.
        const candidates = tail.toString("utf-8").split("\n");
        while (candidates.length > 0) {
            const line = candidates.pop();
            if (line.trim()) {
                return createHash("sha256").update(line + "\n").digest("hex");
            }
        }
        return null;
    }
    finally {
        fs.closeSync(handle);
    }
}
|
|
134
|
+
/**
 * Best-effort exclusive file lock via an O_EXCL sibling ".lock" file. Retries
 * briefly, stealing locks older than 5s (presumed stale), then gives up —
 * better to log without chain integrity than to drop the event.
 *
 * Fix: the retry pause was a hot `while (Date.now() < until) {}` spin that
 * burned a full CPU core for the duration. `Atomics.wait` on a throwaway
 * SharedArrayBuffer gives a true synchronous sleep in Node instead.
 *
 * @param {string} targetPath - File to guard; lock file is `targetPath + ".lock"`.
 * @param {number} [maxAttempts=20] - Acquisition attempts before degrading.
 * @param {number} [delayMs=25] - Pause between attempts.
 * @returns {() => void} Release function; a no-op when acquisition failed
 *   (caller proceeds WITHOUT the lock — degraded mode).
 */
export function acquireLock(targetPath, maxAttempts = 20, delayMs = 25) {
    const lockPath = targetPath + ".lock";
    for (let attempt = 0; attempt < maxAttempts; attempt++) {
        try {
            // "wx" = O_CREAT | O_EXCL: fails with EEXIST if another holder won the race.
            const fd = fs.openSync(lockPath, "wx", 0o600);
            fs.writeSync(fd, String(process.pid));
            fs.closeSync(fd);
            return () => {
                try {
                    fs.unlinkSync(lockPath);
                }
                catch { /* already gone */ }
            };
        }
        catch (e) {
            if (e.code !== "EEXIST")
                throw e;
            // Stale lock detection: if older than 5s, steal it and retry immediately.
            try {
                const st = fs.statSync(lockPath);
                if (Date.now() - st.mtimeMs > 5000) {
                    fs.unlinkSync(lockPath);
                    continue;
                }
            }
            catch { /* race with another releaser — retry */ }
            // Synchronous sleep without burning CPU: the wait always times out
            // because nothing ever notifies on this private buffer.
            Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, delayMs);
        }
    }
    // Degrade gracefully: caller proceeds without the lock.
    return () => { };
}
|
|
171
|
+
/**
 * Walk up from startDir looking for a `.git` directory. Returns the repo root
 * (the directory containing `.git`), or undefined if none is found within
 * maxDepth hops. Pure filesystem lookup — no subprocess, no git binary.
 *
 * @param {string} startDir - Directory to start the upward walk from.
 * @param {number} [maxDepth=20] - Maximum number of directories to examine.
 * @returns {string | undefined} Repo root, or undefined when not in a repo.
 */
export function findGitRepoRoot(startDir, maxDepth = 20) {
    let current = startDir;
    let remaining = maxDepth;
    while (remaining-- > 0) {
        try {
            if (fs.existsSync(path.join(current, ".git"))) {
                return current;
            }
        }
        catch {
            // Unreadable directory — treat as "not a repo" rather than throwing.
            return undefined;
        }
        const parent = path.dirname(current);
        if (parent === current) {
            // Reached the filesystem root without finding a repo.
            return undefined;
        }
        current = parent;
    }
    return undefined;
}
|
|
102
194
|
export const RISK_SEVERITY = {
|
|
103
195
|
low: 0,
|
|
104
196
|
medium: 1,
|
|
@@ -107,9 +199,25 @@ export const RISK_SEVERITY = {
|
|
|
107
199
|
};
|
|
108
200
|
export class AuditLogger {
|
|
109
201
|
constructor(logPath) {
|
|
110
|
-
this.logPath = logPath || getAuditLogPath();
|
|
111
|
-
this.sessionId = this.generateSessionId();
|
|
112
202
|
this.configManager = new ConfigManager();
|
|
203
|
+
this.scanner = new RegexScanner();
|
|
204
|
+
this.sessionId = this.generateSessionId();
|
|
205
|
+
if (logPath) {
|
|
206
|
+
this.logPath = logPath;
|
|
207
|
+
}
|
|
208
|
+
else {
|
|
209
|
+
// Project-local override from .rafter.yml (via loadWithPolicy) beats global
|
|
210
|
+
let policyPath;
|
|
211
|
+
try {
|
|
212
|
+
policyPath = this.configManager.loadWithPolicy()?.agent?.audit?.logPath;
|
|
213
|
+
}
|
|
214
|
+
catch {
|
|
215
|
+
// fall through to global
|
|
216
|
+
}
|
|
217
|
+
this.logPath = policyPath
|
|
218
|
+
? path.resolve(policyPath)
|
|
219
|
+
: getAuditLogPath();
|
|
220
|
+
}
|
|
113
221
|
// Ensure log directory exists
|
|
114
222
|
const dir = path.dirname(this.logPath);
|
|
115
223
|
if (!fs.existsSync(dir)) {
|
|
@@ -125,16 +233,73 @@ export class AuditLogger {
|
|
|
125
233
|
if (!config.agent?.audit.logAllActions) {
|
|
126
234
|
return;
|
|
127
235
|
}
|
|
128
|
-
const
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
236
|
+
const cwd = entry.cwd ?? process.cwd();
|
|
237
|
+
const gitRepo = entry.gitRepo ?? findGitRepoRoot(cwd);
|
|
238
|
+
// Atomic read-last-line + append under a file lock so concurrent writers
|
|
239
|
+
// don't race and produce entries with duplicate prevHash values.
|
|
240
|
+
const release = acquireLock(this.logPath);
|
|
241
|
+
try {
|
|
242
|
+
const prevHash = readLastLineHash(this.logPath);
|
|
243
|
+
const fullEntry = {
|
|
244
|
+
...entry,
|
|
245
|
+
cwd,
|
|
246
|
+
gitRepo,
|
|
247
|
+
prevHash,
|
|
248
|
+
timestamp: new Date().toISOString(),
|
|
249
|
+
sessionId: this.sessionId
|
|
250
|
+
};
|
|
251
|
+
const line = JSON.stringify(fullEntry) + "\n";
|
|
252
|
+
fs.appendFileSync(this.logPath, line, { encoding: "utf-8", mode: 0o600 });
|
|
253
|
+
// Send webhook notification if configured and risk meets threshold
|
|
254
|
+
this.sendNotification(fullEntry, config);
|
|
255
|
+
}
|
|
256
|
+
finally {
|
|
257
|
+
release();
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
/**
 * Verify the hash chain integrity of the audit log.
 * Returns break locations (1-indexed line numbers where prevHash didn't
 * match the sha256 of the actual prior line). Empty array means the chain
 * is intact. A non-empty result means the log has been tampered with,
 * truncated, or rewritten (including by the legacy cleanup() path).
 */
verify() {
    // A missing log file is a trivially intact (empty) chain.
    if (!fs.existsSync(this.logPath)) {
        return [];
    }
    const content = fs.readFileSync(this.logPath, "utf-8");
    const rawLines = content.split("\n");
    const breaks = [];
    // Raw text of the previous non-empty line. The chain hashes raw line text
    // plus "\n", mirroring how the writer derives prevHash from the file tail.
    let lastRawLine = null;
    for (let i = 0; i < rawLines.length; i++) {
        const raw = rawLines[i];
        // Blank lines are neither entries nor chain links — skip entirely.
        if (!raw.trim())
            continue;
        let entry;
        try {
            entry = JSON.parse(raw);
        }
        catch {
            // An unparseable line is itself a break, but it still participates in
            // the chain: the next entry's prevHash is checked against this raw text.
            breaks.push({ line: i + 1, reason: "malformed JSON" });
            lastRawLine = raw;
            continue;
        }
        // The very first entry in the file must carry prevHash === null.
        const expected = lastRawLine === null
            ? null
            : createHash("sha256").update(lastRawLine + "\n").digest("hex");
        // Legacy entries without a prevHash field normalize to null (and will
        // be reported as breaks unless they are the first entry).
        const actual = entry.prevHash ?? null;
        if (actual !== expected) {
            breaks.push({
                line: i + 1,
                reason: expected === null
                    ? `first entry has prevHash ${actual} but expected null`
                    : `prevHash ${actual ?? "null"} does not match expected ${expected}`,
            });
        }
        lastRawLine = raw;
    }
    return breaks;
}
|
|
139
304
|
/**
|
|
140
305
|
* Send webhook notification for high-risk events
|
|
@@ -180,7 +345,7 @@ export class AuditLogger {
|
|
|
180
345
|
eventType: "command_intercepted",
|
|
181
346
|
agentType,
|
|
182
347
|
action: {
|
|
183
|
-
command,
|
|
348
|
+
command: this.scanner.redact(command),
|
|
184
349
|
riskLevel: this.assessCommandRisk(command)
|
|
185
350
|
},
|
|
186
351
|
securityCheck: {
|
|
@@ -236,7 +401,7 @@ export class AuditLogger {
|
|
|
236
401
|
eventType: "policy_override",
|
|
237
402
|
agentType,
|
|
238
403
|
action: {
|
|
239
|
-
command,
|
|
404
|
+
command: command ? this.scanner.redact(command) : command,
|
|
240
405
|
riskLevel: "high"
|
|
241
406
|
},
|
|
242
407
|
securityCheck: {
|
|
@@ -281,6 +446,14 @@ export class AuditLogger {
|
|
|
281
446
|
if (filter.since) {
|
|
282
447
|
entries = entries.filter(e => new Date(e.timestamp) >= filter.since);
|
|
283
448
|
}
|
|
449
|
+
if (filter.cwd) {
|
|
450
|
+
const needle = filter.cwd;
|
|
451
|
+
entries = entries.filter(e => (e.cwd ?? "").includes(needle));
|
|
452
|
+
}
|
|
453
|
+
if (filter.gitRepo) {
|
|
454
|
+
const needle = filter.gitRepo;
|
|
455
|
+
entries = entries.filter(e => (e.gitRepo ?? "").includes(needle));
|
|
456
|
+
}
|
|
284
457
|
if (filter.limit) {
|
|
285
458
|
entries = entries.slice(-filter.limit);
|
|
286
459
|
}
|
|
@@ -288,18 +461,86 @@ export class AuditLogger {
|
|
|
288
461
|
return entries;
|
|
289
462
|
}
|
|
290
463
|
/**
 * Clean up old log entries based on retention policy.
 *
 * Retention rewrites break the on-disk hash chain by design (some entries
 * disappear). To keep verify() meaningful post-cleanup we re-seal the
 * chain across surviving entries and record a sidecar `<logPath>.retention.log`
 * line capturing the pre-cleanup tip hash and pruned count, so a verifier
 * can cross-check that retention — not tampering — is what broke the
 * old chain.
 */
cleanup() {
    const config = this.configManager.load();
    const retentionDays = config.agent?.audit.retentionDays || 30;
    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
    // Hold the append lock for the whole read-filter-rewrite cycle so a
    // concurrent log() cannot interleave with the rewrite.
    const release = acquireLock(this.logPath);
    try {
        if (!fs.existsSync(this.logPath))
            return;
        // Tip hash BEFORE pruning — recorded in the sidecar for cross-checking.
        const preTipHash = readLastLineHash(this.logPath);
        const raw = fs.readFileSync(this.logPath, "utf-8");
        const rawLines = raw.split("\n").filter(l => l.trim());
        const kept = [];
        let prunedCount = 0;
        for (const line of rawLines) {
            try {
                const entry = JSON.parse(line);
                // Entries with no timestamp cannot be aged — prune them.
                if (!entry.timestamp) {
                    prunedCount++;
                    continue;
                }
                if (new Date(entry.timestamp) >= cutoffDate) {
                    kept.push(entry);
                }
                else {
                    prunedCount++;
                }
            }
            catch {
                // Malformed lines are dropped along with expired ones.
                prunedCount++;
            }
        }
        // Re-seal the chain across surviving entries.
        const output = [];
        let prevLine = null;
        for (const entry of kept) {
            const resealed = {
                ...entry,
                prevHash: prevLine === null
                    ? null
                    : createHash("sha256").update(prevLine + "\n").digest("hex"),
            };
            // Hash the SERIALIZED previous line — the same form verify() reads.
            const serialized = JSON.stringify(resealed);
            output.push(serialized);
            prevLine = serialized;
        }
        const content = output.length > 0 ? output.join("\n") + "\n" : "";
        // Atomic replace so readers never see a truncated file.
        const tmpPath = this.logPath + ".tmp-" + randomBytes(4).toString("hex");
        fs.writeFileSync(tmpPath, content, { encoding: "utf-8", mode: 0o600 });
        fs.renameSync(tmpPath, this.logPath);
        if (prunedCount > 0) {
            // Sidecar audit of the retention event itself.
            const sidecar = this.logPath + ".retention.log";
            const note = {
                timestamp: new Date().toISOString(),
                prunedCount,
                retainedCount: kept.length,
                retentionDays,
                preCleanupTipHash: preTipHash,
            };
            try {
                fs.appendFileSync(sidecar, JSON.stringify(note) + "\n", { encoding: "utf-8", mode: 0o600 });
            }
            catch {
                // sidecar is best-effort — don't fail cleanup if we can't write it
            }
        }
    }
    finally {
        release();
    }
}
|
|
304
545
|
/**
|
|
305
546
|
* Generate a unique session ID
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import crypto from "crypto";
|
|
4
|
+
import { execSync } from "child_process";
|
|
5
|
+
import { loadPolicy } from "./policy-loader.js";
|
|
6
|
+
import { getRafterDir } from "./config-defaults.js";
|
|
7
|
+
// Default freshness window for cached URL docs: 24 hours.
const DEFAULT_TTL_SECONDS = 86400;
/** Directory under the rafter home dir where fetched URL docs are cached. */
function getCacheDir() {
    const base = getRafterDir();
    return path.join(base, "docs-cache");
}
|
|
11
|
+
/** Stable cache filename key for a URL: first 32 hex chars of its sha256. */
function cacheKey(url) {
    const digest = crypto.createHash("sha256").update(url).digest("hex");
    return digest.slice(0, 32);
}
|
|
14
|
+
/** Content and sidecar-metadata file paths for a cached URL doc. */
function cachePaths(url) {
    const key = cacheKey(url);
    const dir = getCacheDir();
    return {
        content: path.join(dir, `${key}.txt`),
        meta: path.join(dir, `${key}.meta.json`),
    };
}
|
|
22
|
+
/**
 * Load the cached copy of a URL doc. Returns { content, fetchedAt } or null
 * when the cache entry is absent, unreadable, or has an unparsable timestamp.
 */
function readCache(url) {
    const { content: contentPath, meta: metaPath } = cachePaths(url);
    if (!fs.existsSync(contentPath) || !fs.existsSync(metaPath)) {
        return null;
    }
    try {
        const meta = JSON.parse(fs.readFileSync(metaPath, "utf-8"));
        const fetchedAt = Date.parse(meta.fetched_at);
        if (Number.isNaN(fetchedAt)) {
            return null;
        }
        return { content: fs.readFileSync(contentPath, "utf-8"), fetchedAt };
    }
    catch {
        return null;
    }
}
|
|
38
|
+
/** Persist a fetched doc body plus sidecar metadata ({fetched_at, url, content_type}). */
function writeCache(url, body, contentType) {
    const dir = getCacheDir();
    if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true });
    }
    const paths = cachePaths(url);
    const meta = {
        fetched_at: new Date().toISOString(),
        url,
        content_type: contentType,
    };
    fs.writeFileSync(paths.content, body, "utf-8");
    fs.writeFileSync(paths.meta, JSON.stringify(meta, null, 2) + "\n", "utf-8");
}
|
|
50
|
+
/** True when `fetchedAt` (epoch ms) is older than the TTL window. */
function isExpired(fetchedAt, ttlSeconds) {
    const ageMs = Date.now() - fetchedAt;
    return ageMs > ttlSeconds * 1000;
}
|
|
53
|
+
/**
 * Resolve a policy-relative doc path. Absolute paths pass through untouched;
 * relative paths resolve against the git repo root, falling back to the
 * current working directory when not inside a git checkout.
 */
function resolvePolicyPath(relative) {
    if (path.isAbsolute(relative)) {
        return relative;
    }
    let root;
    try {
        const out = execSync("git rev-parse --show-toplevel", {
            encoding: "utf-8",
            stdio: ["pipe", "pipe", "ignore"],
        });
        root = out.trim();
    }
    catch {
        // Not a git repo (or git missing) — anchor at the working directory.
        root = process.cwd();
    }
    return path.resolve(root, relative);
}
|
|
68
|
+
/**
 * List docs from the active policy (or from an explicit `entries` array) with
 * resolution metadata: source, sourceKind, and cache status.
 * Never performs network I/O.
 */
export function listDocs(entries) {
    const policy = entries ? { docs: entries } : loadPolicy();
    const docs = policy?.docs || [];
    return docs.map((doc) => {
        if (doc.path) {
            // Local files are always available — no cache involved.
            return {
                ...doc,
                source: doc.path,
                sourceKind: "path",
                cacheStatus: "local",
            };
        }
        const url = doc.url;
        const ttlSeconds = doc.cache?.ttlSeconds ?? DEFAULT_TTL_SECONDS;
        const cached = readCache(url);
        let cacheStatus;
        if (!cached) {
            cacheStatus = "not-cached";
        }
        else if (isExpired(cached.fetchedAt, ttlSeconds)) {
            cacheStatus = "stale";
        }
        else {
            cacheStatus = "cached";
        }
        return {
            ...doc,
            source: url,
            sourceKind: "url",
            cacheStatus,
            cachedPath: cached ? cachePaths(url).content : undefined,
        };
    });
}
|
|
101
|
+
/**
 * Resolve docs matching an id or tag. An exact id match wins and yields a
 * single-element array; otherwise every entry carrying the tag is returned.
 */
export function resolveDocSelector(selector, entries) {
    const policy = entries ? { docs: entries } : loadPolicy();
    const docs = policy?.docs || [];
    for (const doc of docs) {
        if (doc.id === selector) {
            return [doc];
        }
    }
    return docs.filter((doc) => Array.isArray(doc.tags) && doc.tags.includes(selector));
}
|
|
112
|
+
/**
 * Return content for a doc entry. Local `path` entries are read from disk;
 * `url` entries are served from cache when fresh (unless opts.refresh),
 * fetched otherwise. On a network failure with any cached copy available,
 * the stale copy is returned with `stale: true`; with no cache the fetch
 * error propagates to the caller.
 */
export async function fetchDoc(entry, opts = {}) {
    if (entry.path) {
        const abs = resolvePolicyPath(entry.path);
        return {
            content: fs.readFileSync(abs, "utf-8"),
            cached: false,
            stale: false,
            source: entry.path,
            sourceKind: "path",
        };
    }
    const url = entry.url;
    const ttlSeconds = entry.cache?.ttlSeconds ?? DEFAULT_TTL_SECONDS;
    const cached = readCache(url);
    const isFresh = cached !== null && !isExpired(cached.fetchedAt, ttlSeconds);
    if (isFresh && !opts.refresh) {
        return { content: cached.content, cached: true, stale: false, source: url, sourceKind: "url" };
    }
    try {
        const response = await fetch(url, {
            redirect: "follow",
            signal: AbortSignal.timeout(15000),
        });
        if (!response.ok) {
            throw new Error(`HTTP ${response.status}`);
        }
        const body = await response.text();
        writeCache(url, body, response.headers.get("content-type") || "text/plain");
        return { content: body, cached: false, stale: false, source: url, sourceKind: "url" };
    }
    catch (err) {
        if (cached) {
            // Degrade to the stale cached copy rather than failing outright.
            return { content: cached.content, cached: true, stale: true, source: url, sourceKind: "url" };
        }
        throw err;
    }
}
|
|
@@ -83,10 +83,77 @@ function mapPolicy(raw) {
|
|
|
83
83
|
}
|
|
84
84
|
if (raw.audit.log_level)
|
|
85
85
|
policy.audit.logLevel = raw.audit.log_level;
|
|
86
|
+
if (raw.audit.log_path)
|
|
87
|
+
policy.audit.logPath = String(raw.audit.log_path);
|
|
88
|
+
}
|
|
89
|
+
if (Array.isArray(raw.docs)) {
|
|
90
|
+
policy.docs = [];
|
|
91
|
+
const seenIds = new Set();
|
|
92
|
+
for (const entry of raw.docs) {
|
|
93
|
+
if (!entry || typeof entry !== "object") {
|
|
94
|
+
console.error(`Warning: skipping malformed docs entry — must be an object.`);
|
|
95
|
+
continue;
|
|
96
|
+
}
|
|
97
|
+
const hasPath = typeof entry.path === "string" && entry.path.length > 0;
|
|
98
|
+
const hasUrl = typeof entry.url === "string" && entry.url.length > 0;
|
|
99
|
+
if (hasPath === hasUrl) {
|
|
100
|
+
console.error(`Warning: skipping docs entry — must have exactly one of "path" or "url".`);
|
|
101
|
+
continue;
|
|
102
|
+
}
|
|
103
|
+
const id = typeof entry.id === "string" && entry.id.length > 0
|
|
104
|
+
? entry.id
|
|
105
|
+
: deriveDocId(hasPath ? entry.path : entry.url, hasPath ? "path" : "url");
|
|
106
|
+
if (seenIds.has(id)) {
|
|
107
|
+
console.error(`Warning: skipping docs entry with duplicate id "${id}".`);
|
|
108
|
+
continue;
|
|
109
|
+
}
|
|
110
|
+
seenIds.add(id);
|
|
111
|
+
const doc = { id };
|
|
112
|
+
if (hasPath)
|
|
113
|
+
doc.path = entry.path;
|
|
114
|
+
if (hasUrl)
|
|
115
|
+
doc.url = entry.url;
|
|
116
|
+
if (typeof entry.description === "string")
|
|
117
|
+
doc.description = entry.description;
|
|
118
|
+
if (Array.isArray(entry.tags) && entry.tags.every((t) => typeof t === "string")) {
|
|
119
|
+
doc.tags = entry.tags;
|
|
120
|
+
}
|
|
121
|
+
else if (entry.tags !== undefined) {
|
|
122
|
+
console.error(`Warning: docs entry "${id}" — tags must be a list of strings, ignoring.`);
|
|
123
|
+
}
|
|
124
|
+
if (entry.cache && typeof entry.cache === "object") {
|
|
125
|
+
const ttl = entry.cache.ttl_seconds;
|
|
126
|
+
if (!hasUrl) {
|
|
127
|
+
console.error(`Warning: docs entry "${id}" — cache is only valid with url, ignoring.`);
|
|
128
|
+
}
|
|
129
|
+
else if (typeof ttl === "number" && ttl > 0 && Number.isFinite(ttl)) {
|
|
130
|
+
doc.cache = { ttlSeconds: Math.floor(ttl) };
|
|
131
|
+
}
|
|
132
|
+
else {
|
|
133
|
+
console.error(`Warning: docs entry "${id}" — cache.ttl_seconds must be a positive number, ignoring.`);
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
const known = new Set(["id", "path", "url", "description", "tags", "cache"]);
|
|
137
|
+
for (const key of Object.keys(entry)) {
|
|
138
|
+
if (!known.has(key)) {
|
|
139
|
+
console.error(`Warning: docs entry "${id}" — unknown key "${key}", ignoring.`);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
policy.docs.push(doc);
|
|
143
|
+
}
|
|
86
144
|
}
|
|
87
145
|
return policy;
|
|
88
146
|
}
|
|
89
|
-
|
|
147
|
+
import { createHash as docIdHash } from "crypto";
/**
 * Derive a stable doc id when the policy entry omits one.
 * Path entries use the basename stripped of its final extension; url entries
 * use the first 8 hex chars of the url's sha256.
 *
 * Fix: this file is an ES module (it uses import/export), so the original
 * `const crypto = require("crypto")` threw a ReferenceError at runtime for
 * any url entry without an explicit id — `require` does not exist in ESM.
 * Replaced with a hoisted top-level import (legal anywhere at module scope).
 *
 * @param {string} source - The entry's `path` or `url` value.
 * @param {"path" | "url"} kind - Which field `source` came from.
 * @returns {string} Derived id.
 */
function deriveDocId(source, kind) {
    if (kind === "path") {
        const base = path.basename(source);
        // Strip only the final extension; fall back to the full basename for
        // dotfiles like ".env" where stripping would leave an empty string.
        const withoutExt = base.replace(/\.[^./]+$/, "");
        return withoutExt || base;
    }
    return docIdHash("sha256").update(source).digest("hex").slice(0, 8);
}
|
|
156
|
+
// Accepted vocabulary for .rafter.yml policy validation: top-level keys and
// the enumerated values for risk level, command policy mode, and log level.
const VALID_TOP_LEVEL_KEYS = new Set(["version", "risk_level", "command_policy", "scan", "audit", "docs"]);
const VALID_RISK_LEVELS = new Set(["minimal", "moderate", "aggressive"]);
const VALID_COMMAND_MODES = new Set(["allow-all", "approve-dangerous", "deny-list"]);
const VALID_LOG_LEVELS = new Set(["debug", "info", "warn", "error"]);
|
|
@@ -174,6 +241,10 @@ function validatePolicy(policy, raw) {
|
|
|
174
241
|
console.error(`Warning: "audit.log_level" must be one of: debug, info, warn, error — ignoring.`);
|
|
175
242
|
delete policy.audit.logLevel;
|
|
176
243
|
}
|
|
244
|
+
if (policy.audit.logPath !== undefined && typeof policy.audit.logPath !== "string") {
|
|
245
|
+
console.error(`Warning: "audit.log_path" must be a string — ignoring.`);
|
|
246
|
+
delete policy.audit.logPath;
|
|
247
|
+
}
|
|
177
248
|
}
|
|
178
249
|
return policy;
|
|
179
250
|
}
|