@rafter-security/cli 0.5.3 → 0.5.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -3
- package/dist/commands/agent/audit-skill.js +2 -2
- package/dist/commands/agent/audit.js +96 -0
- package/dist/commands/agent/baseline.js +213 -0
- package/dist/commands/agent/exec.js +1 -1
- package/dist/commands/agent/index.js +4 -0
- package/dist/commands/agent/init.js +371 -29
- package/dist/commands/agent/install-hook.js +41 -47
- package/dist/commands/agent/scan.js +196 -23
- package/dist/commands/agent/status.js +65 -4
- package/dist/commands/agent/update-gitleaks.js +40 -0
- package/dist/commands/agent/verify.js +18 -4
- package/dist/commands/backend/run.js +69 -61
- package/dist/commands/ci/init.js +10 -3
- package/dist/commands/completion.js +320 -110
- package/dist/commands/hook/posttool.js +21 -7
- package/dist/commands/hook/pretool.js +50 -13
- package/dist/commands/issues/dedup.js +39 -0
- package/dist/commands/issues/from-scan.js +143 -0
- package/dist/commands/issues/from-text.js +185 -0
- package/dist/commands/issues/github-client.js +85 -0
- package/dist/commands/issues/index.js +25 -0
- package/dist/commands/issues/issue-builder.js +101 -0
- package/dist/commands/policy/export.js +7 -2
- package/dist/commands/scan/index.js +44 -0
- package/dist/core/audit-logger.js +41 -0
- package/dist/core/config-defaults.js +28 -0
- package/dist/core/config-manager.js +19 -2
- package/dist/core/pattern-engine.js +26 -1
- package/dist/core/risk-rules.js +5 -3
- package/dist/index.js +8 -2
- package/dist/scanners/gitleaks.js +5 -5
- package/dist/scanners/regex-scanner.js +12 -1
- package/dist/scanners/secret-patterns.js +3 -3
- package/dist/utils/binary-manager.js +59 -20
- package/dist/utils/skill-manager.js +5 -3
- package/package.json +2 -1
- package/resources/pre-commit-hook.sh +2 -2
- package/resources/pre-push-hook.sh +60 -0
- package/resources/rafter-security-skill.md +7 -11
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Build structured GitHub issues from scan findings.
|
|
3
|
+
*
|
|
4
|
+
* Handles both:
|
|
5
|
+
* - Backend scan vulnerabilities (SAST/policy findings)
|
|
6
|
+
* - Local scan results (secret detection)
|
|
7
|
+
*/
|
|
8
|
+
import { fingerprint, embedFingerprint } from "./dedup.js";
|
|
9
|
+
/**
 * Normalize a finding level (SARIF-style "error"/"warning"/"note" or a
 * severity word) to one of "critical" | "high" | "medium" | "low".
 * Unknown or missing levels fall back to "medium".
 * @param {string|undefined|null} level - raw level from backend or pattern data
 * @returns {string} normalized severity label
 */
function severityLabel(level) {
  const map = {
    error: "critical",
    critical: "critical",
    warning: "high",
    high: "high",
    note: "medium",
    medium: "medium",
    low: "low",
  };
  // Guard: callers forward vuln.level / pattern.severity straight from
  // external data, which may be undefined — coerce instead of throwing
  // a TypeError on .toLowerCase(), and let it hit the "medium" default.
  return map[String(level ?? "").toLowerCase()] || "medium";
}
|
|
21
|
+
/**
 * Map a raw finding level to a colored-circle emoji for issue titles.
 * The level is first normalized through severityLabel(), so any input
 * that normalizes to an unknown label shares the "medium" yellow marker.
 * @param {string} level - raw level (same values severityLabel accepts)
 * @returns {string} emoji for the normalized severity
 */
function severityEmoji(level) {
  const normalized = severityLabel(level);
  switch (normalized) {
    case "critical":
      return "🔴";
    case "high":
      return "🟠";
    case "low":
      return "🟢";
    default:
      // "medium" (and anything unexpected) renders yellow.
      return "🟡";
  }
}
|
|
31
|
+
/**
 * Build a GitHub issue payload from a backend scan vulnerability
 * (SAST/policy finding).
 * @param {{ruleId: string, level: string, file: string, line?: number, message: string}} vuln
 * @returns {{title: string, body: string, labels: string[], fingerprint: string}}
 *   Issue title/body/labels plus the dedup fingerprint (also embedded in body).
 */
export function buildFromBackendVulnerability(vuln) {
  const severity = severityLabel(vuln.level);
  const icon = severityEmoji(vuln.level);
  // Fingerprint keys on file + rule so re-scans map to the same issue.
  const fp = fingerprint(vuln.file, vuln.ruleId);
  const title = `${icon} [${severity.toUpperCase()}] ${vuln.ruleId}: ${truncate(vuln.message, 80)}`;
  const fileLine = vuln.line
    ? `**File:** \`${vuln.file}\` (line ${vuln.line})`
    : `**File:** \`${vuln.file}\``;
  const body =
    `## Security Finding\n\n` +
    `**Rule:** \`${vuln.ruleId}\`\n` +
    `**Severity:** ${severity}\n` +
    `${fileLine}\n\n` +
    `### Description\n\n${vuln.message}\n\n` +
    `### Remediation\n\nReview and fix the finding in \`${vuln.file}\`.\n` +
    `\n---\n*Created by [Rafter CLI](https://rafter.so) — security for AI builders*\n`;
  return {
    title,
    body: embedFingerprint(body, fp),
    labels: ["security", `severity:${severity}`, `rule:${vuln.ruleId}`],
    fingerprint: fp,
  };
}
|
|
58
|
+
/**
 * Build a GitHub issue payload from a local secret-scanner match.
 * @param {string} file - path of the file containing the match
 * @param {{pattern: {name: string, severity: string, description?: string}, line?: number, redacted?: string}} match
 * @returns {{title: string, body: string, labels: string[], fingerprint: string}}
 *   Issue title/body/labels plus the dedup fingerprint (also embedded in body).
 */
export function buildFromLocalMatch(file, match) {
  const severity = severityLabel(match.pattern.severity);
  const icon = severityEmoji(match.pattern.severity);
  // Fingerprint keys on file + pattern name so re-scans map to the same issue.
  const fp = fingerprint(file, match.pattern.name);
  const title = `${icon} [${severity.toUpperCase()}] Secret detected: ${match.pattern.name} in ${basename(file)}`;
  const parts = [`## Secret Detection\n\n`];
  parts.push(`**Pattern:** \`${match.pattern.name}\`\n`);
  parts.push(`**Severity:** ${severity}\n`);
  parts.push(match.line
    ? `**File:** \`${file}\` (line ${match.line})\n`
    : `**File:** \`${file}\`\n`);
  if (match.redacted) {
    // Redacted preview only — never the raw secret.
    parts.push(`**Match:** \`${match.redacted}\`\n`);
  }
  parts.push(`\n`);
  if (match.pattern.description) {
    parts.push(`### Description\n\n${match.pattern.description}\n\n`);
  }
  parts.push(
    `### Remediation\n\n` +
    `1. Rotate the exposed credential immediately\n` +
    `2. Remove the secret from source code\n` +
    `3. Use environment variables or a secrets manager instead\n` +
    `\n---\n*Created by [Rafter CLI](https://rafter.so) — security for AI builders*\n`
  );
  return {
    title,
    body: embedFingerprint(parts.join(""), fp),
    labels: ["security", "secret-detected", `severity:${severity}`],
    fingerprint: fp,
  };
}
|
|
94
|
+
/**
 * Truncate a string to at most `max` characters, appending "..." when cut.
 * Missing or non-string input is coerced first (undefined/null become ""),
 * so callers passing through optional finding messages never hit a
 * TypeError on `.length`.
 * @param {*} s - value to truncate
 * @param {number} max - maximum length of the result, including the ellipsis
 * @returns {string}
 */
function truncate(s, max) {
  const str = typeof s === "string" ? s : String(s ?? "");
  if (str.length <= max)
    return str;
  // Reserve room for "..." but never slice with a negative index
  // (original code produced odd results for max < 3).
  return str.slice(0, Math.max(0, max - 3)) + "...";
}
|
|
99
|
+
/**
 * Return the final path segment of a POSIX- or Windows-style path.
 * @param {string} filepath - path with "/" and/or "\" separators
 * @returns {string} the last segment, or the input unchanged when the
 *   trailing segment is empty (e.g. a path ending in a separator)
 */
function basename(filepath) {
  // Split on both "/" and "\" so Windows paths also get short names
  // in issue titles (the original only handled forward slashes).
  return filepath.split(/[\\/]/).pop() || filepath;
}
|
|
@@ -54,9 +54,14 @@ function generateCodexConfig() {
|
|
|
54
54
|
const policy = cfg.agent?.commandPolicy;
|
|
55
55
|
const blocked = policy?.blockedPatterns || [];
|
|
56
56
|
const approval = policy?.requireApproval || [];
|
|
57
|
-
let toml = `# Rafter security policy for OpenAI Codex
|
|
57
|
+
let toml = `# Rafter security policy for OpenAI Codex CLI
|
|
58
58
|
# Generated by: rafter policy export --format codex
|
|
59
|
-
#
|
|
59
|
+
#
|
|
60
|
+
# Usage: Save this file to your project root as codex-policy.toml
|
|
61
|
+
# then reference it in your Codex sandbox configuration.
|
|
62
|
+
#
|
|
63
|
+
# For full Codex integration, run: rafter agent init
|
|
64
|
+
# This auto-detects Codex CLI and installs skills to ~/.agents/skills/
|
|
60
65
|
|
|
61
66
|
`;
|
|
62
67
|
if (blocked.length > 0) {
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* rafter scan — top-level scan command group.
|
|
3
|
+
*
|
|
4
|
+
* Default (no subcommand): remote backend scan (same as `rafter run`)
|
|
5
|
+
* rafter scan remote: explicit alias for remote backend scan
|
|
6
|
+
* rafter scan local [path]: local secret scanner (was `rafter agent scan`)
|
|
7
|
+
*/
|
|
8
|
+
import { Command } from "commander";
|
|
9
|
+
import { runRemoteScan } from "../backend/run.js";
|
|
10
|
+
import { createScanCommand as createLocalScanCommand } from "../agent/scan.js";
|
|
11
|
+
/**
 * Build the top-level `rafter scan` command group.
 *
 * Behavior:
 *   rafter scan              → remote backend scan (same handler as `rafter run`)
 *   rafter scan remote       → explicit alias for the remote backend scan
 *   rafter scan local [path] → local secret scanner (formerly `rafter agent scan`)
 *
 * @returns {Command} the configured commander command group
 */
export function createScanGroupCommand() {
  // Remote-scan flags are shared by the group root and the `remote` alias.
  const withRemoteOptions = (cmd) => cmd
    .option("-r, --repo <repo>", "org/repo (default: current)")
    .option("-b, --branch <branch>", "branch (default: current else main)")
    .option("-k, --api-key <key>", "API key or RAFTER_API_KEY env var")
    .option("-f, --format <format>", "json | md", "md")
    .option("--skip-interactive", "do not wait for scan to complete")
    .option("--quiet", "suppress status messages");

  // "local" subcommand — reuses the agent scan implementation under a new name.
  const local = createLocalScanCommand();
  local.name("local");
  local.description("Scan files or directories for secrets (local)");

  // "remote" subcommand — same handler as `rafter run`.
  const remote = withRemoteOptions(
    new Command("remote")
      .description("Trigger a remote backend security scan (explicit alias for 'rafter run')")
  ).action(async (opts) => {
    await runRemoteScan(opts);
  });

  // Root group — invoking `rafter scan` with no subcommand runs the remote scan.
  const group = withRemoteOptions(
    new Command("scan")
      .description("Scan for security issues. Default: remote backend scan. Use 'scan local' for local secret scanning.")
  );
  group.addCommand(local);
  group.addCommand(remote);
  group.action(async (opts) => {
    await runRemoteScan(opts);
  });
  return group;
}
|
|
@@ -3,6 +3,12 @@ import path from "path";
|
|
|
3
3
|
import { getAuditLogPath } from "./config-defaults.js";
|
|
4
4
|
import { ConfigManager } from "./config-manager.js";
|
|
5
5
|
import { assessCommandRisk } from "./risk-rules.js";
|
|
6
|
+
/**
 * Ordinal ranking of risk levels (higher = more severe), used to compare
 * an event's risk against the configured minimum notification threshold.
 */
export const RISK_SEVERITY = { low: 0, medium: 1, high: 2, critical: 3 };
|
|
6
12
|
export class AuditLogger {
|
|
7
13
|
constructor(logPath) {
|
|
8
14
|
this.logPath = logPath || getAuditLogPath();
|
|
@@ -31,6 +37,41 @@ export class AuditLogger {
|
|
|
31
37
|
// Append to log file
|
|
32
38
|
const line = JSON.stringify(fullEntry) + "\n";
|
|
33
39
|
fs.appendFileSync(this.logPath, line, "utf-8");
|
|
40
|
+
// Send webhook notification if configured and risk meets threshold
|
|
41
|
+
this.sendNotification(fullEntry, config);
|
|
42
|
+
}
|
|
43
|
+
/**
|
|
44
|
+
* Send webhook notification for high-risk events
|
|
45
|
+
*/
|
|
46
|
+
sendNotification(entry, config) {
|
|
47
|
+
const webhookUrl = config.agent?.notifications?.webhook;
|
|
48
|
+
if (!webhookUrl)
|
|
49
|
+
return;
|
|
50
|
+
const eventRisk = entry.action?.riskLevel || "low";
|
|
51
|
+
const minRisk = config.agent?.notifications?.minRiskLevel || "high";
|
|
52
|
+
if ((RISK_SEVERITY[eventRisk] ?? 0) < (RISK_SEVERITY[minRisk] ?? 2)) {
|
|
53
|
+
return;
|
|
54
|
+
}
|
|
55
|
+
const payload = {
|
|
56
|
+
event: entry.eventType,
|
|
57
|
+
risk: eventRisk,
|
|
58
|
+
command: entry.action?.command || null,
|
|
59
|
+
timestamp: entry.timestamp,
|
|
60
|
+
agent: entry.agentType || null,
|
|
61
|
+
// Slack-compatible text field
|
|
62
|
+
text: `[rafter] ${eventRisk}-risk event: ${entry.eventType}${entry.action?.command ? ` — ${entry.action.command}` : ""}`,
|
|
63
|
+
// Discord-compatible content field
|
|
64
|
+
content: `[rafter] ${eventRisk}-risk event: ${entry.eventType}${entry.action?.command ? ` — ${entry.action.command}` : ""}`,
|
|
65
|
+
};
|
|
66
|
+
// Fire-and-forget POST — never block audit logging
|
|
67
|
+
fetch(webhookUrl, {
|
|
68
|
+
method: "POST",
|
|
69
|
+
headers: { "Content-Type": "application/json" },
|
|
70
|
+
body: JSON.stringify(payload),
|
|
71
|
+
signal: AbortSignal.timeout(5000),
|
|
72
|
+
}).catch(() => {
|
|
73
|
+
// Silently ignore webhook failures
|
|
74
|
+
});
|
|
34
75
|
}
|
|
35
76
|
/**
|
|
36
77
|
* Log a command interception
|
|
@@ -19,6 +19,30 @@ export function getDefaultConfig() {
|
|
|
19
19
|
claudeCode: {
|
|
20
20
|
enabled: false,
|
|
21
21
|
mcpPath: path.join(os.homedir(), ".claude", "mcp", "rafter-security.json")
|
|
22
|
+
},
|
|
23
|
+
codex: {
|
|
24
|
+
enabled: false,
|
|
25
|
+
skillsDir: path.join(os.homedir(), ".agents", "skills")
|
|
26
|
+
},
|
|
27
|
+
gemini: {
|
|
28
|
+
enabled: false,
|
|
29
|
+
configPath: path.join(os.homedir(), ".gemini", "settings.json")
|
|
30
|
+
},
|
|
31
|
+
aider: {
|
|
32
|
+
enabled: false,
|
|
33
|
+
configPath: path.join(os.homedir(), ".aider.conf.yml")
|
|
34
|
+
},
|
|
35
|
+
cursor: {
|
|
36
|
+
enabled: false,
|
|
37
|
+
mcpPath: path.join(os.homedir(), ".cursor", "mcp.json")
|
|
38
|
+
},
|
|
39
|
+
windsurf: {
|
|
40
|
+
enabled: false,
|
|
41
|
+
mcpPath: path.join(os.homedir(), ".codeium", "windsurf", "mcp_config.json")
|
|
42
|
+
},
|
|
43
|
+
continueDev: {
|
|
44
|
+
enabled: false,
|
|
45
|
+
configPath: path.join(os.homedir(), ".continue", "config.json")
|
|
22
46
|
}
|
|
23
47
|
},
|
|
24
48
|
skills: {
|
|
@@ -39,6 +63,10 @@ export function getDefaultConfig() {
|
|
|
39
63
|
logAllActions: true,
|
|
40
64
|
retentionDays: 30,
|
|
41
65
|
logLevel: "info"
|
|
66
|
+
},
|
|
67
|
+
notifications: {
|
|
68
|
+
webhook: undefined,
|
|
69
|
+
minRiskLevel: "high"
|
|
42
70
|
}
|
|
43
71
|
}
|
|
44
72
|
};
|
|
@@ -179,10 +179,27 @@ export class ConfigManager {
|
|
|
179
179
|
* Migrate config to latest version
|
|
180
180
|
*/
|
|
181
181
|
migrate(config) {
|
|
182
|
-
|
|
183
|
-
// In future, handle version-specific migrations
|
|
182
|
+
let dirty = false;
|
|
184
183
|
if (config.version !== CONFIG_VERSION) {
|
|
185
184
|
config.version = CONFIG_VERSION;
|
|
185
|
+
dirty = true;
|
|
186
|
+
}
|
|
187
|
+
// Fix overly broad curl/wget pipe-to-shell patterns.
|
|
188
|
+
// Old pattern: "curl.*\|.*sh" — matches any command containing "sh" after a pipe
|
|
189
|
+
// (e.g. `grep "curl\|sh"` or `git push`). Replace with word-bounded shell names.
|
|
190
|
+
const badToGood = {
|
|
191
|
+
"curl.*\\|.*sh": "curl.*\\|\\s*(bash|sh|zsh|dash)\\b",
|
|
192
|
+
"wget.*\\|.*sh": "wget.*\\|\\s*(bash|sh|zsh|dash)\\b",
|
|
193
|
+
};
|
|
194
|
+
const approval = config.agent?.commandPolicy?.requireApproval;
|
|
195
|
+
if (Array.isArray(approval)) {
|
|
196
|
+
const fixed = approval.map(p => badToGood[p] ?? p);
|
|
197
|
+
if (fixed.some((p, i) => p !== approval[i])) {
|
|
198
|
+
config.agent.commandPolicy.requireApproval = fixed;
|
|
199
|
+
dirty = true;
|
|
200
|
+
}
|
|
201
|
+
}
|
|
202
|
+
if (dirty) {
|
|
186
203
|
this.save(config);
|
|
187
204
|
}
|
|
188
205
|
return config;
|
|
@@ -1,3 +1,7 @@
|
|
|
1
|
+
// Heuristics for suppressing false positives from the broad "Generic"
// secret patterns: a match whose quoted value is shaped like an identifier
// (SCREAMING_SNAKE_CASE env-var name or lower_snake_case variable) is
// treated as a reference to a secret rather than the secret itself.
const GENERIC_PATTERN_NAMES = new Set(["Generic API Key", "Generic Secret"]);
// ALL-CAPS identifier with at least one underscore, e.g. API_KEY, MY_SECRET_2
const VARIABLE_NAME_RE = /^[A-Z][A-Z0-9]*(?:_[A-Z0-9]+)+$/;
// lower_snake_case identifier with at least one underscore, e.g. api_key
const LOWERCASE_IDENT_RE = /^[a-z][a-z0-9]*(?:_[a-z0-9]+)+$/;
// Extracts the first single- or double-quoted value from a matched span.
const QUOTED_VALUE_RE = /['"]([^'"]+)['"]/;
|
|
1
5
|
export class PatternEngine {
|
|
2
6
|
constructor(patterns) {
|
|
3
7
|
this.patterns = patterns;
|
|
@@ -11,6 +15,8 @@ export class PatternEngine {
|
|
|
11
15
|
const regex = this.createRegex(pattern.regex);
|
|
12
16
|
let match;
|
|
13
17
|
while ((match = regex.exec(text)) !== null) {
|
|
18
|
+
if (this.isFalsePositive(pattern, match[0]))
|
|
19
|
+
continue;
|
|
14
20
|
matches.push({
|
|
15
21
|
pattern,
|
|
16
22
|
match: match[0],
|
|
@@ -32,6 +38,8 @@ export class PatternEngine {
|
|
|
32
38
|
const regex = this.createRegex(pattern.regex);
|
|
33
39
|
let match;
|
|
34
40
|
while ((match = regex.exec(line)) !== null) {
|
|
41
|
+
if (this.isFalsePositive(pattern, match[0]))
|
|
42
|
+
continue;
|
|
35
43
|
matches.push({
|
|
36
44
|
pattern,
|
|
37
45
|
match: match[0],
|
|
@@ -51,7 +59,7 @@ export class PatternEngine {
|
|
|
51
59
|
let redacted = text;
|
|
52
60
|
for (const pattern of this.patterns) {
|
|
53
61
|
const regex = this.createRegex(pattern.regex);
|
|
54
|
-
redacted = redacted.replace(regex, (match) => this.redact(match));
|
|
62
|
+
redacted = redacted.replace(regex, (match) => this.isFalsePositive(pattern, match) ? match : this.redact(match));
|
|
55
63
|
}
|
|
56
64
|
return redacted;
|
|
57
65
|
}
|
|
@@ -67,6 +75,23 @@ export class PatternEngine {
|
|
|
67
75
|
getPatternsBySeverity(severity) {
|
|
68
76
|
return this.patterns.filter(p => p.severity === severity);
|
|
69
77
|
}
|
|
78
|
+
/**
|
|
79
|
+
* Check if a match from a generic pattern looks like a variable name
|
|
80
|
+
* rather than an actual secret value.
|
|
81
|
+
*/
|
|
82
|
+
isFalsePositive(pattern, matchText) {
|
|
83
|
+
if (!GENERIC_PATTERN_NAMES.has(pattern.name))
|
|
84
|
+
return false;
|
|
85
|
+
const m = QUOTED_VALUE_RE.exec(matchText);
|
|
86
|
+
if (!m)
|
|
87
|
+
return false;
|
|
88
|
+
const value = m[1];
|
|
89
|
+
if (VARIABLE_NAME_RE.test(value))
|
|
90
|
+
return true;
|
|
91
|
+
if (LOWERCASE_IDENT_RE.test(value))
|
|
92
|
+
return true;
|
|
93
|
+
return false;
|
|
94
|
+
}
|
|
70
95
|
/**
|
|
71
96
|
* Create RegExp from pattern string, extracting inline flags
|
|
72
97
|
*/
|
package/dist/core/risk-rules.js
CHANGED
|
@@ -17,9 +17,10 @@ export const HIGH_PATTERNS = [
|
|
|
17
17
|
/chmod\s+777/,
|
|
18
18
|
/curl.*\|\s*(bash|sh|zsh|dash)\b/,
|
|
19
19
|
/wget.*\|\s*(bash|sh|zsh|dash)\b/,
|
|
20
|
-
/git\s+push\s
|
|
21
|
-
/git\s+push\s
|
|
22
|
-
/git\s+push\s
|
|
20
|
+
/git\s+push\b.*\s--force\b/, // --force anywhere after push
|
|
21
|
+
/git\s+push\b.*\s-[a-zA-Z]*f\b/, // -f or combined flags like -vf
|
|
22
|
+
/git\s+push\b.*\s--force-(with-lease|if-includes)\b/, // specific force variants
|
|
23
|
+
/git\s+push\s+\S*\s+\+/, // refspec force: git push origin +main
|
|
23
24
|
/docker\s+system\s+prune/,
|
|
24
25
|
/npm\s+publish/,
|
|
25
26
|
/pypi.*upload/,
|
|
@@ -50,6 +51,7 @@ export const DEFAULT_REQUIRE_APPROVAL = [
|
|
|
50
51
|
"git push -f",
|
|
51
52
|
"git push --force-with-lease",
|
|
52
53
|
"git push --force-if-includes",
|
|
54
|
+
"git push .* \\+",
|
|
53
55
|
];
|
|
54
56
|
/**
|
|
55
57
|
* Assess risk level of a command string.
|
package/dist/index.js
CHANGED
|
@@ -4,16 +4,18 @@ import * as dotenv from "dotenv";
|
|
|
4
4
|
import { createRunCommand } from "./commands/backend/run.js";
|
|
5
5
|
import { createGetCommand } from "./commands/backend/get.js";
|
|
6
6
|
import { createUsageCommand } from "./commands/backend/usage.js";
|
|
7
|
+
import { createScanGroupCommand } from "./commands/scan/index.js";
|
|
7
8
|
import { createAgentCommand } from "./commands/agent/index.js";
|
|
8
9
|
import { createCiCommand } from "./commands/ci/index.js";
|
|
9
10
|
import { createHookCommand } from "./commands/hook/index.js";
|
|
10
11
|
import { createMcpCommand } from "./commands/mcp/index.js";
|
|
11
12
|
import { createPolicyCommand } from "./commands/policy/index.js";
|
|
12
13
|
import { createCompletionCommand } from "./commands/completion.js";
|
|
14
|
+
import { createIssuesCommand } from "./commands/issues/index.js";
|
|
13
15
|
import { checkForUpdate } from "./utils/update-checker.js";
|
|
14
16
|
import { setAgentMode } from "./utils/formatter.js";
|
|
15
17
|
dotenv.config();
|
|
16
|
-
const VERSION = "0.5.
|
|
18
|
+
const VERSION = "0.5.7";
|
|
17
19
|
const program = new Command()
|
|
18
20
|
.name("rafter")
|
|
19
21
|
.description("Rafter CLI")
|
|
@@ -26,10 +28,12 @@ program.hook("preAction", (thisCommand) => {
|
|
|
26
28
|
setAgentMode(true);
|
|
27
29
|
}
|
|
28
30
|
});
|
|
29
|
-
// Backend commands
|
|
31
|
+
// Backend commands
|
|
30
32
|
program.addCommand(createRunCommand());
|
|
31
33
|
program.addCommand(createGetCommand());
|
|
32
34
|
program.addCommand(createUsageCommand());
|
|
35
|
+
// Scan command group (default: remote backend scan; subcommands: local, remote)
|
|
36
|
+
program.addCommand(createScanGroupCommand());
|
|
33
37
|
// Agent commands
|
|
34
38
|
program.addCommand(createAgentCommand());
|
|
35
39
|
// CI commands
|
|
@@ -40,6 +44,8 @@ program.addCommand(createHookCommand());
|
|
|
40
44
|
program.addCommand(createMcpCommand());
|
|
41
45
|
// Policy commands
|
|
42
46
|
program.addCommand(createPolicyCommand());
|
|
47
|
+
// GitHub Issues integration
|
|
48
|
+
program.addCommand(createIssuesCommand());
|
|
43
49
|
// Shell completions
|
|
44
50
|
program.addCommand(createCompletionCommand());
|
|
45
51
|
// Non-blocking update check — runs after command, prints to stderr
|
|
@@ -44,11 +44,7 @@ export class GitleaksScanner {
|
|
|
44
44
|
};
|
|
45
45
|
}
|
|
46
46
|
catch (e) {
|
|
47
|
-
//
|
|
48
|
-
if (fs.existsSync(tmpReport)) {
|
|
49
|
-
fs.unlinkSync(tmpReport);
|
|
50
|
-
}
|
|
51
|
-
// Gitleaks exits with code 1 when leaks found
|
|
47
|
+
// Gitleaks exits with code 1 when leaks found — read report before cleanup
|
|
52
48
|
if (e.code === 1 && fs.existsSync(tmpReport)) {
|
|
53
49
|
const results = this.parseResults(tmpReport);
|
|
54
50
|
fs.unlinkSync(tmpReport);
|
|
@@ -57,6 +53,10 @@ export class GitleaksScanner {
|
|
|
57
53
|
matches: results.map(r => this.convertToPatternMatch(r))
|
|
58
54
|
};
|
|
59
55
|
}
|
|
56
|
+
// Clean up report for non-leak errors
|
|
57
|
+
if (fs.existsSync(tmpReport)) {
|
|
58
|
+
fs.unlinkSync(tmpReport);
|
|
59
|
+
}
|
|
60
60
|
throw new Error(`Gitleaks scan failed: ${e.message}`);
|
|
61
61
|
}
|
|
62
62
|
}
|
|
@@ -57,7 +57,18 @@ export class RegexScanner {
|
|
|
57
57
|
".next",
|
|
58
58
|
"coverage",
|
|
59
59
|
".vscode",
|
|
60
|
-
".idea"
|
|
60
|
+
".idea",
|
|
61
|
+
// Vendored / virtual-env / generated dirs that cause false positives
|
|
62
|
+
"vendor",
|
|
63
|
+
".venv",
|
|
64
|
+
"venv",
|
|
65
|
+
"__pycache__",
|
|
66
|
+
".tox",
|
|
67
|
+
".mypy_cache",
|
|
68
|
+
".pytest_cache",
|
|
69
|
+
"results",
|
|
70
|
+
".terraform",
|
|
71
|
+
"bower_components"
|
|
61
72
|
];
|
|
62
73
|
// Merge policy excludePaths into the exclude list
|
|
63
74
|
if (options?.excludePaths) {
|
|
@@ -90,13 +90,13 @@ export const DEFAULT_SECRET_PATTERNS = [
|
|
|
90
90
|
// Generic patterns
|
|
91
91
|
{
|
|
92
92
|
name: "Generic API Key",
|
|
93
|
-
regex: "(?i)(api[_-]?key|apikey)[\\s]*[:=][\\s]*['\"]
|
|
93
|
+
regex: "(?i)(?<![a-zA-Z0-9_])(api[_-]?key|apikey)[\\s]*[:=][\\s]*['\"](?=[0-9a-zA-Z\\-_]*[0-9])[0-9a-zA-Z\\-_]{16,}['\"]",
|
|
94
94
|
severity: "high",
|
|
95
95
|
description: "Generic API key pattern detected"
|
|
96
96
|
},
|
|
97
97
|
{
|
|
98
98
|
name: "Generic Secret",
|
|
99
|
-
regex: "(?i)(secret|password|passwd|pwd)[\\s]*[:=][\\s]*['\"]
|
|
99
|
+
regex: "(?i)(?<![a-zA-Z0-9_])(secret|password|passwd|pwd)[\\s]*[:=][\\s]*['\"](?=[^\\s'\"]*[0-9])(?=[^\\s'\"]*[a-zA-Z])[0-9a-zA-Z\\-_!@#$%^&*()]{12,}['\"]",
|
|
100
100
|
severity: "high",
|
|
101
101
|
description: "Generic secret pattern detected"
|
|
102
102
|
},
|
|
@@ -108,7 +108,7 @@ export const DEFAULT_SECRET_PATTERNS = [
|
|
|
108
108
|
},
|
|
109
109
|
{
|
|
110
110
|
name: "Bearer Token",
|
|
111
|
-
regex: "(?i)bearer[\\s]+[a-zA-Z0-9\\-_\\.=]
|
|
111
|
+
regex: "(?i)bearer[\\s]+(?=[a-zA-Z0-9\\-_\\.=]*[0-9])(?=[a-zA-Z0-9\\-_\\.=]*[a-zA-Z])[a-zA-Z0-9\\-_\\.=]{20,}",
|
|
112
112
|
severity: "high",
|
|
113
113
|
description: "Bearer token detected"
|
|
114
114
|
},
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import fs from "fs";
|
|
2
|
+
import os from "os";
|
|
2
3
|
import path from "path";
|
|
3
4
|
import https from "https";
|
|
4
5
|
import { exec, execSync } from "child_process";
|
|
@@ -6,7 +7,7 @@ import { promisify } from "util";
|
|
|
6
7
|
import { getBinDir } from "../core/config-defaults.js";
|
|
7
8
|
import * as tar from "tar";
|
|
8
9
|
const execAsync = promisify(exec);
|
|
9
|
-
const GITLEAKS_VERSION = "8.18.2";
|
|
10
|
+
export const GITLEAKS_VERSION = "8.18.2";
|
|
10
11
|
export class BinaryManager {
|
|
11
12
|
constructor() {
|
|
12
13
|
this.binDir = getBinDir();
|
|
@@ -74,10 +75,10 @@ export class BinaryManager {
|
|
|
74
75
|
return false;
|
|
75
76
|
}
|
|
76
77
|
try {
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
});
|
|
80
|
-
return
|
|
78
|
+
// execAsync rejects on non-zero exit, so reaching here means exit code 0.
|
|
79
|
+
// Accept any successful exit — don't require specific stdout content.
|
|
80
|
+
await execAsync(`"${this.getGitleaksPath()}" version`, { timeout: 5000 });
|
|
81
|
+
return true;
|
|
81
82
|
}
|
|
82
83
|
catch {
|
|
83
84
|
return false;
|
|
@@ -90,8 +91,9 @@ export class BinaryManager {
|
|
|
90
91
|
const gitleaksPath = binaryPath ?? this.getGitleaksPath();
|
|
91
92
|
try {
|
|
92
93
|
const { stdout, stderr } = await execAsync(`"${gitleaksPath}" version`, { timeout: 5000 });
|
|
93
|
-
|
|
94
|
-
|
|
94
|
+
// execAsync rejects on non-zero exit, so reaching here means exit code 0.
|
|
95
|
+
// Accept any successful exit — don't require specific stdout content.
|
|
96
|
+
return { ok: true, stdout: stdout.trim(), stderr: stderr.trim() };
|
|
95
97
|
}
|
|
96
98
|
catch (e) {
|
|
97
99
|
const err = e;
|
|
@@ -145,9 +147,11 @@ export class BinaryManager {
|
|
|
145
147
|
return lines.join("\n");
|
|
146
148
|
}
|
|
147
149
|
/**
|
|
148
|
-
* Download and install Gitleaks
|
|
150
|
+
* Download and install Gitleaks.
|
|
151
|
+
* @param onProgress Optional progress callback.
|
|
152
|
+
* @param version Gitleaks version to install (defaults to GITLEAKS_VERSION).
|
|
149
153
|
*/
|
|
150
|
-
async downloadGitleaks(onProgress) {
|
|
154
|
+
async downloadGitleaks(onProgress, version = GITLEAKS_VERSION) {
|
|
151
155
|
const log = onProgress || (() => { });
|
|
152
156
|
// Check platform support
|
|
153
157
|
if (!this.isPlatformSupported()) {
|
|
@@ -159,8 +163,8 @@ export class BinaryManager {
|
|
|
159
163
|
}
|
|
160
164
|
const platform = this.getPlatformString();
|
|
161
165
|
const arch = this.getArchString();
|
|
162
|
-
const url = this.getDownloadUrl(platform, arch);
|
|
163
|
-
log(`Downloading Gitleaks v${
|
|
166
|
+
const url = this.getDownloadUrl(platform, arch, version);
|
|
167
|
+
log(`Downloading Gitleaks v${version} for ${platform}/${arch}...`);
|
|
164
168
|
log(` URL: ${url}`);
|
|
165
169
|
const archivePath = path.join(this.binDir, platform === "windows" ? "gitleaks.zip" : "gitleaks.tar.gz");
|
|
166
170
|
try {
|
|
@@ -172,8 +176,7 @@ export class BinaryManager {
|
|
|
172
176
|
// Extract binary
|
|
173
177
|
log("Extracting binary...");
|
|
174
178
|
if (platform === "windows") {
|
|
175
|
-
|
|
176
|
-
throw new Error("Windows support coming soon");
|
|
179
|
+
await this.extractZip(archivePath);
|
|
177
180
|
}
|
|
178
181
|
else {
|
|
179
182
|
await this.extractTarball(archivePath);
|
|
@@ -255,15 +258,15 @@ export class BinaryManager {
|
|
|
255
258
|
throw new Error(`Unsupported architecture: ${arch}`);
|
|
256
259
|
}
|
|
257
260
|
/**
|
|
258
|
-
* Get download URL for platform/arch
|
|
261
|
+
* Get download URL for platform/arch/version
|
|
259
262
|
*/
|
|
260
|
-
getDownloadUrl(platform, arch) {
|
|
261
|
-
const baseUrl = `https://github.com/gitleaks/gitleaks/releases/download/v${
|
|
263
|
+
getDownloadUrl(platform, arch, version = GITLEAKS_VERSION) {
|
|
264
|
+
const baseUrl = `https://github.com/gitleaks/gitleaks/releases/download/v${version}`;
|
|
262
265
|
if (platform === "windows") {
|
|
263
|
-
return `${baseUrl}/gitleaks_${
|
|
266
|
+
return `${baseUrl}/gitleaks_${version}_windows_${arch}.zip`;
|
|
264
267
|
}
|
|
265
268
|
else {
|
|
266
|
-
return `${baseUrl}/gitleaks_${
|
|
269
|
+
return `${baseUrl}/gitleaks_${version}_${platform}_${arch}.tar.gz`;
|
|
267
270
|
}
|
|
268
271
|
}
|
|
269
272
|
/**
|
|
@@ -320,13 +323,49 @@ export class BinaryManager {
|
|
|
320
323
|
});
|
|
321
324
|
}
|
|
322
325
|
/**
|
|
323
|
-
* Extract
|
|
326
|
+
* Extract zip (Windows) — uses PowerShell's Expand-Archive, then copies
|
|
327
|
+
* only the gitleaks.exe binary to binDir. Cleans up the temp extract dir.
|
|
328
|
+
*/
|
|
329
|
+
async extractZip(zipPath) {
|
|
330
|
+
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "rafter-gitleaks-"));
|
|
331
|
+
try {
|
|
332
|
+
// PowerShell 5+ ships on all supported Windows versions
|
|
333
|
+
await execAsync(`powershell -NoProfile -Command "Expand-Archive -Force -LiteralPath '${zipPath}' -DestinationPath '${tempDir}'"`, { timeout: 30000 });
|
|
334
|
+
// Find gitleaks.exe — may be at root or inside a subdirectory
|
|
335
|
+
const findBinary = (dir) => {
|
|
336
|
+
for (const entry of fs.readdirSync(dir)) {
|
|
337
|
+
const full = path.join(dir, entry);
|
|
338
|
+
if (entry === "gitleaks.exe")
|
|
339
|
+
return full;
|
|
340
|
+
if (fs.statSync(full).isDirectory()) {
|
|
341
|
+
const found = findBinary(full);
|
|
342
|
+
if (found)
|
|
343
|
+
return found;
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
return null;
|
|
347
|
+
};
|
|
348
|
+
const found = findBinary(tempDir);
|
|
349
|
+
if (!found)
|
|
350
|
+
throw new Error("gitleaks.exe not found in archive");
|
|
351
|
+
fs.copyFileSync(found, path.join(this.binDir, "gitleaks.exe"));
|
|
352
|
+
}
|
|
353
|
+
finally {
|
|
354
|
+
fs.rmSync(tempDir, { recursive: true, force: true });
|
|
355
|
+
}
|
|
356
|
+
}
|
|
357
|
+
/**
|
|
358
|
+
* Extract tarball — binary only, strip packaging extras (LICENSE, README.md).
|
|
359
|
+
*
|
|
360
|
+
* The gitleaks release tarball has all files at the archive root (no top-level
|
|
361
|
+
* directory), so strip: 0 (the default). With strip: 1, node-tar reduces the
|
|
362
|
+
* single-component paths to empty strings; the filter never matches "gitleaks"
|
|
363
|
+
* and nothing is extracted. The filter alone is sufficient.
|
|
324
364
|
*/
|
|
325
365
|
async extractTarball(tarballPath) {
|
|
326
366
|
await tar.extract({
|
|
327
367
|
file: tarballPath,
|
|
328
368
|
cwd: this.binDir,
|
|
329
|
-
strip: 1,
|
|
330
369
|
filter: (p) => {
|
|
331
370
|
const base = path.basename(p);
|
|
332
371
|
return base === "gitleaks" || base === "gitleaks.exe";
|
|
@@ -156,15 +156,17 @@ export class SkillManager {
|
|
|
156
156
|
async installRafterSkillVerbose(force = false) {
|
|
157
157
|
const skillPath = this.getRafterSkillPath();
|
|
158
158
|
const sourcePath = this.getRafterSkillSourcePath();
|
|
159
|
-
if (
|
|
160
|
-
|
|
159
|
+
// Check if ~/.openclaw exists (the parent dir), not just the skills subdir
|
|
160
|
+
const openclawDir = path.join(os.homedir(), ".openclaw");
|
|
161
|
+
if (!fs.existsSync(openclawDir)) {
|
|
162
|
+
return { ok: false, sourcePath, destPath: skillPath, error: `OpenClaw not found: ${openclawDir}` };
|
|
161
163
|
}
|
|
162
164
|
// Check if already installed and not forcing
|
|
163
165
|
if (!force && this.isRafterSkillInstalled()) {
|
|
164
166
|
return { ok: true, sourcePath, destPath: skillPath };
|
|
165
167
|
}
|
|
166
168
|
try {
|
|
167
|
-
// Ensure skills directory exists
|
|
169
|
+
// Ensure skills directory exists (may not exist on fresh OpenClaw installs)
|
|
168
170
|
const skillsDir = this.getOpenClawSkillsDir();
|
|
169
171
|
if (!fs.existsSync(skillsDir)) {
|
|
170
172
|
fs.mkdirSync(skillsDir, { recursive: true });
|