@rafter-security/cli 0.5.1 → 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/commands/agent/audit-skill.js +6 -0
- package/dist/commands/agent/index.js +4 -0
- package/dist/commands/agent/init.js +81 -32
- package/dist/commands/agent/install-hook.js +2 -1
- package/dist/commands/agent/scan.js +70 -5
- package/dist/commands/agent/status.js +115 -0
- package/dist/commands/agent/verify.js +117 -0
- package/dist/commands/completion.js +170 -0
- package/dist/commands/hook/index.js +2 -0
- package/dist/commands/hook/posttool.js +73 -0
- package/dist/core/config-manager.js +16 -0
- package/dist/core/custom-patterns.js +157 -0
- package/dist/core/risk-rules.js +6 -1
- package/dist/index.js +4 -1
- package/dist/scanners/regex-scanner.js +7 -11
- package/dist/utils/binary-manager.js +100 -7
- package/dist/utils/skill-manager.js +22 -9
- package/package.json +1 -1
- package/resources/rafter-security-skill.md +7 -0
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import { Command } from "commander";
|
|
2
|
+
const BASH_COMPLETION = `
|
|
3
|
+
# rafter bash completion
|
|
4
|
+
# Add to ~/.bashrc: eval "$(rafter completion bash)"
|
|
5
|
+
_rafter_completion() {
|
|
6
|
+
local cur prev words
|
|
7
|
+
COMPREPLY=()
|
|
8
|
+
cur="\${COMP_WORDS[COMP_CWORD]}"
|
|
9
|
+
prev="\${COMP_WORDS[COMP_CWORD-1]}"
|
|
10
|
+
words="\${COMP_WORDS[*]}"
|
|
11
|
+
|
|
12
|
+
local top_cmds="run scan get usage agent ci hook mcp policy completion --help --version"
|
|
13
|
+
local agent_cmds="init scan exec config audit audit-skill install-hook verify status"
|
|
14
|
+
local ci_cmds="init"
|
|
15
|
+
local hook_cmds="pretool posttool"
|
|
16
|
+
local policy_cmds="export"
|
|
17
|
+
|
|
18
|
+
if [[ \${COMP_CWORD} -eq 1 ]]; then
|
|
19
|
+
COMPREPLY=( \$(compgen -W "\${top_cmds}" -- "\${cur}") )
|
|
20
|
+
return 0
|
|
21
|
+
fi
|
|
22
|
+
|
|
23
|
+
case "\${COMP_WORDS[1]}" in
|
|
24
|
+
agent)
|
|
25
|
+
if [[ \${COMP_CWORD} -eq 2 ]]; then
|
|
26
|
+
COMPREPLY=( \$(compgen -W "\${agent_cmds}" -- "\${cur}") )
|
|
27
|
+
fi
|
|
28
|
+
case "\${COMP_WORDS[2]}" in
|
|
29
|
+
scan) COMPREPLY=( \$(compgen -W "--quiet --json --format --staged --diff --engine" -- "\${cur}") ) ;;
|
|
30
|
+
init) COMPREPLY=( \$(compgen -W "--risk-level --skip-gitleaks --skip-openclaw --skip-claude-code --force" -- "\${cur}") ) ;;
|
|
31
|
+
verify) COMPREPLY=() ;;
|
|
32
|
+
status) COMPREPLY=() ;;
|
|
33
|
+
audit-skill) COMPREPLY=( \$(compgen -W "--skip-openclaw --json" -- "\${cur}") ) ;;
|
|
34
|
+
install-hook) COMPREPLY=( \$(compgen -W "--global" -- "\${cur}") ) ;;
|
|
35
|
+
config) COMPREPLY=( \$(compgen -W "show get set" -- "\${cur}") ) ;;
|
|
36
|
+
audit) COMPREPLY=( \$(compgen -W "--last --event --agent --since" -- "\${cur}") ) ;;
|
|
37
|
+
esac
|
|
38
|
+
;;
|
|
39
|
+
hook)
|
|
40
|
+
COMPREPLY=( \$(compgen -W "\${hook_cmds}" -- "\${cur}") )
|
|
41
|
+
;;
|
|
42
|
+
ci)
|
|
43
|
+
if [[ \${COMP_CWORD} -eq 2 ]]; then
|
|
44
|
+
COMPREPLY=( \$(compgen -W "\${ci_cmds}" -- "\${cur}") )
|
|
45
|
+
fi
|
|
46
|
+
;;
|
|
47
|
+
policy)
|
|
48
|
+
COMPREPLY=( \$(compgen -W "\${policy_cmds}" -- "\${cur}") )
|
|
49
|
+
;;
|
|
50
|
+
run|scan)
|
|
51
|
+
COMPREPLY=( \$(compgen -W "--api-key --format --quiet" -- "\${cur}") )
|
|
52
|
+
;;
|
|
53
|
+
completion)
|
|
54
|
+
COMPREPLY=( \$(compgen -W "bash zsh fish" -- "\${cur}") )
|
|
55
|
+
;;
|
|
56
|
+
esac
|
|
57
|
+
}
|
|
58
|
+
complete -F _rafter_completion rafter
|
|
59
|
+
`;
|
|
60
|
+
const ZSH_COMPLETION = `
|
|
61
|
+
# rafter zsh completion
|
|
62
|
+
# Add to ~/.zshrc: eval "$(rafter completion zsh)"
|
|
63
|
+
#compdef rafter
|
|
64
|
+
|
|
65
|
+
_rafter() {
|
|
66
|
+
local state
|
|
67
|
+
typeset -A opt_args
|
|
68
|
+
|
|
69
|
+
_arguments \\
|
|
70
|
+
'1: :->cmd' \\
|
|
71
|
+
'*: :->args'
|
|
72
|
+
|
|
73
|
+
case \$state in
|
|
74
|
+
cmd)
|
|
75
|
+
_values 'command' \\
|
|
76
|
+
'run[Run a security scan via backend]' \\
|
|
77
|
+
'scan[Alias for run]' \\
|
|
78
|
+
'agent[Agent security features]' \\
|
|
79
|
+
'ci[CI/CD integration]' \\
|
|
80
|
+
'hook[Hook handlers]' \\
|
|
81
|
+
'mcp[MCP server]' \\
|
|
82
|
+
'policy[Policy management]' \\
|
|
83
|
+
'completion[Shell completion scripts]'
|
|
84
|
+
;;
|
|
85
|
+
args)
|
|
86
|
+
case \$words[2] in
|
|
87
|
+
agent)
|
|
88
|
+
_values 'subcommand' \\
|
|
89
|
+
'init[Initialize agent security]' \\
|
|
90
|
+
'scan[Scan for secrets]' \\
|
|
91
|
+
'exec[Execute command with security validation]' \\
|
|
92
|
+
'config[Manage configuration]' \\
|
|
93
|
+
'audit[View audit logs]' \\
|
|
94
|
+
'audit-skill[Audit a skill file]' \\
|
|
95
|
+
'install-hook[Install git pre-commit hook]' \\
|
|
96
|
+
'verify[Health check]' \\
|
|
97
|
+
'status[Status dashboard]'
|
|
98
|
+
;;
|
|
99
|
+
hook)
|
|
100
|
+
_values 'subcommand' 'pretool[PreToolUse handler]' 'posttool[PostToolUse handler]'
|
|
101
|
+
;;
|
|
102
|
+
completion)
|
|
103
|
+
_values 'shell' 'bash' 'zsh' 'fish'
|
|
104
|
+
;;
|
|
105
|
+
esac
|
|
106
|
+
;;
|
|
107
|
+
esac
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
_rafter
|
|
111
|
+
`;
|
|
112
|
+
// Fish completion script emitted verbatim by `rafter completion fish`.
// Fix: the top-level command list now includes `scan` and `policy`, matching
// the command sets advertised by the bash and zsh completion scripts above.
const FISH_COMPLETION = `
# rafter fish completion
# Save to ~/.config/fish/completions/rafter.fish
# Or: rafter completion fish > ~/.config/fish/completions/rafter.fish

complete -c rafter -f
complete -c rafter -n '__fish_use_subcommand' -a 'run' -d 'Run a security scan via backend'
complete -c rafter -n '__fish_use_subcommand' -a 'scan' -d 'Alias for run'
complete -c rafter -n '__fish_use_subcommand' -a 'agent' -d 'Agent security features'
complete -c rafter -n '__fish_use_subcommand' -a 'ci' -d 'CI/CD integration'
complete -c rafter -n '__fish_use_subcommand' -a 'hook' -d 'Hook handlers'
complete -c rafter -n '__fish_use_subcommand' -a 'mcp' -d 'MCP server'
complete -c rafter -n '__fish_use_subcommand' -a 'policy' -d 'Policy management'
complete -c rafter -n '__fish_use_subcommand' -a 'completion' -d 'Shell completion scripts'

# agent subcommands
complete -c rafter -n '__fish_seen_subcommand_from agent' -a 'init scan exec config audit audit-skill install-hook verify status'
complete -c rafter -n '__fish_seen_subcommand_from agent; and __fish_seen_subcommand_from scan' -l quiet -s q -d 'Only output if secrets found'
complete -c rafter -n '__fish_seen_subcommand_from agent; and __fish_seen_subcommand_from scan' -l json -d 'JSON output'
complete -c rafter -n '__fish_seen_subcommand_from agent; and __fish_seen_subcommand_from scan' -l format -d 'Output format: text, json, sarif'
complete -c rafter -n '__fish_seen_subcommand_from agent; and __fish_seen_subcommand_from scan' -l staged -d 'Scan staged files'
complete -c rafter -n '__fish_seen_subcommand_from agent; and __fish_seen_subcommand_from scan' -l engine -d 'Engine: gitleaks, patterns, auto'

# hook subcommands
complete -c rafter -n '__fish_seen_subcommand_from hook' -a 'pretool posttool'

# completion subcommands
complete -c rafter -n '__fish_seen_subcommand_from completion' -a 'bash zsh fish'
`;
|
|
139
|
+
/**
 * Build the `completion <shell>` command.
 *
 * Prints the completion script for the requested shell (bash, zsh, or fish)
 * to stdout; exits with status 1 on an unrecognized shell name.
 */
export function createCompletionCommand() {
  // Shell name → completion script; lookup replaces a switch statement.
  const scripts = new Map([
    ["bash", BASH_COMPLETION],
    ["zsh", ZSH_COMPLETION],
    ["fish", FISH_COMPLETION],
  ]);
  return new Command("completion")
    .description("Generate shell completion scripts")
    .argument("<shell>", "Shell type: bash, zsh, or fish")
    .addHelpText("after", `
Examples:
  # bash — add to ~/.bashrc
  eval "$(rafter completion bash)"

  # zsh — add to ~/.zshrc
  eval "$(rafter completion zsh)"

  # fish — save to completions dir
  rafter completion fish > ~/.config/fish/completions/rafter.fish
`)
    .action((shell) => {
      const script = scripts.get(shell.toLowerCase());
      if (script === undefined) {
        console.error(`Unknown shell: ${shell}. Supported: bash, zsh, fish`);
        process.exit(1);
      }
      process.stdout.write(script.trimStart());
    });
}
|
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
import { Command } from "commander";
|
|
2
2
|
import { createHookPretoolCommand } from "./pretool.js";
|
|
3
|
+
import { createHookPosttoolCommand } from "./posttool.js";
|
|
3
4
|
/**
 * Build the `hook` command group, wiring in the pretool and posttool
 * hook handlers for agent platform integration.
 */
export function createHookCommand() {
  const group = new Command("hook")
    .description("Hook handlers for agent platform integration");
  for (const factory of [createHookPretoolCommand, createHookPosttoolCommand]) {
    group.addCommand(factory());
  }
  return group;
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import { Command } from "commander";
|
|
2
|
+
import { RegexScanner } from "../../scanners/regex-scanner.js";
|
|
3
|
+
import { AuditLogger } from "../../core/audit-logger.js";
|
|
4
|
+
/**
 * Build the `hook posttool` command.
 *
 * Reads a JSON payload from stdin, redacts any secrets found in the tool
 * response, and writes a single JSON decision line to stdout. Unparseable
 * input is never fatal — the hook answers {action: "continue"} instead of
 * blocking the agent pipeline.
 */
export function createHookPosttoolCommand() {
  const cmd = new Command("posttool");
  cmd
    .description("PostToolUse hook handler (reads stdin, redacts secrets in output, writes JSON to stdout)")
    .action(async () => {
      const raw = await readStdin();
      let parsed;
      try {
        parsed = JSON.parse(raw);
      } catch {
        // Malformed payload — pass through rather than fail the hook.
        writeOutput({ action: "continue" });
        return;
      }
      writeOutput(evaluateToolResponse(parsed));
    });
  return cmd;
}
|
|
21
|
+
/**
 * Decide what to do with a PostToolUse payload.
 *
 * Scans the string `output` and `content` fields of the tool response for
 * secrets. Returns {action: "continue"} when nothing needs changing, or
 * {action: "modify", tool_response} with the offending fields redacted
 * (and an audit entry recorded) when secrets were found.
 */
function evaluateToolResponse(payload) {
  const response = payload.tool_response;
  if (!response) {
    // No response body — pass through.
    return { action: "continue" };
  }
  const scanner = new RegexScanner();
  const sanitized = { ...response };
  let redactedAny = false;
  // Both fields may carry tool output text, depending on the tool.
  for (const field of ["output", "content"]) {
    const value = response[field];
    if (typeof value === "string" && value && scanner.hasSecrets(value)) {
      sanitized[field] = scanner.redact(value);
      redactedAny = true;
    }
  }
  if (!redactedAny) {
    return { action: "continue" };
  }
  // Audit how many matches were removed; counts are taken from the
  // original (pre-redaction) text.
  const audit = new AuditLogger();
  audit.logContentSanitized(`${payload.tool_name} tool response`, countMatches(scanner, response));
  return { action: "modify", tool_response: sanitized };
}
|
|
52
|
+
/**
 * Count secret matches across the string fields of a tool response.
 * Only used for audit logging; tolerates a null/undefined response.
 */
function countMatches(scanner, tool_response) {
  let total = 0;
  for (const field of ["output", "content"]) {
    const value = tool_response?.[field];
    if (typeof value === "string" && value) {
      total += scanner.scanText(value).length;
    }
  }
  return total;
}
|
|
62
|
+
/**
 * Read all of stdin as UTF-8 text; resolves with the full input once the
 * stream ends. Adapts Node's event-based stdin to a Promise.
 */
function readStdin() {
  return new Promise((resolve) => {
    const chunks = [];
    process.stdin.setEncoding("utf-8");
    process.stdin.on("data", (chunk) => chunks.push(chunk));
    process.stdin.on("end", () => resolve(chunks.join("")));
    process.stdin.resume();
  });
}
|
|
71
|
+
/**
 * Emit a hook decision as a single JSON line on stdout.
 */
function writeOutput(output) {
  const line = `${JSON.stringify(output)}\n`;
  process.stdout.write(line);
}
|
|
@@ -96,6 +96,22 @@ export class ConfigManager {
|
|
|
96
96
|
fs.mkdirSync(dir, { recursive: true });
|
|
97
97
|
}
|
|
98
98
|
}
|
|
99
|
+
// Write patterns/ README if missing
|
|
100
|
+
const patternsReadme = path.join(rafterDir, "patterns", "README.md");
|
|
101
|
+
if (!fs.existsSync(patternsReadme)) {
|
|
102
|
+
fs.writeFileSync(patternsReadme, [
|
|
103
|
+
"# Custom Secret Patterns",
|
|
104
|
+
"",
|
|
105
|
+
"Place custom secret-detection pattern files here.",
|
|
106
|
+
"Each file should contain one regex pattern per line.",
|
|
107
|
+
"",
|
|
108
|
+
"Rafter ships 21 built-in patterns (AWS, GitHub, Stripe, etc.).",
|
|
109
|
+
"Files in this directory extend that set for your environment.",
|
|
110
|
+
"",
|
|
111
|
+
"Support for loading custom patterns from this directory is planned",
|
|
112
|
+
"for a future release.",
|
|
113
|
+
].join("\n"), "utf-8");
|
|
114
|
+
}
|
|
99
115
|
// Create default config if it doesn't exist
|
|
100
116
|
if (!fs.existsSync(this.configPath)) {
|
|
101
117
|
const config = getDefaultConfig();
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Load custom secret patterns from ~/.rafter/patterns/
|
|
3
|
+
* and suppression rules from .rafterignore.
|
|
4
|
+
*/
|
|
5
|
+
import fs from "fs";
|
|
6
|
+
import path from "path";
|
|
7
|
+
import { getRafterDir } from "./config-defaults.js";
|
|
8
|
+
// ---------------------------------------------------------------------------
|
|
9
|
+
// Custom pattern loading
|
|
10
|
+
// ---------------------------------------------------------------------------
|
|
11
|
+
/**
 * Load user-defined patterns from ~/.rafter/patterns/*.txt and *.json.
 *
 * .txt  — one regex per line (comments with # ignored)
 * .json — array of {name, pattern, severity?} objects
 *
 * Returns Pattern[] merged with DEFAULT_SECRET_PATTERNS by callers.
 */
export function loadCustomPatterns() {
  const patternsDir = path.join(getRafterDir(), "patterns");
  if (!fs.existsSync(patternsDir)) {
    return [];
  }
  let entries;
  try {
    entries = fs.readdirSync(patternsDir, { withFileTypes: true });
  } catch {
    // Directory unreadable — treat as having no custom patterns.
    return [];
  }
  const loaded = [];
  for (const entry of entries.filter((e) => e.isFile())) {
    const file = path.join(patternsDir, entry.name);
    switch (path.extname(entry.name).toLowerCase()) {
      case ".txt":
        loaded.push(...loadTxtPatterns(file));
        break;
      case ".json":
        loaded.push(...loadJsonPatterns(file));
        break;
      default:
        // Other extensions are ignored.
        break;
    }
  }
  return loaded;
}
|
|
45
|
+
/**
 * Parse a .txt pattern file: one regex per line; blank lines and #-comment
 * lines are skipped. Unreadable files yield no patterns (best-effort).
 */
function loadTxtPatterns(file) {
  const label = `Custom (${path.basename(file, ".txt")})`;
  try {
    return fs
      .readFileSync(file, "utf-8")
      .split("\n")
      .map((raw) => raw.trim())
      .filter((line) => line && !line.startsWith("#"))
      .map((line) => ({
        name: label,
        regex: line,
        severity: "high",
      }));
  } catch {
    // A broken pattern file must not break the scan.
    return [];
  }
}
|
|
65
|
+
/**
 * Parse a .json pattern file: an array of entries shaped
 * {name?, pattern, severity?, description?}.
 *
 * Entries without a string `pattern` are skipped. A file whose top level is
 * not a JSON array, or that cannot be read/parsed, yields no patterns.
 *
 * Fix: a null or primitive entry in the array used to throw on the
 * `entry.pattern` property access, which the outer catch turned into an
 * empty result — silently discarding every valid pattern in the file.
 * Such entries are now skipped individually.
 */
function loadJsonPatterns(file) {
  try {
    const data = JSON.parse(fs.readFileSync(file, "utf-8"));
    if (!Array.isArray(data)) {
      return [];
    }
    const patterns = [];
    for (const entry of data) {
      // Skip null/non-object/pattern-less entries instead of letting a
      // property access throw and wipe out the whole file.
      if (entry === null || typeof entry !== "object" || typeof entry.pattern !== "string") {
        continue;
      }
      patterns.push({
        name: entry.name ?? `Custom (${path.basename(file, ".json")})`,
        regex: entry.pattern,
        severity: entry.severity ?? "high",
        description: entry.description,
      });
    }
    return patterns;
  } catch {
    // Unreadable or malformed JSON — best-effort, no patterns.
    return [];
  }
}
|
|
87
|
+
/**
 * Parse .rafterignore from the given directory (project root).
 *
 * Format — one entry per line:
 *   path/glob               → suppress all findings in matching files
 *   path/glob:pattern-name  → suppress specific pattern in matching files
 *
 * Lines starting with # are comments. Missing or unreadable files yield
 * an empty rule list.
 */
export function loadSuppressions(projectRoot = process.cwd()) {
  const ignoreFile = path.join(projectRoot, ".rafterignore");
  if (!fs.existsSync(ignoreFile)) {
    return [];
  }
  const rules = [];
  try {
    for (const raw of fs.readFileSync(ignoreFile, "utf-8").split("\n")) {
      const line = raw.trim();
      if (!line || line.startsWith("#")) {
        continue;
      }
      const sep = line.indexOf(":");
      if (sep === -1) {
        rules.push({ pathGlob: line });
        continue;
      }
      rules.push({
        pathGlob: line.slice(0, sep).trim(),
        patternName: line.slice(sep + 1).trim() || undefined,
      });
    }
  } catch {
    // ignore unreadable .rafterignore
  }
  return rules;
}
|
|
124
|
+
/**
 * Returns true if a finding (file path + pattern name) is covered by any
 * suppression rule from .rafterignore.
 */
export function isSuppressed(filePath, patternName, suppressions) {
  return suppressions.some((rule) => {
    if (!matchGlob(rule.pathGlob, filePath)) {
      return false;
    }
    // A rule without a pattern name suppresses everything in the path;
    // otherwise the pattern names are compared case-insensitively.
    return !rule.patternName
      || rule.patternName.toLowerCase() === patternName.toLowerCase();
  });
}
/**
 * Minimal glob matcher: supports * (within segment) and ** (cross-segment).
 * Not full micromatch — covers the 90% case for .rafterignore.
 */
function matchGlob(glob, filePath) {
  // Normalise Windows separators so either style matches.
  const pattern = glob.replace(/\\/g, "/");
  const target = filePath.replace(/\\/g, "/");
  // Escape regex metacharacters, then translate the glob wildcards:
  // ** may cross path segments, * stays within one (\x00 is a temporary
  // placeholder so ** is not clobbered by the * rewrite).
  const translated = pattern
    .replace(/[.+^${}()|[\]\\]/g, "\\$&")
    .replace(/\*\*/g, "\x00")
    .replace(/\*/g, "[^/]*")
    .replace(/\x00/g, ".*");
  try {
    return new RegExp(`(^|/)${translated}(/|$)`).test(target);
  } catch {
    // A glob that produced an invalid regex matches nothing.
    return false;
  }
}
|
package/dist/core/risk-rules.js
CHANGED
|
@@ -17,7 +17,9 @@ export const HIGH_PATTERNS = [
|
|
|
17
17
|
/chmod\s+777/,
|
|
18
18
|
/curl.*\|\s*(bash|sh|zsh|dash)\b/,
|
|
19
19
|
/wget.*\|\s*(bash|sh|zsh|dash)\b/,
|
|
20
|
-
/git\s+push\s
|
|
20
|
+
/git\s+push\s+(--force|-f)\b/,
|
|
21
|
+
/git\s+push\s+--force-(with-lease|if-includes)\b/,
|
|
22
|
+
/git\s+push\s+\S*\s+\+\S+/, // refspec force: git push origin +main
|
|
21
23
|
/docker\s+system\s+prune/,
|
|
22
24
|
/npm\s+publish/,
|
|
23
25
|
/pypi.*upload/,
|
|
@@ -45,6 +47,9 @@ export const DEFAULT_REQUIRE_APPROVAL = [
|
|
|
45
47
|
"wget.*\\|\\s*(bash|sh|zsh|dash)\\b",
|
|
46
48
|
"chmod 777",
|
|
47
49
|
"git push --force",
|
|
50
|
+
"git push -f",
|
|
51
|
+
"git push --force-with-lease",
|
|
52
|
+
"git push --force-if-includes",
|
|
48
53
|
];
|
|
49
54
|
/**
|
|
50
55
|
* Assess risk level of a command string.
|
package/dist/index.js
CHANGED
|
@@ -9,10 +9,11 @@ import { createCiCommand } from "./commands/ci/index.js";
|
|
|
9
9
|
import { createHookCommand } from "./commands/hook/index.js";
|
|
10
10
|
import { createMcpCommand } from "./commands/mcp/index.js";
|
|
11
11
|
import { createPolicyCommand } from "./commands/policy/index.js";
|
|
12
|
+
import { createCompletionCommand } from "./commands/completion.js";
|
|
12
13
|
import { checkForUpdate } from "./utils/update-checker.js";
|
|
13
14
|
import { setAgentMode } from "./utils/formatter.js";
|
|
14
15
|
dotenv.config();
|
|
15
|
-
const VERSION = "0.5.
|
|
16
|
+
const VERSION = "0.5.3";
|
|
16
17
|
const program = new Command()
|
|
17
18
|
.name("rafter")
|
|
18
19
|
.description("Rafter CLI")
|
|
@@ -39,6 +40,8 @@ program.addCommand(createHookCommand());
|
|
|
39
40
|
program.addCommand(createMcpCommand());
|
|
40
41
|
// Policy commands
|
|
41
42
|
program.addCommand(createPolicyCommand());
|
|
43
|
+
// Shell completions
|
|
44
|
+
program.addCommand(createCompletionCommand());
|
|
42
45
|
// Non-blocking update check — runs after command, prints to stderr
|
|
43
46
|
checkForUpdate(VERSION).then((notice) => {
|
|
44
47
|
if (notice)
|
|
@@ -2,9 +2,10 @@ import fs from "fs";
|
|
|
2
2
|
import path from "path";
|
|
3
3
|
import { PatternEngine } from "../core/pattern-engine.js";
|
|
4
4
|
import { DEFAULT_SECRET_PATTERNS } from "./secret-patterns.js";
|
|
5
|
+
import { loadCustomPatterns, loadSuppressions, isSuppressed } from "../core/custom-patterns.js";
|
|
5
6
|
export class RegexScanner {
|
|
6
7
|
constructor(customPatterns) {
|
|
7
|
-
const patterns = [...DEFAULT_SECRET_PATTERNS];
|
|
8
|
+
const patterns = [...DEFAULT_SECRET_PATTERNS, ...loadCustomPatterns()];
|
|
8
9
|
if (customPatterns) {
|
|
9
10
|
for (const cp of customPatterns) {
|
|
10
11
|
patterns.push({
|
|
@@ -15,6 +16,7 @@ export class RegexScanner {
|
|
|
15
16
|
}
|
|
16
17
|
}
|
|
17
18
|
this.engine = new PatternEngine(patterns);
|
|
19
|
+
this.suppressions = loadSuppressions();
|
|
18
20
|
}
|
|
19
21
|
/**
|
|
20
22
|
* Scan a single file for secrets
|
|
@@ -22,18 +24,12 @@ export class RegexScanner {
|
|
|
22
24
|
scanFile(filePath) {
|
|
23
25
|
try {
|
|
24
26
|
const content = fs.readFileSync(filePath, "utf-8");
|
|
25
|
-
const
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
matches
|
|
29
|
-
};
|
|
27
|
+
const raw = this.engine.scanWithPosition(content);
|
|
28
|
+
const matches = raw.filter((m) => !isSuppressed(filePath, m.pattern.name, this.suppressions));
|
|
29
|
+
return { file: filePath, matches };
|
|
30
30
|
}
|
|
31
31
|
catch (e) {
|
|
32
|
-
|
|
33
|
-
return {
|
|
34
|
-
file: filePath,
|
|
35
|
-
matches: []
|
|
36
|
-
};
|
|
32
|
+
return { file: filePath, matches: [] };
|
|
37
33
|
}
|
|
38
34
|
}
|
|
39
35
|
/**
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import fs from "fs";
|
|
2
2
|
import path from "path";
|
|
3
3
|
import https from "https";
|
|
4
|
-
import { exec } from "child_process";
|
|
4
|
+
import { exec, execSync } from "child_process";
|
|
5
5
|
import { promisify } from "util";
|
|
6
6
|
import { getBinDir } from "../core/config-defaults.js";
|
|
7
7
|
import * as tar from "tar";
|
|
@@ -52,6 +52,20 @@ export class BinaryManager {
|
|
|
52
52
|
const gitleaksPath = this.getGitleaksPath();
|
|
53
53
|
return fs.existsSync(gitleaksPath);
|
|
54
54
|
}
|
|
55
|
+
/**
|
|
56
|
+
* Find gitleaks on system PATH (like Python's shutil.which)
|
|
57
|
+
*/
|
|
58
|
+
findGitleaksOnPath() {
|
|
59
|
+
const cmd = process.platform === "win32" ? "where gitleaks" : "which gitleaks";
|
|
60
|
+
try {
|
|
61
|
+
const result = execSync(cmd, { timeout: 5000, encoding: "utf-8" });
|
|
62
|
+
const found = result.trim().split("\n")[0].trim();
|
|
63
|
+
return found || null;
|
|
64
|
+
}
|
|
65
|
+
catch {
|
|
66
|
+
return null;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
55
69
|
/**
|
|
56
70
|
* Verify Gitleaks binary works
|
|
57
71
|
*/
|
|
@@ -69,6 +83,67 @@ export class BinaryManager {
|
|
|
69
83
|
return false;
|
|
70
84
|
}
|
|
71
85
|
}
|
|
86
|
+
/**
|
|
87
|
+
* Run gitleaks version and return {ok, stdout, stderr}
|
|
88
|
+
*/
|
|
89
|
+
async verifyGitleaksVerbose(binaryPath) {
|
|
90
|
+
const gitleaksPath = binaryPath ?? this.getGitleaksPath();
|
|
91
|
+
try {
|
|
92
|
+
const { stdout, stderr } = await execAsync(`"${gitleaksPath}" version`, { timeout: 5000 });
|
|
93
|
+
const ok = stdout.includes("gitleaks version");
|
|
94
|
+
return { ok, stdout: stdout.trim(), stderr: stderr.trim() };
|
|
95
|
+
}
|
|
96
|
+
catch (e) {
|
|
97
|
+
const err = e;
|
|
98
|
+
return {
|
|
99
|
+
ok: false,
|
|
100
|
+
stdout: (err.stdout ?? "").trim(),
|
|
101
|
+
stderr: (err.stderr ?? String(e)).trim(),
|
|
102
|
+
};
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
/**
|
|
106
|
+
* Collect diagnostic context for a failed binary (file type, uname, glibc/musl)
|
|
107
|
+
*/
|
|
108
|
+
async collectBinaryDiagnostics(binaryPath) {
|
|
109
|
+
const gitleaksPath = binaryPath ?? this.getGitleaksPath();
|
|
110
|
+
const lines = [];
|
|
111
|
+
try {
|
|
112
|
+
const { stdout: fileOut } = await execAsync(`file "${gitleaksPath}"`, { timeout: 5000 });
|
|
113
|
+
lines.push(` file: ${fileOut.trim()}`);
|
|
114
|
+
}
|
|
115
|
+
catch {
|
|
116
|
+
lines.push(` file: (unavailable)`);
|
|
117
|
+
}
|
|
118
|
+
try {
|
|
119
|
+
const { stdout: uname } = await execAsync("uname -a", { timeout: 5000 });
|
|
120
|
+
lines.push(` uname: ${uname.trim()}`);
|
|
121
|
+
}
|
|
122
|
+
catch {
|
|
123
|
+
lines.push(` uname: (unavailable)`);
|
|
124
|
+
}
|
|
125
|
+
lines.push(` node arch: ${process.arch}, platform: ${process.platform}`);
|
|
126
|
+
// Detect glibc vs musl on Linux
|
|
127
|
+
if (process.platform === "linux") {
|
|
128
|
+
try {
|
|
129
|
+
const { stdout: ldd } = await execAsync("ldd --version 2>&1 || true", { timeout: 5000 });
|
|
130
|
+
if (ldd.includes("musl")) {
|
|
131
|
+
lines.push(" libc: musl (gitleaks linux builds target glibc; musl systems need a musl build or static binary)");
|
|
132
|
+
}
|
|
133
|
+
else if (ldd.includes("GLIBC") || ldd.includes("GNU")) {
|
|
134
|
+
const match = ldd.match(/(\d+\.\d+)/);
|
|
135
|
+
lines.push(` libc: glibc ${match ? match[1] : "(version unknown)"}`);
|
|
136
|
+
}
|
|
137
|
+
else {
|
|
138
|
+
lines.push(" libc: unknown");
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
catch {
|
|
142
|
+
lines.push(" libc: (detection failed)");
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
return lines.join("\n");
|
|
146
|
+
}
|
|
72
147
|
/**
|
|
73
148
|
* Download and install Gitleaks
|
|
74
149
|
*/
|
|
@@ -86,10 +161,14 @@ export class BinaryManager {
|
|
|
86
161
|
const arch = this.getArchString();
|
|
87
162
|
const url = this.getDownloadUrl(platform, arch);
|
|
88
163
|
log(`Downloading Gitleaks v${GITLEAKS_VERSION} for ${platform}/${arch}...`);
|
|
164
|
+
log(` URL: ${url}`);
|
|
89
165
|
const archivePath = path.join(this.binDir, platform === "windows" ? "gitleaks.zip" : "gitleaks.tar.gz");
|
|
90
166
|
try {
|
|
91
167
|
// Download archive
|
|
92
168
|
await this.downloadFile(url, archivePath, log);
|
|
169
|
+
// Log downloaded file size as basic integrity signal
|
|
170
|
+
const stats = fs.statSync(archivePath);
|
|
171
|
+
log(` Downloaded: ${(stats.size / 1024).toFixed(1)} KB`);
|
|
93
172
|
// Extract binary
|
|
94
173
|
log("Extracting binary...");
|
|
95
174
|
if (platform === "windows") {
|
|
@@ -102,12 +181,22 @@ export class BinaryManager {
|
|
|
102
181
|
// Make executable (Unix systems)
|
|
103
182
|
if (process.platform !== "win32") {
|
|
104
183
|
await execAsync(`chmod +x "${this.getGitleaksPath()}"`);
|
|
184
|
+
log(" chmod +x applied");
|
|
105
185
|
}
|
|
106
|
-
// Verify it works
|
|
107
|
-
const
|
|
108
|
-
if (!
|
|
109
|
-
|
|
186
|
+
// Verify it works — capture output for diagnostics
|
|
187
|
+
const { ok, stdout: verOut, stderr: verErr } = await this.verifyGitleaksVerbose();
|
|
188
|
+
if (!ok) {
|
|
189
|
+
const diag = await this.collectBinaryDiagnostics();
|
|
190
|
+
const binaryPath = this.getGitleaksPath();
|
|
191
|
+
throw new Error(`Gitleaks binary failed to execute.\n` +
|
|
192
|
+
` Binary: ${binaryPath}\n` +
|
|
193
|
+
` URL: ${url}\n` +
|
|
194
|
+
(verOut ? ` gitleaks version stdout: ${verOut}\n` : "") +
|
|
195
|
+
(verErr ? ` gitleaks version stderr: ${verErr}\n` : "") +
|
|
196
|
+
`Diagnostics:\n${diag}\n` +
|
|
197
|
+
`Fix: ensure the binary matches your OS/arch, or install gitleaks manually and ensure it is on PATH.`);
|
|
110
198
|
}
|
|
199
|
+
log(` Verified: ${verOut}`);
|
|
111
200
|
// Clean up archive
|
|
112
201
|
if (fs.existsSync(archivePath)) {
|
|
113
202
|
fs.unlinkSync(archivePath);
|
|
@@ -231,13 +320,17 @@ export class BinaryManager {
|
|
|
231
320
|
});
|
|
232
321
|
}
|
|
233
322
|
/**
|
|
234
|
-
* Extract tarball
|
|
323
|
+
* Extract tarball — binary only, strip packaging extras (LICENSE, README.md)
|
|
235
324
|
*/
|
|
236
325
|
async extractTarball(tarballPath) {
|
|
237
326
|
await tar.extract({
|
|
238
327
|
file: tarballPath,
|
|
239
328
|
cwd: this.binDir,
|
|
240
|
-
strip:
|
|
329
|
+
strip: 1,
|
|
330
|
+
filter: (p) => {
|
|
331
|
+
const base = path.basename(p);
|
|
332
|
+
return base === "gitleaks" || base === "gitleaks.exe";
|
|
333
|
+
},
|
|
241
334
|
});
|
|
242
335
|
}
|
|
243
336
|
}
|