@rafter-security/cli 0.6.6 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +29 -10
  2. package/dist/commands/agent/audit-skill.js +22 -20
  3. package/dist/commands/agent/audit.js +27 -0
  4. package/dist/commands/agent/components.js +800 -0
  5. package/dist/commands/agent/config.js +2 -1
  6. package/dist/commands/agent/disable.js +47 -0
  7. package/dist/commands/agent/enable.js +50 -0
  8. package/dist/commands/agent/exec.js +2 -0
  9. package/dist/commands/agent/index.js +6 -0
  10. package/dist/commands/agent/init.js +162 -163
  11. package/dist/commands/agent/install-hook.js +15 -14
  12. package/dist/commands/agent/list.js +72 -0
  13. package/dist/commands/agent/scan.js +4 -3
  14. package/dist/commands/agent/verify.js +1 -1
  15. package/dist/commands/backend/run.js +12 -3
  16. package/dist/commands/backend/scan-status.js +3 -2
  17. package/dist/commands/brief.js +22 -2
  18. package/dist/commands/ci/init.js +25 -21
  19. package/dist/commands/completion.js +4 -3
  20. package/dist/commands/docs/index.js +18 -0
  21. package/dist/commands/docs/list.js +37 -0
  22. package/dist/commands/docs/show.js +64 -0
  23. package/dist/commands/mcp/server.js +84 -0
  24. package/dist/commands/report.js +42 -41
  25. package/dist/commands/scan/index.js +7 -5
  26. package/dist/commands/skill/index.js +14 -0
  27. package/dist/commands/skill/install.js +89 -0
  28. package/dist/commands/skill/list.js +79 -0
  29. package/dist/commands/skill/registry.js +273 -0
  30. package/dist/commands/skill/remote.js +333 -0
  31. package/dist/commands/skill/review.js +975 -0
  32. package/dist/commands/skill/uninstall.js +65 -0
  33. package/dist/core/audit-logger.js +262 -21
  34. package/dist/core/config-manager.js +3 -0
  35. package/dist/core/docs-loader.js +148 -0
  36. package/dist/core/policy-loader.js +72 -1
  37. package/dist/core/risk-rules.js +16 -3
  38. package/dist/index.js +19 -9
  39. package/dist/scanners/gitleaks.js +6 -2
  40. package/package.json +1 -1
  41. package/resources/skills/rafter/SKILL.md +77 -97
  42. package/resources/skills/rafter/docs/backend.md +106 -0
  43. package/resources/skills/rafter/docs/cli-reference.md +199 -0
  44. package/resources/skills/rafter/docs/finding-triage.md +79 -0
  45. package/resources/skills/rafter/docs/guardrails.md +91 -0
  46. package/resources/skills/rafter/docs/shift-left.md +64 -0
  47. package/resources/skills/rafter-agent-security/SKILL.md +1 -1
  48. package/resources/skills/rafter-code-review/SKILL.md +91 -0
  49. package/resources/skills/rafter-code-review/docs/api.md +90 -0
  50. package/resources/skills/rafter-code-review/docs/asvs.md +120 -0
  51. package/resources/skills/rafter-code-review/docs/cwe-top25.md +78 -0
  52. package/resources/skills/rafter-code-review/docs/investigation-playbook.md +101 -0
  53. package/resources/skills/rafter-code-review/docs/llm.md +87 -0
  54. package/resources/skills/rafter-code-review/docs/web-app.md +84 -0
  55. package/resources/skills/rafter-secure-design/SKILL.md +103 -0
  56. package/resources/skills/rafter-secure-design/docs/api-design.md +97 -0
  57. package/resources/skills/rafter-secure-design/docs/auth.md +67 -0
  58. package/resources/skills/rafter-secure-design/docs/data-storage.md +90 -0
  59. package/resources/skills/rafter-secure-design/docs/dependencies.md +101 -0
  60. package/resources/skills/rafter-secure-design/docs/deployment.md +104 -0
  61. package/resources/skills/rafter-secure-design/docs/ingestion.md +98 -0
  62. package/resources/skills/rafter-secure-design/docs/standards-pointers.md +102 -0
  63. package/resources/skills/rafter-secure-design/docs/threat-modeling.md +128 -0
  64. package/resources/skills/rafter-skill-review/SKILL.md +106 -0
  65. package/resources/skills/rafter-skill-review/docs/authorship-provenance.md +82 -0
  66. package/resources/skills/rafter-skill-review/docs/changelog-review.md +99 -0
  67. package/resources/skills/rafter-skill-review/docs/data-practices.md +88 -0
  68. package/resources/skills/rafter-skill-review/docs/malware-indicators.md +79 -0
  69. package/resources/skills/rafter-skill-review/docs/prompt-injection.md +85 -0
  70. package/resources/skills/rafter-skill-review/docs/telemetry.md +78 -0
@@ -0,0 +1,333 @@
1
+ // Remote source resolution and persistent cache for `rafter skill review`.
2
+ //
3
+ // Accepts three shorthands and a persistent cache, in addition to the local
4
+ // path / raw git URL forms already handled by review.ts:
5
+ //
6
+ // github:owner/repo[/subpath]
7
+ // gitlab:owner/repo[/subpath]
8
+ // npm:<pkg>[@<version>]
9
+ //
10
+ // Cache layout under ~/.rafter/skill-cache/:
11
+ //
12
+ // resolutions/<sha256(shorthand)>.json — {shorthand, sha|version, resolvedAt}
13
+ // content/<key>/ — extracted working tree
14
+ // meta.json — {source, key, sha|version, fetchedAt}
15
+ //
16
+ // The resolution cache memoizes "what SHA is github:foo/bar@HEAD right now?"
17
+ // The content cache memoizes "what does that SHA look like on disk?"
18
+ // Both expire on --cache-ttl (default 24h).
19
+ import fs from "fs";
20
+ import path from "path";
21
+ import os from "os";
22
+ import crypto from "crypto";
23
+ import zlib from "zlib";
24
+ import { spawnSync } from "child_process";
25
+ // tar@7 is dual CJS/ESM. We use its sync API (`tar.x({ sync, file, cwd, strip })`)
26
+ // so the rest of the reviewer stays synchronous.
27
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
28
+ import * as tarModule from "tar";
29
+ export function isShorthand(input) {
30
+ return /^(github|gitlab|npm):/.test(input);
31
+ }
32
+ /**
33
+ * Parse a shorthand source spec. Throws on malformed input.
34
+ */
35
+ export function parseShorthand(input) {
36
+ const m = input.match(/^(github|gitlab|npm):(.+)$/);
37
+ if (!m)
38
+ throw new Error(`Not a shorthand: ${input}`);
39
+ const kind = m[1];
40
+ const tail = m[2];
41
+ if (kind === "npm") {
42
+ // Forms: pkg | pkg@version | @scope/pkg | @scope/pkg@version
43
+ let pkg = tail;
44
+ let version = "latest";
45
+ if (tail.startsWith("@")) {
46
+ // Scoped: locate the second '@' (after the scope)
47
+ const secondAt = tail.indexOf("@", 1);
48
+ if (secondAt !== -1) {
49
+ pkg = tail.slice(0, secondAt);
50
+ version = tail.slice(secondAt + 1) || "latest";
51
+ }
52
+ }
53
+ else {
54
+ const at = tail.indexOf("@");
55
+ if (at !== -1) {
56
+ pkg = tail.slice(0, at);
57
+ version = tail.slice(at + 1) || "latest";
58
+ }
59
+ }
60
+ if (!pkg)
61
+ throw new Error(`Invalid npm shorthand: ${input}`);
62
+ return { kind, raw: input, pkg, version };
63
+ }
64
+ // git-based: owner/repo[/subpath]
65
+ const parts = tail.split("/").filter(Boolean);
66
+ if (parts.length < 2) {
67
+ throw new Error(`Invalid ${kind} shorthand: expected ${kind}:owner/repo[/subpath], got ${input}`);
68
+ }
69
+ const owner = parts[0];
70
+ const repo = parts[1];
71
+ const subpath = parts.slice(2).join("/");
72
+ const host = kind === "github" ? "github.com" : "gitlab.com";
73
+ const gitUrl = `https://${host}/${owner}/${repo}.git`;
74
+ return { kind, raw: input, host, owner, repo, subpath, gitUrl };
75
+ }
76
+ // ── Cache layout ───────────────────────────────────────────────────
77
+ export const DEFAULT_CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24h
78
+ export function defaultCacheRoot() {
79
+ // Honor RAFTER_SKILL_CACHE_DIR for tests.
80
+ if (process.env.RAFTER_SKILL_CACHE_DIR) {
81
+ return process.env.RAFTER_SKILL_CACHE_DIR;
82
+ }
83
+ return path.join(os.homedir(), ".rafter", "skill-cache");
84
+ }
85
+ export function resolutionPath(cacheRoot, shorthand) {
86
+ const hash = crypto.createHash("sha256").update(shorthand).digest("hex").slice(0, 40);
87
+ return path.join(cacheRoot, "resolutions", `${hash}.json`);
88
+ }
89
+ export function contentDir(cacheRoot, key) {
90
+ return path.join(cacheRoot, "content", key);
91
+ }
92
+ function safeSlug(input) {
93
+ return input.replace(/[^a-zA-Z0-9._-]+/g, "_").slice(0, 80);
94
+ }
95
+ export function contentKeyGit(parsed, sha) {
96
+ const owner = safeSlug(parsed.owner ?? "unknown");
97
+ const repo = safeSlug(parsed.repo ?? "unknown");
98
+ return `git-${parsed.kind}-${owner}-${repo}-${sha.slice(0, 40)}`;
99
+ }
100
+ export function contentKeyNpm(pkg, version) {
101
+ return `npm-${safeSlug(pkg)}-${safeSlug(version)}`;
102
+ }
103
+ export function readResolution(cacheRoot, shorthand) {
104
+ const fpath = resolutionPath(cacheRoot, shorthand);
105
+ if (!fs.existsSync(fpath))
106
+ return null;
107
+ try {
108
+ const raw = JSON.parse(fs.readFileSync(fpath, "utf-8"));
109
+ if (typeof raw !== "object" ||
110
+ raw === null ||
111
+ typeof raw.shorthand !== "string" ||
112
+ typeof raw.resolvedAt !== "number") {
113
+ return null;
114
+ }
115
+ return raw;
116
+ }
117
+ catch {
118
+ return null;
119
+ }
120
+ }
121
+ export function writeResolution(cacheRoot, res) {
122
+ const fpath = resolutionPath(cacheRoot, res.shorthand);
123
+ fs.mkdirSync(path.dirname(fpath), { recursive: true });
124
+ fs.writeFileSync(fpath, JSON.stringify(res, null, 2));
125
+ }
126
+ export function resolutionIsFresh(r, ttlMs) {
127
+ return Date.now() - r.resolvedAt < ttlMs;
128
+ }
129
+ export function readContentMeta(cacheRoot, key) {
130
+ const dir = contentDir(cacheRoot, key);
131
+ const meta = path.join(dir, "meta.json");
132
+ if (!fs.existsSync(meta))
133
+ return null;
134
+ try {
135
+ const raw = JSON.parse(fs.readFileSync(meta, "utf-8"));
136
+ if (typeof raw !== "object" ||
137
+ raw === null ||
138
+ typeof raw.source !== "string" ||
139
+ typeof raw.key !== "string" ||
140
+ typeof raw.fetchedAt !== "number") {
141
+ return null;
142
+ }
143
+ return raw;
144
+ }
145
+ catch {
146
+ return null;
147
+ }
148
+ }
149
+ export function contentWorkingTree(cacheRoot, key) {
150
+ return path.join(contentDir(cacheRoot, key), "content");
151
+ }
152
+ export function contentIsUsable(cacheRoot, key) {
153
+ const meta = readContentMeta(cacheRoot, key);
154
+ if (!meta)
155
+ return false;
156
+ const tree = contentWorkingTree(cacheRoot, key);
157
+ if (!fs.existsSync(tree))
158
+ return false;
159
+ try {
160
+ const entries = fs.readdirSync(tree);
161
+ // Empty directory counts as corrupt — a real clone/extract leaves something.
162
+ if (entries.length === 0)
163
+ return false;
164
+ }
165
+ catch {
166
+ return false;
167
+ }
168
+ return true;
169
+ }
170
+ export function dropCacheEntry(cacheRoot, key) {
171
+ const dir = contentDir(cacheRoot, key);
172
+ try {
173
+ fs.rmSync(dir, { recursive: true, force: true });
174
+ }
175
+ catch {
176
+ // ignore
177
+ }
178
+ }
179
+ export const defaultRemoteOps = {
180
+ gitLsRemoteHead(url) {
181
+ const r = spawnSync("git", ["ls-remote", url, "HEAD"], {
182
+ encoding: "utf-8",
183
+ timeout: 30000,
184
+ });
185
+ if (r.status !== 0) {
186
+ const err = (r.stderr ?? "").toString().trim() || "git ls-remote failed";
187
+ throw new Error(`ls-remote ${url}: ${err}`);
188
+ }
189
+ const line = (r.stdout ?? "").split("\n")[0] ?? "";
190
+ const sha = line.split(/\s+/)[0];
191
+ if (!/^[0-9a-f]{40}$/i.test(sha)) {
192
+ throw new Error(`ls-remote ${url}: could not parse SHA from "${line}"`);
193
+ }
194
+ return sha.toLowerCase();
195
+ },
196
+ gitCloneAtSha(url, sha, destDir) {
197
+ // Shallow clone then checkout the pinned SHA. We do a --depth 1 of default
198
+ // branch first (fastest common case) and only fall back to a full fetch if
199
+ // the target SHA isn't HEAD.
200
+ fs.mkdirSync(destDir, { recursive: true });
201
+ const r = spawnSync("git", ["clone", "--depth", "1", "--quiet", url, destDir], { encoding: "utf-8", timeout: 120000 });
202
+ if (r.status !== 0) {
203
+ const err = (r.stderr ?? "").toString().trim() || "git clone failed";
204
+ throw new Error(`clone ${url}: ${err}`);
205
+ }
206
+ // Best-effort: check that the resulting HEAD matches the expected SHA.
207
+ // If not, fetch that specific SHA explicitly.
208
+ const headR = spawnSync("git", ["rev-parse", "HEAD"], {
209
+ cwd: destDir,
210
+ encoding: "utf-8",
211
+ });
212
+ const head = (headR.stdout ?? "").trim().toLowerCase();
213
+ if (head !== sha) {
214
+ const fetchR = spawnSync("git", ["fetch", "--depth", "1", "origin", sha], { cwd: destDir, encoding: "utf-8", timeout: 120000 });
215
+ if (fetchR.status === 0) {
216
+ spawnSync("git", ["checkout", "--quiet", sha], {
217
+ cwd: destDir,
218
+ encoding: "utf-8",
219
+ });
220
+ }
221
+ // If the fetch failed we keep whatever HEAD we have — audit still works,
222
+ // we just mismatched the resolved SHA. Record that in meta.
223
+ }
224
+ },
225
+ npmFetchMetadata(pkg) {
226
+ const encoded = pkg.startsWith("@")
227
+ ? `@${encodeURIComponent(pkg.slice(1))}`
228
+ : encodeURIComponent(pkg);
229
+ const url = `https://registry.npmjs.org/${encoded}`;
230
+ return syncHttpJson(url);
231
+ },
232
+ npmFetchTarball(tarballUrl, destFile) {
233
+ fs.mkdirSync(path.dirname(destFile), { recursive: true });
234
+ // Spawn a short-lived node subprocess that awaits fetch() and streams the
235
+ // tarball to destFile. Keeps the caller synchronous.
236
+ const script = `
237
+ (async () => {
238
+ const fs = require('fs');
239
+ const r = await fetch(${JSON.stringify(tarballUrl)});
240
+ if (!r.ok) { process.stderr.write('HTTP ' + r.status); process.exit(2); }
241
+ const buf = Buffer.from(await r.arrayBuffer());
242
+ fs.writeFileSync(${JSON.stringify(destFile)}, buf);
243
+ })().catch((e) => { process.stderr.write(String(e?.message || e)); process.exit(1); });
244
+ `;
245
+ const r = spawnSync(process.execPath, ["-e", script], {
246
+ encoding: "utf-8",
247
+ timeout: 120000,
248
+ });
249
+ if (r.status !== 0) {
250
+ throw new Error(`fetch ${tarballUrl}: ${(r.stderr ?? "").toString().trim() || "failed"}`);
251
+ }
252
+ },
253
+ };
254
+ // Tiny blocking HTTP-GET-JSON helper. npm registry endpoints are small,
255
+ // latency-insensitive, and called at most once per audit — we do this inline
256
+ // rather than bolting an async path through the whole reviewer.
257
+ function syncHttpJson(url) {
258
+ // Node 18+ has global fetch, but it's async. For synchronous behavior we
259
+ // spawn a short-lived node subprocess. This keeps review.ts synchronous.
260
+ const script = `
261
+ (async () => {
262
+ const r = await fetch(${JSON.stringify(url)});
263
+ if (!r.ok) { process.stderr.write("HTTP " + r.status); process.exit(2); }
264
+ const txt = await r.text();
265
+ process.stdout.write(txt);
266
+ })().catch((e) => { process.stderr.write(String(e?.message || e)); process.exit(1); });
267
+ `;
268
+ const r = spawnSync(process.execPath, ["-e", script], {
269
+ encoding: "utf-8",
270
+ timeout: 30000,
271
+ });
272
+ if (r.status !== 0) {
273
+ throw new Error(`GET ${url}: ${(r.stderr ?? "").toString().trim() || "failed"}`);
274
+ }
275
+ try {
276
+ return JSON.parse(r.stdout ?? "");
277
+ }
278
+ catch (e) {
279
+ throw new Error(`GET ${url}: invalid JSON (${e.message})`);
280
+ }
281
+ }
282
+ // ── Extraction helpers ─────────────────────────────────────────────
283
+ export function extractNpmTarball(tgzFile, destDir) {
284
+ fs.mkdirSync(destDir, { recursive: true });
285
+ // npm tarballs have a leading "package/" directory; strip it.
286
+ // tar@7 exposes a sync option that writes everything before returning.
287
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
288
+ tarModule.x({ sync: true, file: tgzFile, cwd: destDir, strip: 1 });
289
+ }
290
+ /** Gunzip a .tgz file synchronously to a .tar, for fixture generation in tests. */
291
+ export function gunzipFile(src, dest) {
292
+ const zipped = fs.readFileSync(src);
293
+ fs.writeFileSync(dest, zlib.gunzipSync(zipped));
294
+ }
295
+ const SKILL_WALK_SKIP = new Set([".git", "node_modules", ".venv", "__pycache__"]);
296
+ const SKILL_WALK_MAX_FILES = 5000;
297
+ /** Depth-first walk looking for every SKILL.md file. Deterministic order. */
298
+ export function findSkillFiles(root) {
299
+ if (!fs.existsSync(root) || !fs.statSync(root).isDirectory())
300
+ return [];
301
+ const out = [];
302
+ const stack = [root];
303
+ let visited = 0;
304
+ while (stack.length && visited < SKILL_WALK_MAX_FILES) {
305
+ const dir = stack.pop();
306
+ let entries;
307
+ try {
308
+ entries = fs.readdirSync(dir, { withFileTypes: true });
309
+ }
310
+ catch {
311
+ continue;
312
+ }
313
+ // Sort for determinism (stack order reverses; pre-sort so pops ordered).
314
+ entries.sort((a, b) => a.name.localeCompare(b.name));
315
+ for (const entry of [...entries].reverse()) {
316
+ const full = path.join(dir, entry.name);
317
+ if (entry.isDirectory()) {
318
+ if (SKILL_WALK_SKIP.has(entry.name))
319
+ continue;
320
+ stack.push(full);
321
+ }
322
+ else if (entry.isFile()) {
323
+ visited += 1;
324
+ if (entry.name.toLowerCase() === "skill.md") {
325
+ const rel = path.relative(root, dir) || ".";
326
+ out.push({ file: full, dir, relDir: rel });
327
+ }
328
+ }
329
+ }
330
+ }
331
+ out.sort((a, b) => a.relDir.localeCompare(b.relDir));
332
+ return out;
333
+ }