security-mcp 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +77 -21
- package/defaults/control-catalog.json +157 -0
- package/defaults/security-exceptions.json +4 -0
- package/defaults/security-tools.json +41 -0
- package/dist/ci/pr-gate.js +2 -3
- package/dist/cli/index.js +51 -16
- package/dist/cli/install.js +39 -17
- package/dist/cli/update.js +124 -0
- package/dist/gate/catalog.js +55 -0
- package/dist/gate/checks/ai.js +45 -14
- package/dist/gate/checks/dependencies.js +4 -0
- package/dist/gate/checks/scanners.js +84 -0
- package/dist/gate/checks/secrets.js +53 -26
- package/dist/gate/diff.js +2 -2
- package/dist/gate/evidence.js +116 -0
- package/dist/gate/exceptions.js +85 -0
- package/dist/gate/policy.js +110 -4
- package/dist/mcp/server.js +440 -6
- package/dist/repo/fs.js +10 -5
- package/dist/review/store.js +80 -0
- package/dist/tests/run.js +103 -0
- package/package.json +13 -3
- package/prompts/SECURITY_PROMPT.md +40 -0
- package/skills/senior-security-engineer/SKILL.md +46 -1
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
import { dirname, join } from "node:path";
|
|
4
|
+
import * as https from "node:https";
|
|
5
|
+
// On-disk cache for update checks lives under the user's home directory.
const CACHE_DIR = join(homedir(), ".security-mcp");
const CACHE_PATH = join(CACHE_DIR, "update-check.json");
// Query the npm registry at most once per day.
const CHECK_INTERVAL_MS = 86_400_000; // 24 hours
// Re-show the update banner for the same version at most once per day.
const PROMPT_INTERVAL_MS = 86_400_000; // 24 hours
const REGISTRY_URL = "https://registry.npmjs.org/security-mcp/latest";
|
|
10
|
+
/**
 * Parse a semver-like version string ("1.2.3", "v1.2.3-rc.1", "1.2.3+build").
 * Build metadata (after "+") is now accepted and ignored, matching semver
 * precedence rules; previously such versions failed to parse at all.
 * @param {string} input
 * @returns {{major:number, minor:number, patch:number, prerelease:string|null}|null}
 *   null when the string is not a recognizable version.
 */
function parseVersion(input) {
  const match = input
    .trim()
    .match(/^v?(\d+)\.(\d+)\.(\d+)(?:-([0-9A-Za-z.-]+))?(?:\+[0-9A-Za-z.-]+)?$/);
  if (!match)
    return null;
  return {
    major: Number(match[1]),
    minor: Number(match[2]),
    patch: Number(match[3]),
    prerelease: match[4] ?? null
  };
}
/**
 * Compare two prerelease strings per semver §11: split on ".", compare
 * numeric identifiers numerically, numeric identifiers sort before
 * alphanumeric ones, and fewer identifiers means lower precedence.
 * @returns {-1|0|1}
 */
function comparePrerelease(a, b) {
  const idsA = a.split(".");
  const idsB = b.split(".");
  const shared = Math.min(idsA.length, idsB.length);
  for (let i = 0; i < shared; i++) {
    const idA = idsA[i];
    const idB = idsB[i];
    if (idA === idB)
      continue;
    const numA = /^\d+$/.test(idA) ? Number(idA) : null;
    const numB = /^\d+$/.test(idB) ? Number(idB) : null;
    if (numA !== null && numB !== null)
      return numA < numB ? -1 : 1;
    // Numeric identifiers always have lower precedence than alphanumeric ones.
    if (numA !== null)
      return -1;
    if (numB !== null)
      return 1;
    return idA < idB ? -1 : 1;
  }
  if (idsA.length === idsB.length)
    return 0;
  return idsA.length < idsB.length ? -1 : 1;
}
/**
 * Semver comparison: -1 when a < b, 1 when a > b, 0 when equal or when either
 * version is unparseable (fail open so bad data never triggers a prompt).
 * A release always ranks above any prerelease of the same version.
 *
 * Fix: prerelease identifiers are now compared per the semver spec instead of
 * plain string comparison, which ordered "alpha.10" before "alpha.9".
 */
function compareVersions(a, b) {
  const parsedA = parseVersion(a);
  const parsedB = parseVersion(b);
  if (!parsedA || !parsedB)
    return 0;
  if (parsedA.major !== parsedB.major)
    return parsedA.major < parsedB.major ? -1 : 1;
  if (parsedA.minor !== parsedB.minor)
    return parsedA.minor < parsedB.minor ? -1 : 1;
  if (parsedA.patch !== parsedB.patch)
    return parsedA.patch < parsedB.patch ? -1 : 1;
  if (parsedA.prerelease === parsedB.prerelease)
    return 0;
  if (parsedA.prerelease === null)
    return 1;
  if (parsedB.prerelease === null)
    return -1;
  return comparePrerelease(parsedA.prerelease, parsedB.prerelease);
}
|
|
40
|
+
/**
 * Load the persisted update-check cache.
 * A missing or unreadable/corrupt file yields an empty object.
 */
function readCache() {
  try {
    const raw = readFileSync(CACHE_PATH, "utf-8");
    return JSON.parse(raw);
  }
  catch {
    return {};
  }
}
|
|
48
|
+
/**
 * Persist the update-check cache, creating the cache directory if needed.
 * Write failures are swallowed on purpose.
 */
function writeCache(cache) {
  try {
    mkdirSync(dirname(CACHE_PATH), { recursive: true });
    const serialized = JSON.stringify(cache, null, 2) + "\n";
    writeFileSync(CACHE_PATH, serialized, "utf-8");
  }
  catch {
    // Non-fatal: update notifications should never block command execution.
  }
}
|
|
57
|
+
/**
 * Query the npm registry for the latest published version of security-mcp.
 * Resolves with the version string, or null on any HTTP error status, parse
 * failure, network error, or timeout. Never rejects, so callers need no
 * error handling.
 * @param {number} [timeoutMs=1500] socket inactivity timeout.
 */
function fetchLatestVersion(timeoutMs = 1500) {
  return new Promise((resolve) => {
    const options = { headers: { "User-Agent": "security-mcp-update-checker" } };
    const req = https.get(REGISTRY_URL, options, (res) => {
      const status = res.statusCode ?? 500;
      if (status >= 400) {
        res.resume(); // drain the response so the socket is released
        resolve(null);
        return;
      }
      const chunks = [];
      res.setEncoding("utf8");
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        try {
          const parsed = JSON.parse(chunks.join(""));
          resolve(parsed.version ?? null);
        }
        catch {
          resolve(null);
        }
      });
    });
    req.on("error", () => resolve(null));
    req.setTimeout(timeoutMs, () => {
      req.destroy();
      resolve(null);
    });
  });
}
|
|
89
|
+
/**
 * Decide whether the update banner should be shown again.
 * True when we have never prompted, when the latest version changed since the
 * last prompt, when the stored timestamp is unparseable, or when the
 * per-version prompt interval has elapsed.
 */
function shouldPrompt(cache, latestVersion, now) {
  const { lastPromptedVersion, lastPromptedAt } = cache;
  if (!lastPromptedVersion || !lastPromptedAt)
    return true;
  if (lastPromptedVersion !== latestVersion)
    return true;
  const promptedAtMs = Date.parse(lastPromptedAt);
  if (Number.isNaN(promptedAtMs))
    return true;
  return now - promptedAtMs >= PROMPT_INTERVAL_MS;
}
|
|
99
|
+
/**
 * Print a one-time upgrade hint to stderr when a newer security-mcp exists.
 * Registry lookups and prompts are each rate-limited through the on-disk
 * cache, and every failure path is silent so this never blocks the command
 * the user actually ran.
 * @param {string} currentVersion the running package version.
 */
export async function notifyIfUpdateAvailable(currentVersion) {
  const now = Date.now();
  const cache = readCache();
  const checkedAtMs = cache.lastCheckedAt ? Date.parse(cache.lastCheckedAt) : Number.NaN;
  const needsRefresh = Number.isNaN(checkedAtMs) || now - checkedAtMs >= CHECK_INTERVAL_MS;
  if (needsRefresh) {
    const latestVersion = await fetchLatestVersion();
    if (latestVersion) {
      cache.latestVersion = latestVersion;
    }
    cache.lastCheckedAt = new Date(now).toISOString();
    writeCache(cache);
  }
  const latest = cache.latestVersion;
  if (!latest)
    return;
  if (compareVersions(currentVersion, latest) >= 0)
    return;
  if (!shouldPrompt(cache, latest, now))
    return;
  process.stderr.write(`\nUpdate available: security-mcp ${currentVersion} -> ${latest}\n` +
    "Update command: npm install -g security-mcp@latest\n" +
    "Then refresh editor config: security-mcp install-global\n\n");
  cache.lastPromptedVersion = latest;
  cache.lastPromptedAt = new Date(now).toISOString();
  writeCache(cache);
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, join, resolve } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
// Package root: this module lives two levels down (dist/gate/).
const __dirname = dirname(fileURLToPath(import.meta.url));
const PKG_ROOT = resolve(__dirname, "../..");
// One security control: what it asserts, how it is automated, and which
// surfaces / compliance frameworks it maps to.
const ControlSchema = z.object({
  id: z.string(),
  description: z.string(),
  automation: z.enum(["workflow", "evidence", "tooling", "approval"]),
  surfaces: z.array(z.string()).default(["all"]),
  frameworks: z.array(z.string()).default([]),
  evidence: z.array(z.string()).optional(),
  required_scanners: z.array(z.string()).optional(),
  required_steps: z.array(z.string()).optional()
});
// Top-level shape of control-catalog.json.
const CatalogSchema = z.object({
  version: z.string(),
  controls: z.array(ControlSchema)
});
|
|
21
|
+
/**
 * Read a repo-local JSON config file, falling back to the default shipped
 * with the package. A per-path env-var override (resolved relative to the
 * current working directory) takes precedence when set.
 * @param {string} relPath repo-relative path of the config file.
 * @param {string} fallbackName file name under the package "defaults" dir.
 * @returns {Promise<string>} raw JSON text.
 */
async function readJsonWithFallback(relPath, fallbackName) {
  const overrideEnvMap = {
    ".mcp/catalog/control-catalog.json": "SECURITY_GATE_CONTROL_CATALOG"
  };
  const envName = overrideEnvMap[relPath];
  const overridePath = envName ? process.env[envName] : undefined;
  if (overridePath) {
    return await readFile(join(process.cwd(), overridePath), "utf-8");
  }
  try {
    return await readFile(join(process.cwd(), relPath), "utf-8");
  }
  catch {
    // No repo-local copy: fall back to the packaged default.
    return await readFile(join(PKG_ROOT, "defaults", fallbackName), "utf-8");
  }
}
|
|
36
|
+
/**
 * Load and schema-validate the control catalog (env override, repo-local
 * file, or packaged default). Throws when the JSON fails validation.
 */
export async function loadControlCatalog() {
  const raw = await readJsonWithFallback(".mcp/catalog/control-catalog.json", "control-catalog.json");
  const parsed = JSON.parse(raw);
  return CatalogSchema.parse(parsed);
}
|
|
40
|
+
/**
 * True when a control's declared surfaces intersect the surfaces detected
 * for the repo under review. "all" always applies; "mobile" matches either
 * iOS or Android. Unknown surface names never match.
 */
export function controlApplies(control, surfaces) {
  const active = {
    web: surfaces.web,
    api: surfaces.api,
    infra: surfaces.infra,
    ai: surfaces.ai,
    mobile: surfaces.mobileIos || surfaces.mobileAndroid
  };
  return control.surfaces.some((surface) => surface === "all" || Boolean(active[surface]));
}
|
package/dist/gate/checks/ai.js
CHANGED
|
@@ -1,34 +1,65 @@
|
|
|
1
|
-
import
|
|
1
|
+
import fg from "fast-glob";
|
|
2
|
+
import { readFileSafe } from "../../repo/fs.js";
|
|
3
|
+
const SOURCE_FILE_RE = /\.(ts|tsx|js|jsx|mjs|cjs|py|go|java|json)$/i;
|
|
4
|
+
const SCHEMA_RE = /zod\.object\(|outputSchema|json_schema|JSON schema/i;
|
|
5
|
+
const TOOL_RE = /\bfunction_call\b|\btools?\b\s*[:=]/i;
|
|
6
|
+
const INJECTION_RE = /system prompt|developer message|ignore previous|prompt injection/i;
|
|
2
7
|
export async function checkAi(_) {
|
|
3
8
|
const findings = [];
|
|
4
|
-
const
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
9
|
+
const files = await fg(["**/*.*"], {
|
|
10
|
+
dot: true,
|
|
11
|
+
onlyFiles: true,
|
|
12
|
+
ignore: [
|
|
13
|
+
"**/node_modules/**",
|
|
14
|
+
"**/.git/**",
|
|
15
|
+
"**/dist/**",
|
|
16
|
+
"**/fixtures/**",
|
|
17
|
+
"**/.mcp/**",
|
|
18
|
+
"**/.mcp/reviews/**",
|
|
19
|
+
"**/.mcp/reports/**"
|
|
20
|
+
]
|
|
8
21
|
});
|
|
9
|
-
|
|
10
|
-
|
|
22
|
+
let schemaDetected = false;
|
|
23
|
+
const toolEvidence = [];
|
|
24
|
+
const injectionEvidence = [];
|
|
25
|
+
for (const file of files) {
|
|
26
|
+
if (!SOURCE_FILE_RE.test(file))
|
|
27
|
+
continue;
|
|
28
|
+
let text = "";
|
|
29
|
+
try {
|
|
30
|
+
text = await readFileSafe(file);
|
|
31
|
+
}
|
|
32
|
+
catch {
|
|
33
|
+
continue;
|
|
34
|
+
}
|
|
35
|
+
if (SCHEMA_RE.test(text)) {
|
|
36
|
+
schemaDetected = true;
|
|
37
|
+
}
|
|
38
|
+
if (TOOL_RE.test(text)) {
|
|
39
|
+
toolEvidence.push(file);
|
|
40
|
+
}
|
|
41
|
+
if (INJECTION_RE.test(text)) {
|
|
42
|
+
injectionEvidence.push(file);
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
if (toolEvidence.length > 0 && !schemaDetected) {
|
|
11
46
|
findings.push({
|
|
12
47
|
id: "AI_OUTPUT_BOUNDS_MISSING",
|
|
13
48
|
title: "AI/tooling present but bounded output (schema validation) not detected",
|
|
14
49
|
severity: "HIGH",
|
|
50
|
+
evidence: toolEvidence,
|
|
15
51
|
requiredActions: [
|
|
16
52
|
"Enforce bounded outputs via JSON schema validation for every AI response used by code.",
|
|
17
53
|
"Add prompt-injection defenses: input sanitization, tool allowlists, deny-by-default tool router, and sensitive data redaction."
|
|
18
54
|
]
|
|
19
55
|
});
|
|
20
56
|
}
|
|
21
|
-
|
|
22
|
-
query: "system prompt|developer message|ignore previous|prompt injection",
|
|
23
|
-
isRegex: true,
|
|
24
|
-
maxMatches: 200
|
|
25
|
-
});
|
|
26
|
-
if (systemPromptLeaks.length > 0) {
|
|
57
|
+
if (injectionEvidence.length > 0) {
|
|
27
58
|
findings.push({
|
|
28
59
|
id: "AI_INJECTION_CUES",
|
|
29
60
|
title: "Potential prompt injection cues detected. Requires explicit mitigations and tests.",
|
|
30
61
|
severity: "MEDIUM",
|
|
31
|
-
evidence:
|
|
62
|
+
evidence: injectionEvidence,
|
|
32
63
|
requiredActions: [
|
|
33
64
|
"Add multi-layer prompt-injection protection: instruction hierarchy enforcement, content isolation, tool gating, and output validation.",
|
|
34
65
|
"Add a red-team test harness with injection payloads and exfil attempts."
|
|
@@ -2,7 +2,11 @@ import fg from "fast-glob";
|
|
|
2
2
|
import { readFileSafe } from "../../repo/fs.js";
|
|
3
3
|
export async function checkDependencies(_) {
|
|
4
4
|
const findings = [];
|
|
5
|
+
const manifests = await fg(["package.json"], { dot: true });
|
|
5
6
|
const lockfiles = await fg(["package-lock.json", "pnpm-lock.yaml", "yarn.lock"], { dot: true });
|
|
7
|
+
if (manifests.length === 0 && lockfiles.length === 0) {
|
|
8
|
+
return findings;
|
|
9
|
+
}
|
|
6
10
|
if (lockfiles.length === 0) {
|
|
7
11
|
findings.push({
|
|
8
12
|
id: "LOCKFILE_MISSING",
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, join, resolve } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
import { execa } from "execa";
|
|
6
|
+
// Package root: this module lives three levels down (dist/gate/checks/).
const __dirname = dirname(fileURLToPath(import.meta.url));
const PKG_ROOT = resolve(__dirname, "../../..");
// One scanner entry: how to probe that it is installed (command + args,
// typically --version) and which surfaces require it.
const ScannerSchema = z.object({
  command: z.string(),
  args: z.array(z.string()).default(["--version"]),
  required_for: z.array(z.string()).default(["all"])
});
// Top-level shape of security-tools.json.
const ScannerConfigSchema = z.object({
  version: z.string(),
  fail_closed: z.boolean().default(true),
  scanners: z.record(ScannerSchema)
});
|
|
18
|
+
/**
 * Load the approved scanner configuration. Resolution order:
 * SECURITY_GATE_SCANNERS env override, repo-local .mcp/scanners file,
 * then the default shipped with the package.
 */
async function loadScannerConfig() {
  const parse = (raw) => ScannerConfigSchema.parse(JSON.parse(raw));
  const overridePath = process.env["SECURITY_GATE_SCANNERS"];
  if (overridePath) {
    return parse(await readFile(join(process.cwd(), overridePath), "utf-8"));
  }
  try {
    return parse(await readFile(join(process.cwd(), ".mcp", "scanners", "security-tools.json"), "utf-8"));
  }
  catch {
    return parse(await readFile(join(PKG_ROOT, "defaults", "security-tools.json"), "utf-8"));
  }
}
|
|
33
|
+
/**
 * True when a scanner's required_for list intersects the detected surfaces.
 * "all" always applies; "mobile" matches either iOS or Android. Unknown
 * surface names never match.
 */
function scannerApplies(requiredFor, surfaces) {
  const active = {
    web: surfaces.web,
    api: surfaces.api,
    infra: surfaces.infra,
    ai: surfaces.ai,
    mobile: surfaces.mobileIos || surfaces.mobileAndroid
  };
  return requiredFor.some((surface) => surface === "all" || Boolean(active[surface]));
}
|
|
49
|
+
/**
 * Probe whether a scanner binary is installed and runnable by invoking it
 * with its configured probe args (typically --version). Any spawn failure
 * or non-zero exit counts as "not available".
 */
async function commandExists(command, args) {
  try {
    const { exitCode } = await execa(command, args, { reject: false });
    return exitCode === 0;
  }
  catch {
    return false;
  }
}
|
|
58
|
+
/**
 * Verify that every scanner required for the detected surfaces is runnable.
 * Returns the ids of configured and missing scanners, plus a HIGH finding
 * when scanners are absent and the config enforces fail_closed.
 * @param {{surfaces: object}} opts detected repo surfaces.
 */
export async function checkScannerReadiness(opts) {
  const config = await loadScannerConfig();
  const configured = [];
  const missing = [];
  for (const [scannerId, scanner] of Object.entries(config.scanners)) {
    if (!scannerApplies(scanner.required_for, opts.surfaces))
      continue;
    configured.push(scannerId);
    const runnable = await commandExists(scanner.command, scanner.args);
    if (!runnable) {
      missing.push(scannerId);
    }
  }
  const findings = [];
  if (config.fail_closed && missing.length > 0) {
    findings.push({
      id: "SCANNER_TOOLCHAIN_INCOMPLETE",
      title: "Required security scanners are not installed or not runnable",
      severity: "HIGH",
      evidence: missing,
      requiredActions: [
        "Install the missing scanners or adjust the approved scanner config intentionally.",
        "Do not rely on heuristic checks alone when fail-closed scanner enforcement is enabled."
      ]
    });
  }
  return { findings, configured, missing };
}
|
|
@@ -1,36 +1,63 @@
|
|
|
1
|
-
import
|
|
1
|
+
import fg from "fast-glob";
|
|
2
|
+
import { readFileSafe } from "../../repo/fs.js";
|
|
3
|
+
// Fast heuristic secret signatures scanned once per file. CI also runs a
// dedicated secret scanner; this list is a local, best-effort backup.
const SECRET_PATTERNS = [
  // PEM private key headers (RSA/EC/OpenSSH/DSA or unqualified).
  { name: "private_key_pem", regex: /-----BEGIN (?:RSA |EC |OPENSSH |DSA )?PRIVATE KEY-----/ },
  { name: "aws_access_key", regex: /\bAKIA[0-9A-Z]{16}\b/ },
  { name: "google_api_key", regex: /\bAIza[0-9A-Za-z\-_]{35}\b/ },
  { name: "slack_bot_token", regex: /\bxoxb-[0-9A-Za-z-]{20,}\b/ },
  { name: "llm_api_key", regex: /\bsk-[A-Za-z0-9]{20,}\b/ },
  // Hard-coded SECRET_KEY / PRIVATE_KEY assignments to string literals.
  { name: "secret_key_assignment", regex: /\bSECRET_KEY\s*[:=]\s*["'][^"'\n]{8,}["']/ },
  { name: "private_key_assignment", regex: /\bPRIVATE_KEY\s*[:=]\s*["'][^"'\n]{16,}["']/ }
];
|
|
12
|
+
/**
 * Return the trimmed text of the line containing the given character offset.
 * Used to show one line of context for a secret match without dumping files.
 */
function previewLine(text, index) {
  const precedingNewline = text.lastIndexOf("\n", index);
  const start = precedingNewline === -1 ? 0 : precedingNewline + 1;
  const followingNewline = text.indexOf("\n", index);
  const end = followingNewline === -1 ? text.length : followingNewline;
  return text.slice(start, end).trim();
}
|
|
2
17
|
export async function checkSecrets(_) {
|
|
3
|
-
// CI will also run gitleaks. This is a fast local heuristic backup.
|
|
4
18
|
const findings = [];
|
|
5
|
-
const
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
const
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
19
|
+
const files = await fg(["**/*.*"], {
|
|
20
|
+
dot: true,
|
|
21
|
+
onlyFiles: true,
|
|
22
|
+
ignore: [
|
|
23
|
+
"**/node_modules/**",
|
|
24
|
+
"**/.git/**",
|
|
25
|
+
"**/dist/**",
|
|
26
|
+
"**/fixtures/**",
|
|
27
|
+
"**/.mcp/reviews/**",
|
|
28
|
+
"**/.mcp/reports/**"
|
|
29
|
+
]
|
|
30
|
+
});
|
|
31
|
+
const evidence = [];
|
|
32
|
+
for (const file of files) {
|
|
33
|
+
let text = "";
|
|
34
|
+
try {
|
|
35
|
+
text = await readFileSafe(file);
|
|
36
|
+
}
|
|
37
|
+
catch {
|
|
38
|
+
continue;
|
|
39
|
+
}
|
|
40
|
+
for (const pattern of SECRET_PATTERNS) {
|
|
41
|
+
const match = pattern.regex.exec(text);
|
|
42
|
+
if (!match || match.index === undefined)
|
|
43
|
+
continue;
|
|
44
|
+
evidence.push(`${file}:${pattern.name}:${previewLine(text, match.index)}`);
|
|
45
|
+
if (evidence.length >= 25)
|
|
46
|
+
break;
|
|
47
|
+
}
|
|
48
|
+
if (evidence.length >= 25)
|
|
49
|
+
break;
|
|
50
|
+
}
|
|
51
|
+
if (evidence.length > 0) {
|
|
25
52
|
findings.push({
|
|
26
53
|
id: "POSSIBLE_SECRET",
|
|
27
|
-
title: "Potential secret material detected by heuristic scan",
|
|
54
|
+
title: "Potential secret material detected by whole-repo heuristic scan",
|
|
28
55
|
severity: "CRITICAL",
|
|
29
|
-
evidence
|
|
56
|
+
evidence,
|
|
30
57
|
requiredActions: [
|
|
31
|
-
"Remove secrets from
|
|
58
|
+
"Remove secrets from the affected files immediately.",
|
|
32
59
|
"Rotate any exposed credentials.",
|
|
33
|
-
"Store secrets only in
|
|
60
|
+
"Store secrets only in a dedicated secret manager and keep them out of logs."
|
|
34
61
|
]
|
|
35
62
|
});
|
|
36
63
|
}
|
package/dist/gate/diff.js
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
import { execa } from "execa";
|
|
2
2
|
// Allowlist for git ref strings. Blocks option injection (e.g. --upload-pack=…)
// and git pathspec magic characters. CWE-88 / MITRE ATT&CK T1059.
const SAFE_REF_RE = /^[a-zA-Z0-9_./~^-]+$/;
/**
 * Validate a user-supplied git ref before it is passed to `git` on the CLI.
 * Rejects empty values, characters outside the allowlist, and refs starting
 * with "-": the character class alone still admits option-shaped strings
 * such as "--ours" or "-v", which git would parse as flags rather than refs
 * (git itself forbids refs that begin with a dash).
 * @param {string} name label used in the error message.
 * @param {string} value candidate ref.
 * @throws {Error} when the ref is unsafe.
 */
function validateRef(name, value) {
  if (!value || !SAFE_REF_RE.test(value) || value.startsWith("-")) {
    throw new Error(`Invalid git ref for ${name}: must contain only alphanumerics, _, ., -, /, ~, ^ and must not start with "-"`);
  }
}
|
|
10
10
|
export async function getChangedFiles(opts) {
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import fg from "fast-glob";
|
|
3
|
+
import { dirname, join, resolve } from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import { loadControlCatalog, controlApplies } from "./catalog.js";
|
|
6
|
+
// Package root: this module lives two levels down (dist/gate/).
const __dirname = dirname(fileURLToPath(import.meta.url));
const PKG_ROOT = resolve(__dirname, "../..");
/**
 * Load the evidence map (evidence id -> file globs). Resolution order:
 * SECURITY_GATE_EVIDENCE_MAP env override, repo-local .mcp/mappings file,
 * then the default shipped with the package.
 */
async function loadEvidenceMap() {
  const readJson = async (path) => JSON.parse(await readFile(path, "utf-8"));
  const overridePath = process.env["SECURITY_GATE_EVIDENCE_MAP"];
  if (overridePath) {
    return await readJson(join(process.cwd(), overridePath));
  }
  try {
    return await readJson(join(process.cwd(), ".mcp", "mappings", "evidence-map.json"));
  }
  catch {
    return await readJson(join(PKG_ROOT, "defaults", "evidence-map.json"));
  }
}
|
|
23
|
+
/** Look up the policy requirement matching a catalog control, or undefined. */
function getPolicyControl(policy, control) {
  for (const requirement of policy.requirements) {
    if (requirement.id === control.id) {
      return requirement;
    }
  }
  return undefined;
}
|
|
26
|
+
/**
 * Evaluate evidence coverage for every catalog control.
 * Controls out of scope (wrong surface, or not evidence-automated) are
 * recorded as not_applicable; evidence-automated controls are checked against
 * the evidence map and resolved as satisfied or missing, emitting HIGH
 * findings for unmapped evidence ids and for evidence whose globs match
 * nothing in the repo.
 * @param {{surfaces: object, policy: object}} opts
 * @returns {Promise<{findings: object[], controls: object[]}>}
 */
export async function evaluateEvidenceCoverage(opts) {
  const evidenceMap = await loadEvidenceMap();
  const catalog = await loadControlCatalog();
  const findings = [];
  const controls = [];
  // Shared projection of a catalog control onto a result record.
  const record = (control, status, details) => ({
    id: control.id,
    description: control.description,
    automation: control.automation,
    frameworks: control.frameworks,
    status,
    details
  });
  for (const control of catalog.controls) {
    if (!controlApplies(control, opts.surfaces)) {
      controls.push(record(control, "not_applicable", ["Surface not in scope for this review."]));
      continue;
    }
    if (control.automation !== "evidence") {
      controls.push(record(control, "not_applicable", ["Resolved outside evidence coverage evaluation."]));
      continue;
    }
    // Policy-level evidence overrides the catalog default when present.
    const policyControl = getPolicyControl(opts.policy, control);
    const evidenceIds = policyControl?.evidence ?? control.evidence ?? [];
    const missingMappings = evidenceIds.filter((evidenceId) => !evidenceMap[evidenceId]);
    if (missingMappings.length > 0) {
      findings.push({
        id: "EVIDENCE_MAPPING_MISSING",
        title: `Evidence mapping missing for control ${control.id}`,
        severity: "HIGH",
        evidence: missingMappings,
        requiredActions: [
          "Add the missing evidence IDs to .mcp/mappings/evidence-map.json.",
          "Map each control to file globs that prove the control exists."
        ]
      });
    }
    const matchedEvidence = [];
    const missingEvidence = [];
    for (const evidenceId of evidenceIds) {
      const globs = evidenceMap[evidenceId] ?? [];
      const matches = await fg(globs, {
        dot: true,
        onlyFiles: true,
        ignore: ["**/node_modules/**", "**/.git/**", "**/dist/**"]
      });
      if (matches.length === 0) {
        missingEvidence.push(evidenceId);
      }
      else {
        matchedEvidence.push(`${evidenceId}: ${matches[0]}`);
      }
    }
    if (missingEvidence.length > 0) {
      findings.push({
        id: "CONTROL_EVIDENCE_MISSING",
        title: `Required evidence missing for control ${control.id}`,
        severity: "HIGH",
        evidence: missingEvidence,
        requiredActions: [
          `Implement or surface evidence for control ${control.id}.`,
          "Add or update code, tests, or config so the evidence globs resolve."
        ]
      });
      controls.push(record(control, "missing", missingEvidence));
      continue;
    }
    controls.push(record(control, "satisfied", matchedEvidence));
  }
  return { findings, controls };
}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, join, resolve } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
// Package root: this module lives two levels down (dist/gate/).
const __dirname = dirname(fileURLToPath(import.meta.url));
const PKG_ROOT = resolve(__dirname, "../..");
// One approved security exception: which findings/controls it suppresses,
// who owns and approved it, and when it lapses. Expired exceptions are
// surfaced as findings rather than honored.
const ExceptionSchema = z.object({
  id: z.string(),
  finding_ids: z.array(z.string()).default([]),
  control_ids: z.array(z.string()).default([]),
  justification: z.string(),
  ticket: z.string().optional(),
  owner: z.string(),
  approver: z.string(),
  approval_role: z.string(),
  expires_on: z.string()
});
// Top-level shape of security-exceptions.json.
const ExceptionFileSchema = z.object({
  version: z.string(),
  exceptions: z.array(ExceptionSchema).default([])
});
|
|
22
|
+
/**
 * Read the raw security-exceptions JSON. Resolution order:
 * SECURITY_GATE_EXCEPTIONS env override, repo-local .mcp/exceptions file,
 * then the default shipped with the package.
 * @returns {Promise<string>} raw JSON text.
 */
async function readExceptionsJson() {
  const overridePath = process.env["SECURITY_GATE_EXCEPTIONS"];
  if (overridePath) {
    return await readFile(join(process.cwd(), overridePath), "utf-8");
  }
  try {
    return await readFile(join(process.cwd(), ".mcp", "exceptions", "security-exceptions.json"), "utf-8");
  }
  catch {
    return await readFile(join(PKG_ROOT, "defaults", "security-exceptions.json"), "utf-8");
  }
}
|
|
34
|
+
/** Load and schema-validate the exception file, returning the exception list. */
export async function loadSecurityExceptions() {
  const raw = await readExceptionsJson();
  const parsed = ExceptionFileSchema.parse(JSON.parse(raw));
  return parsed.exceptions;
}
|
|
38
|
+
/**
 * Apply approved security exceptions to a list of findings.
 *
 * A finding is suppressed when a matching, unexpired exception lists its id.
 * When only expired/invalid exceptions match, the finding stays active and a
 * SECURITY_EXCEPTION_EXPIRED finding is raised. Control-level exception ids
 * are collected from unexpired entries for downstream policy evaluation.
 *
 * Fixes over the previous version:
 * - The first exception listing a finding id was used even when it had
 *   expired while a later, still-valid exception also matched; a valid
 *   exception now takes precedence over an expired one.
 * - Expiry is evaluated against a single timestamp captured once, so all
 *   entries are judged consistently within one invocation.
 *
 * @param {Array<{id: string}>} findings gate findings to filter.
 * @returns {Promise<{findings: object[], suppressed: object[], exceptionFindings: object[], activeControlExceptionIds: string[]}>}
 */
export async function applySecurityExceptions(findings) {
  const exceptions = await loadSecurityExceptions();
  const now = Date.now();
  // An exception is active when its expiry parses and lies in the future.
  const isActive = (entry) => {
    const expiresAt = Date.parse(entry.expires_on);
    return !Number.isNaN(expiresAt) && expiresAt >= now;
  };
  const activeControlExceptionIds = new Set();
  for (const entry of exceptions) {
    if (isActive(entry)) {
      for (const controlId of entry.control_ids) {
        activeControlExceptionIds.add(controlId);
      }
    }
  }
  const active = [];
  const suppressed = [];
  const exceptionFindings = [];
  for (const finding of findings) {
    const matches = exceptions.filter((entry) => entry.finding_ids.includes(finding.id));
    if (matches.length === 0) {
      active.push(finding);
      continue;
    }
    // Prefer a still-valid exception over an expired one covering the same finding.
    const valid = matches.find(isActive);
    if (!valid) {
      const expired = matches[0];
      active.push(finding);
      exceptionFindings.push({
        id: "SECURITY_EXCEPTION_EXPIRED",
        title: `Security exception ${expired.id} is expired or invalid`,
        severity: "HIGH",
        evidence: [`Finding: ${finding.id}`, `Owner: ${expired.owner}`, `Expires: ${expired.expires_on}`],
        requiredActions: [
          "Renew or remove the expired exception.",
          "Resolve the underlying finding or obtain a new approved exception."
        ]
      });
      continue;
    }
    suppressed.push({
      finding,
      exceptionId: valid.id,
      expiresOn: valid.expires_on
    });
  }
  return {
    findings: active,
    suppressed,
    exceptionFindings,
    activeControlExceptionIds: Array.from(activeControlExceptionIds)
  };
}
|