secure-review-extension 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +9 -0
- package/LICENSE +21 -0
- package/README.md +304 -0
- package/bin/secure-review.js +269 -0
- package/extension.js +368 -0
- package/media/shield.png +0 -0
- package/media/shield.svg +6 -0
- package/package.json +323 -0
- package/scripts/bootstrap-review-tools.js +54 -0
- package/src/code-actions.js +47 -0
- package/src/constants.js +20 -0
- package/src/diagnostics.js +41 -0
- package/src/findings-provider.js +78 -0
- package/src/report.js +837 -0
- package/src/scanners/bootstrap-tools.js +303 -0
- package/src/scanners/dynamic-scan.js +224 -0
- package/src/scanners/static-rules.js +497 -0
- package/src/scanners/static-scan.js +341 -0
- package/src/scanners/tool-integrations.js +666 -0
- package/src/scanners/workspace-profile.js +316 -0
- package/src/store.js +49 -0
- package/src/utils.js +24 -0
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
const fs = require("node:fs/promises");
|
|
2
|
+
const path = require("node:path");
|
|
3
|
+
let vscode;
|
|
4
|
+
try {
|
|
5
|
+
vscode = require("vscode");
|
|
6
|
+
} catch {
|
|
7
|
+
vscode = null;
|
|
8
|
+
}
|
|
9
|
+
const { STATIC_RULES } = require("./static-rules");
|
|
10
|
+
const { runRegisteredScanners } = require("./tool-integrations");
|
|
11
|
+
const { buildWorkspaceProfile, buildWorkspaceProfileForRoot } = require("./workspace-profile");
|
|
12
|
+
const { hashFinding, toPosixPath } = require("../utils");
|
|
13
|
+
|
|
14
|
+
// File extensions considered scannable source/config content.
// NOTE(review): this set is not referenced anywhere else in this module —
// confirm whether the workspace-profile builder is meant to consume it,
// or whether it is dead code that can be removed.
const ALLOWED_EXTENSIONS = new Set([
  ".js",
  ".jsx",
  ".mjs",
  ".cjs",
  ".ts",
  ".tsx",
  ".py",
  ".java",
  ".go",
  ".rs",
  ".c",
  ".h",
  ".cpp",
  ".cc",
  ".cxx",
  ".hpp",
  ".hh",
  ".rb",
  ".php",
  ".cs",
  ".json",
  ".yaml",
  ".yml",
  ".toml",
  ".tf",
  ".sh",
  ".env",
  ".properties",
  ".xml"
]);
|
|
45
|
+
|
|
46
|
+
/**
 * Scan every file in the active VS Code workspace.
 * @param {object} config - Config accessor exposing get(key, fallback).
 * @returns {Promise<object[]>} Deduplicated findings for the workspace.
 */
async function scanWorkspace(config) {
  const profile = await buildWorkspaceProfile(config);
  return scanProfile(profile, config);
}
|
|
50
|
+
|
|
51
|
+
/**
 * Scan a workspace rooted at an explicit path (CLI / non-editor entry point).
 * @param {string} workspaceRoot - Absolute path of the workspace root.
 * @param {object} [rawConfig] - Plain key/value configuration overrides.
 * @returns {Promise<object[]>} Deduplicated findings for the workspace.
 */
async function scanWorkspaceAtPath(workspaceRoot, rawConfig = {}) {
  const config = toConfigAccessor(rawConfig);
  const profileOptions = {
    excludeGlobs: config.get("excludeGlobs", []),
    maxFiles: config.get("maxFiles", 400)
  };
  const profile = await buildWorkspaceProfileForRoot(workspaceRoot, profileOptions);
  return scanProfile(profile, config);
}
|
|
59
|
+
|
|
60
|
+
/**
 * Scan the document open in the active editor.
 *
 * Fix: guard against `vscode` being null. The module-level require of
 * "vscode" is wrapped in try/catch so this file can load outside the
 * editor; the original dereferenced `vscode.window` unconditionally and
 * threw a TypeError in that environment (the workspaceRoot lookup below
 * already hedged with `vscode?.`, confirming null is an expected state).
 *
 * @returns {Promise<object[]>} Deduplicated findings for the active file,
 *   or [] when no editor, document content, or vscode API is available.
 */
async function scanCurrentFile() {
  // vscode is null when this module is loaded outside the editor.
  if (!vscode) {
    return [];
  }

  const editor = vscode.window.activeTextEditor;
  if (!editor) {
    return [];
  }

  const content = await readText(editor.document.uri.fsPath);
  if (content === null) {
    return [];
  }

  // Prefer the first workspace folder; fall back to the file's directory.
  const workspaceRoot =
    vscode.workspace?.workspaceFolders?.[0]?.uri.fsPath ||
    path.dirname(editor.document.uri.fsPath);
  return dedupeFindings(scanContent(editor.document.uri.fsPath, content, workspaceRoot));
}
|
|
74
|
+
|
|
75
|
+
/**
 * Run all scanners over an already-built workspace profile.
 *
 * Improvements: the loop-invariant "enableBuiltInRules" lookup is hoisted
 * out of the per-file loop (it was re-queried for every file), and the
 * mixed sync/.then style is replaced with async/await, consistent with
 * the other async functions in this module. Callers still receive a
 * Promise of the combined findings.
 *
 * @param {object} workspaceProfile - { workspaceRoot, files: [{ fsPath, relativePath, content, ext }] }.
 * @param {object} config - Config accessor exposing get(key, fallback).
 * @returns {Promise<object[]>} Deduplicated findings from built-in rules,
 *   repository heuristics, and registered external scanners.
 */
async function scanProfile(workspaceProfile, config) {
  const builtInRulesEnabled = config.get("enableBuiltInRules", true);
  const findings = [];

  if (builtInRulesEnabled) {
    for (const file of workspaceProfile.files) {
      findings.push(...scanContent(file.fsPath, file.content, workspaceProfile.workspaceRoot));
    }
  }

  const relativePaths = new Set(workspaceProfile.files.map((file) => file.relativePath));
  const testedFindings = correlateTestCoverage(findings, relativePaths);
  const repoHeuristics = builtInRulesEnabled
    ? analyzeRepository(workspaceProfile.files, workspaceProfile.workspaceRoot)
    : [];
  const externalFindings = await runRegisteredScanners(config, workspaceProfile);
  return dedupeFindings([...testedFindings, ...repoHeuristics, ...externalFindings]);
}
|
|
93
|
+
|
|
94
|
+
/**
 * Apply every built-in static rule to a single file's content.
 *
 * Fix: the match loop relies on RegExp.lastIndex advancing between exec()
 * calls, which only happens for regexes compiled with the "g" (or "y")
 * flag. `new RegExp(rule.regex)` preserved the rule's flags (or compiled
 * a string pattern with none), so any rule lacking "g" made exec() return
 * the same first match forever — the zero-length guard below never fires
 * because match.index stays ahead of a lastIndex stuck at 0 — hanging the
 * scan. The regex is now recompiled with "g" guaranteed.
 *
 * @param {string} filePath - Absolute path of the file being scanned.
 * @param {string} content - Full text of the file.
 * @param {string} workspaceRoot - Workspace root used to derive relative paths.
 * @returns {object[]} One finding object per rule match.
 */
function scanContent(filePath, content, workspaceRoot) {
  const lines = content.split(/\r?\n/);
  const findings = [];
  const relativePath = toRelativePath(filePath, workspaceRoot);

  for (const rule of STATIC_RULES) {
    const source = rule.regex instanceof RegExp ? rule.regex.source : String(rule.regex);
    const flags = rule.regex instanceof RegExp ? rule.regex.flags : "";
    // Guarantee the global flag so exec() advances through the content.
    const regex = new RegExp(source, flags.includes("g") ? flags : `${flags}g`);
    let match;

    while ((match = regex.exec(content)) !== null) {
      const index = match.index;
      // 1-based line number of the match start.
      const lineNumber = content.slice(0, index).split(/\r?\n/).length;
      const lineText = lines[lineNumber - 1] || "";
      const id = hashFinding([filePath, String(lineNumber), rule.id, rule.title]);

      findings.push({
        id,
        source: "static",
        title: rule.title,
        severity: rule.severity,
        confidence: rule.confidence,
        category: rule.category,
        subcategory: rule.subcategory,
        reviewDomain: rule.reviewDomain,
        filePath,
        relativePath,
        line: lineNumber,
        column: Math.max(1, lineText.indexOf(match[0]) + 1),
        code: rule.id,
        message: `${rule.title} in ${toPosixPath(relativePath)}:${lineNumber}`,
        evidence: lineText.trim(),
        remediation: rule.remediation,
        suggestion: rule.suggestion,
        whyItMatters: rule.whyItMatters,
        standards: rule.standards
      });

      // A zero-length match would otherwise pin exec() to one position.
      if (match.index === regex.lastIndex) {
        regex.lastIndex += 1;
      }
    }
  }

  return findings;
}
|
|
140
|
+
|
|
141
|
+
/**
 * Drop "missing-tests-heuristic" findings when a matching test/spec file
 * for the flagged source file already exists in the workspace. All other
 * findings pass through untouched.
 * @param {object[]} findings - Findings to filter.
 * @param {Set<string>} relativePaths - Relative paths of every scanned file.
 * @returns {object[]} Findings with satisfied test-coverage heuristics removed.
 */
function correlateTestCoverage(findings, relativePaths) {
  const candidates = [...relativePaths].map((candidate) => candidate.toLowerCase());

  return findings.filter((finding) => {
    if (finding.code !== "missing-tests-heuristic") {
      return true;
    }

    // Strip extension and common "src/"/"backend/" prefixes so that
    // "src/user.js" can be matched against "tests/user.spec.js".
    const baseName = (finding.relativePath || "")
      .toLowerCase()
      .replace(/\.[^.]+$/, "")
      .replace(/^src\//, "")
      .replace(/^backend\//, "");

    const covered = candidates.some(
      (lower) => lower.includes(baseName) && /(test|spec)/.test(lower)
    );
    return !covered;
  });
}
|
|
162
|
+
|
|
163
|
+
/**
 * Remove duplicate findings, keeping the first occurrence of each
 * (code, relativePath, line, title) combination in input order.
 * @param {object[]} findings - Possibly-duplicated findings.
 * @returns {object[]} Unique findings, order preserved.
 */
function dedupeFindings(findings) {
  const seen = new Set();

  return findings.filter((finding) => {
    const key = `${finding.code}|${finding.relativePath || ""}|${finding.line || ""}|${finding.title}`;
    if (seen.has(key)) {
      return false;
    }
    seen.add(key);
    return true;
  });
}
|
|
178
|
+
|
|
179
|
+
/**
 * Read a file as UTF-8 text, returning null instead of throwing when the
 * file is missing or unreadable.
 * @param {string} filePath - Absolute path to read.
 * @returns {Promise<string|null>} File contents, or null on any read error.
 */
async function readText(filePath) {
  let text = null;
  try {
    text = await fs.readFile(filePath, "utf8");
  } catch {
    // Best-effort read: callers treat null as "skip this file".
  }
  return text;
}
|
|
186
|
+
|
|
187
|
+
// Public API: workspace-wide scan (VS Code), path-based scan (CLI /
// automation), and single-file scan of the active editor document.
module.exports = {
  scanWorkspace,
  scanWorkspaceAtPath,
  scanCurrentFile
};
|
|
192
|
+
|
|
193
|
+
/**
 * Run repository-level heuristics (manifests, Dockerfiles, CI workflows,
 * API specs, file size) across the scanned files.
 * @param {object[]} files - Workspace profile files ({ relativePath, content, ext, ... }).
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Heuristic findings.
 */
function analyzeRepository(files, workspaceRoot) {
  const findings = [];
  const fileSet = new Set(files.map((file) => file.relativePath));

  const manifest = files.find((file) => path.basename(file.relativePath) === "package.json");
  if (manifest) {
    findings.push(...analyzePackageJson(manifest, fileSet, workspaceRoot));
  }

  const requirements = files.find((file) => path.basename(file.relativePath) === "requirements.txt");
  if (requirements) {
    findings.push(...analyzeRequirements(requirements, workspaceRoot));
  }

  for (const file of files) {
    const rel = file.relativePath;
    if (path.basename(rel) === "Dockerfile") {
      findings.push(...analyzeDockerfile(file, workspaceRoot));
    }
    if (rel.includes(".github/workflows/") && file.ext === ".yml") {
      findings.push(...analyzeGithubWorkflow(file, workspaceRoot));
    }
    const looksLikeApiSpec =
      rel.toLowerCase().includes("openapi") ||
      rel.endsWith("swagger.json") ||
      rel.endsWith("swagger.yaml");
    if (looksLikeApiSpec) {
      findings.push(...analyzeApiSpec(file, workspaceRoot));
    }
    findings.push(...analyzeFileSizeAndComplexity(file, workspaceRoot));
  }

  return findings;
}
|
|
223
|
+
|
|
224
|
+
/**
 * Heuristics for package.json: dependency bloat, loose version pinning,
 * missing lockfile, and scripts that pipe remote content into a shell.
 * A parse failure (or unexpected manifest shape) yields a single
 * manifest-parsing finding instead of throwing.
 * @param {object} file - Profile entry for package.json ({ relativePath, content }).
 * @param {Set<string>} fileSet - Relative paths of all scanned files.
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Manifest-related findings.
 */
function analyzePackageJson(file, fileSet, workspaceRoot) {
  const findings = [];
  try {
    const payload = JSON.parse(file.content);
    const dependencies = { ...(payload.dependencies || {}), ...(payload.devDependencies || {}) };
    const depNames = Object.keys(dependencies);

    if (depNames.length > 80) {
      findings.push(repoFinding(
        workspaceRoot, file.relativePath, "dependency-bloat",
        "High dependency count may increase maintenance and supply-chain risk",
        "medium", "Dependency Risk", "Dependency Bloat", "dependency-risk",
        `Detected ${depNames.length} dependencies in package.json.`,
        "Review whether all dependencies are still required and remove unnecessary packages.",
        "Trim unused dependencies and keep the dependency graph intentionally small.",
        "Large dependency graphs increase attack surface and upgrade complexity.",
        ["Supply Chain Review"]
      ));
    }

    // Versions beginning with ~, ^, or * are considered loosely pinned.
    const loosePins = depNames.filter((name) => /^[~^*]/.test(String(dependencies[name])));
    if (loosePins.length >= 5) {
      findings.push(repoFinding(
        workspaceRoot, file.relativePath, "weak-version-pinning",
        "Dependency version pinning appears loose",
        "medium", "Dependency Risk", "Version Pinning", "dependency-risk",
        loosePins.slice(0, 8).map((name) => `${name}@${dependencies[name]}`).join(", "),
        "Pin critical runtime dependencies more tightly or use a disciplined update workflow.",
        "Adopt explicit upgrade windows and lockfile hygiene for runtime dependencies.",
        "Loose version ranges can increase drift and make builds less predictable.",
        ["Supply Chain Review"]
      ));
    }

    const hasLockfile = ["package-lock.json", "pnpm-lock.yaml", "yarn.lock"]
      .some((lockfile) => fileSet.has(lockfile));
    if (!hasLockfile) {
      findings.push(repoFinding(
        workspaceRoot, file.relativePath, "missing-lockfile",
        "JavaScript workspace appears to be missing a lockfile",
        "medium", "Dependency Risk", "Build Reproducibility", "dependency-risk",
        "No package-lock.json, pnpm-lock.yaml, or yarn.lock detected near package.json.",
        "Commit a lockfile to support reproducible builds and dependency review.",
        "Use a lockfile in version control for deterministic installs and better auditability.",
        "Missing lockfiles make builds less reproducible and can hide dependency drift.",
        ["Build Integrity"]
      ));
    }

    for (const [name, command] of Object.entries(payload.scripts || {})) {
      if (/curl\s+.*\|\s*(bash|sh)/i.test(String(command))) {
        findings.push(repoFinding(
          workspaceRoot, file.relativePath, "unsafe-install-script",
          "Script pipes remote content into a shell",
          "high", "DevOps", "Unsafe Script Pattern", "security",
          `${name}: ${command}`,
          "Avoid piping remote content directly into a shell in package scripts.",
          "Download artifacts explicitly, verify them, and execute only trusted local files.",
          "Remote shell piping increases supply-chain and command-execution risk.",
          ["CWE-494"]
        ));
      }
    }
  } catch {
    findings.push(repoFinding(
      workspaceRoot, file.relativePath, "package-json-parse",
      "package.json could not be parsed during repository review",
      "low", "Reliability", "Manifest Parsing", "reliability",
      "Unable to parse package.json cleanly.",
      "Validate manifest syntax and keep project metadata well-formed.",
      "Fix malformed manifests early to keep tooling predictable.",
      "Broken manifests can disrupt review tooling and build workflows.",
      ["Build Reliability"]
    ));
  }

  return findings;
}
|
|
256
|
+
|
|
257
|
+
/**
 * Heuristic for requirements.txt: flag when three or more non-comment
 * entries lack a version specifier (==, ~=, >=, <=).
 * @param {object} file - Profile entry for requirements.txt.
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Zero or one pinning finding.
 */
function analyzeRequirements(file, workspaceRoot) {
  const entries = file.content
    .split(/\r?\n/)
    .map((line) => line.trim())
    .filter(Boolean);
  const unpinned = entries.filter(
    (line) => !line.startsWith("#") && !/(==|~=|>=|<=)/.test(line)
  );

  if (unpinned.length < 3) {
    return [];
  }

  return [repoFinding(
    workspaceRoot, file.relativePath, "unpinned-python-dependencies",
    "Python dependencies may be insufficiently pinned",
    "medium", "Dependency Risk", "Version Pinning", "dependency-risk",
    unpinned.slice(0, 8).join(", "),
    "Pin Python dependencies to reviewed versions and add an upgrade process.",
    "Use constraints or lock tooling for deterministic Python environments.",
    "Unpinned dependencies increase build drift and make vulnerability management harder.",
    ["Supply Chain Review"]
  )];
}
|
|
266
|
+
|
|
267
|
+
/**
 * Dockerfile heuristics: flag ":latest" base images and the absence of a
 * USER directive (container runs as root).
 * @param {object} file - Profile entry for a Dockerfile.
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Container-related findings.
 */
function analyzeDockerfile(file, workspaceRoot) {
  const findings = [];
  const usesLatestTag = /^FROM\s+.+:latest/im.test(file.content);
  const hasUserDirective = /^USER\s+/im.test(file.content);

  if (usesLatestTag) {
    findings.push(repoFinding(
      workspaceRoot, file.relativePath, "docker-latest-tag",
      "Docker image uses the latest tag",
      "medium", "DevOps", "Container Reproducibility", "outdated-practices",
      "Dockerfile uses a latest base image tag.",
      "Pin the base image to a reviewed version or digest.",
      "Use immutable digests or explicit version tags for container bases.",
      "Latest tags create non-deterministic builds and make rollbacks harder.",
      ["Container Review"]
    ));
  }

  if (!hasUserDirective) {
    findings.push(repoFinding(
      workspaceRoot, file.relativePath, "docker-root-user",
      "Dockerfile does not appear to switch away from root",
      "medium", "DevOps", "Container Privilege", "security",
      "No USER directive detected in Dockerfile.",
      "Run containers as a non-root user where feasible.",
      "Create a dedicated runtime user and drop privileges before launching the app.",
      "Root containers increase blast radius if the application is compromised.",
      ["Container Review"]
    ));
  }

  return findings;
}
|
|
277
|
+
|
|
278
|
+
/**
 * GitHub Actions heuristic: flag workflow steps whose "uses:" reference is
 * pinned to a branch (main/master) or bare major version rather than a SHA.
 * @param {object} file - Profile entry for a workflow YAML file.
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Zero or one CI supply-chain finding.
 */
function analyzeGithubWorkflow(file, workspaceRoot) {
  const loosePinPattern = /uses:\s*([^\n@]+)@(main|master|v?\d+\s*$)/gmi;
  const matches = [...file.content.matchAll(loosePinPattern)];

  if (matches.length === 0) {
    return [];
  }

  const evidence = matches.slice(0, 5).map((match) => match[0].trim()).join("; ");
  return [repoFinding(
    workspaceRoot, file.relativePath, "workflow-unpinned-action",
    "GitHub Actions workflow may use loosely pinned actions",
    "medium", "DevOps", "CI Supply Chain", "security",
    evidence,
    "Pin actions to trusted versions or commit SHAs.",
    "Use immutable action SHAs for sensitive CI workflows.",
    "Loosely pinned CI actions increase supply-chain risk in build pipelines.",
    ["CI/CD Review"]
  )];
}
|
|
286
|
+
|
|
287
|
+
/**
 * API-spec heuristic: flag OpenAPI/Swagger documents with no visible
 * securitySchemes or security declarations.
 * @param {object} file - Profile entry for an API spec file.
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Zero or one security-contract finding.
 */
function analyzeApiSpec(file, workspaceRoot) {
  if (/securitySchemes|security:/i.test(file.content)) {
    return [];
  }

  return [repoFinding(
    workspaceRoot, file.relativePath, "api-spec-missing-security",
    "API specification may be missing explicit security definitions",
    "medium", "API Review", "Security Contract", "architecture",
    "No obvious securitySchemes or security declarations were found in the API spec.",
    "Document authentication and authorization expectations directly in the API contract.",
    "Use the API spec as the source of truth for auth and sensitive endpoint behavior.",
    "Underspecified API security contracts increase integration and review ambiguity.",
    ["API Review"]
  )];
}
|
|
294
|
+
|
|
295
|
+
/**
 * Maintainability heuristic: flag any single file longer than 800 lines.
 * @param {object} file - Profile entry for the file under review.
 * @param {string} workspaceRoot - Absolute workspace root path.
 * @returns {object[]} Zero or one large-file finding.
 */
function analyzeFileSizeAndComplexity(file, workspaceRoot) {
  const lineCount = file.content.split(/\r?\n/).length;

  if (lineCount <= 800) {
    return [];
  }

  return [repoFinding(
    workspaceRoot, file.relativePath, "large-file",
    "Large source file may indicate weak modularity",
    "low", "Maintainability", "Large File", "maintainability",
    `${lineCount} lines detected in a single file.`,
    "Split large files into smaller modules with clearer ownership boundaries.",
    "Separate transport, business logic, and data handling concerns.",
    "Very large files are harder to review, test, and secure correctly.",
    ["Maintainability Review"]
  )];
}
|
|
303
|
+
|
|
304
|
+
/**
 * Build a repository-level finding anchored at line 1, column 1 of the
 * given file, with confidence fixed at "medium" and source "static".
 * @param {string} workspaceRoot - Workspace root used to derive the absolute path (may be falsy).
 * @param {string} relativePath - File path relative to the workspace root.
 * @param {string} code - Stable rule identifier.
 * @param {string} title - Human-readable finding title.
 * @param {string} severity - Severity label (low/medium/high).
 * @param {string} category - Top-level category label.
 * @param {string} subcategory - Category refinement.
 * @param {string} reviewDomain - Review-domain tag used for grouping.
 * @param {string} evidence - Supporting evidence text.
 * @param {string} remediation - Short remediation guidance.
 * @param {string} suggestion - Longer-form suggested practice.
 * @param {string} whyItMatters - Rationale shown to the reviewer.
 * @param {string[]} standards - Related standards/review references.
 * @returns {object} Normalized finding object.
 */
function repoFinding(workspaceRoot, relativePath, code, title, severity, category, subcategory, reviewDomain, evidence, remediation, suggestion, whyItMatters, standards) {
  const absolutePath = workspaceRoot ? path.join(workspaceRoot, relativePath) : relativePath;

  return {
    id: hashFinding([relativePath, code, title]),
    source: "static",
    title,
    severity,
    confidence: "medium",
    category,
    subcategory,
    reviewDomain,
    filePath: absolutePath,
    relativePath,
    line: 1,
    column: 1,
    code,
    message: `${title} in ${toPosixPath(relativePath)}`,
    evidence,
    remediation,
    suggestion,
    whyItMatters,
    standards
  };
}
|
|
327
|
+
|
|
328
|
+
/**
 * Convert an absolute file path to a POSIX-style path relative to the
 * workspace root; without a root, fall back to the bare file name.
 * @param {string} filePath - Absolute file path.
 * @param {string} [workspaceRoot] - Workspace root, if known.
 * @returns {string} Relative path using "/" separators.
 */
function toRelativePath(filePath, workspaceRoot) {
  return workspaceRoot
    ? path.relative(workspaceRoot, filePath).split(path.sep).join("/")
    : path.basename(filePath);
}
|
|
334
|
+
|
|
335
|
+
/**
 * Wrap a plain object in the same get(key, fallback) interface that the
 * VS Code workspace configuration exposes, so scanProfile can consume
 * either source uniformly.
 * @param {object} rawConfig - Plain key/value configuration.
 * @returns {{get: function(string, *): *}} Accessor; get() returns the
 *   stored value, or the fallback only when the value is undefined.
 */
function toConfigAccessor(rawConfig) {
  return {
    get(key, fallback) {
      const value = rawConfig[key];
      // Only undefined falls back; null/0/"" are honored as explicit values.
      return value !== undefined ? value : fallback;
    }
  };
}
|