security-reporter 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +37 -0
- package/LICENSE +21 -0
- package/README.md +452 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +272 -0
- package/dist/cli.js.map +1 -0
- package/dist/core/checks/docker.d.ts +6 -0
- package/dist/core/checks/docker.d.ts.map +1 -0
- package/dist/core/checks/docker.js +69 -0
- package/dist/core/checks/docker.js.map +1 -0
- package/dist/core/checks/quality.d.ts +6 -0
- package/dist/core/checks/quality.d.ts.map +1 -0
- package/dist/core/checks/quality.js +89 -0
- package/dist/core/checks/quality.js.map +1 -0
- package/dist/core/checks/security.d.ts +6 -0
- package/dist/core/checks/security.d.ts.map +1 -0
- package/dist/core/checks/security.js +927 -0
- package/dist/core/checks/security.js.map +1 -0
- package/dist/core/checks/tests.d.ts +6 -0
- package/dist/core/checks/tests.d.ts.map +1 -0
- package/dist/core/checks/tests.js +204 -0
- package/dist/core/checks/tests.js.map +1 -0
- package/dist/core/html-reporter.d.ts +11 -0
- package/dist/core/html-reporter.d.ts.map +1 -0
- package/dist/core/html-reporter.js +474 -0
- package/dist/core/html-reporter.js.map +1 -0
- package/dist/core/pdf-reporter.d.ts +6 -0
- package/dist/core/pdf-reporter.d.ts.map +1 -0
- package/dist/core/pdf-reporter.js +147 -0
- package/dist/core/pdf-reporter.js.map +1 -0
- package/dist/core/quality-advanced.d.ts +6 -0
- package/dist/core/quality-advanced.d.ts.map +1 -0
- package/dist/core/quality-advanced.js +536 -0
- package/dist/core/quality-advanced.js.map +1 -0
- package/dist/core/reporter.d.ts +18 -0
- package/dist/core/reporter.d.ts.map +1 -0
- package/dist/core/reporter.js +389 -0
- package/dist/core/reporter.js.map +1 -0
- package/dist/core/security-advanced.d.ts +6 -0
- package/dist/core/security-advanced.d.ts.map +1 -0
- package/dist/core/security-advanced.js +676 -0
- package/dist/core/security-advanced.js.map +1 -0
- package/dist/core/validators.d.ts +6 -0
- package/dist/core/validators.d.ts.map +1 -0
- package/dist/core/validators.js +81 -0
- package/dist/core/validators.js.map +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +14 -0
- package/dist/index.js.map +1 -0
- package/dist/interfaces/Types.d.ts +63 -0
- package/dist/interfaces/Types.d.ts.map +1 -0
- package/dist/interfaces/Types.js +3 -0
- package/dist/interfaces/Types.js.map +1 -0
- package/dist/test/security.test.d.ts +2 -0
- package/dist/test/security.test.d.ts.map +1 -0
- package/dist/test/security.test.js +16 -0
- package/dist/test/security.test.js.map +1 -0
- package/dist/test/test.d.ts +2 -0
- package/dist/test/test.d.ts.map +1 -0
- package/dist/test/test.js +11 -0
- package/dist/test/test.js.map +1 -0
- package/dist/test/test.spec.d.ts +2 -0
- package/dist/test/test.spec.d.ts.map +1 -0
- package/dist/test/test.spec.js +7 -0
- package/dist/test/test.spec.js.map +1 -0
- package/dist/types/cli.d.ts +4 -0
- package/dist/types/cli.d.ts.map +1 -0
- package/dist/types/core/checks/docker.d.ts +11 -0
- package/dist/types/core/checks/docker.d.ts.map +1 -0
- package/dist/types/core/checks/quality.d.ts +13 -0
- package/dist/types/core/checks/quality.d.ts.map +1 -0
- package/dist/types/core/checks/security.d.ts +13 -0
- package/dist/types/core/checks/security.d.ts.map +1 -0
- package/dist/types/core/checks/tests.d.ts +9 -0
- package/dist/types/core/checks/tests.d.ts.map +1 -0
- package/dist/types/core/reporter.d.ts +13 -0
- package/dist/types/core/reporter.d.ts.map +1 -0
- package/dist/types/index.d.ts +3 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/interfaces/Types.d.ts +27 -0
- package/dist/types/interfaces/Types.d.ts.map +1 -0
- package/package.json +75 -0
|
@@ -0,0 +1,927 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.runSecurityChecks = void 0;
|
|
37
|
+
const child_process_1 = require("child_process");
|
|
38
|
+
const fs = __importStar(require("fs"));
|
|
39
|
+
const path = __importStar(require("path"));
|
|
40
|
+
/**
 * Run every security-related check concurrently and collect the results.
 *
 * @param {object} config - Security configuration (e.g. .securityrc.json contents).
 * @param {string} projectType - Detected project type, forwarded to the env check.
 * @returns {Promise<object[]>} One result object per check, in a fixed order.
 */
const runSecurityChecks = async (config, projectType) => {
    // All checks are independent, so they run in parallel via Promise.all.
    return Promise.all([
        checkNpmAudit(config),
        checkSecrets(config),
        checkEnvFiles(projectType),
        checkLicenses(config),
        checkPackagePublishSafety(),
        checkLockfilePresence(),
        checkNpmScripts(),
        checkPublishDryRun(config),
        checkSbomGeneration(config),
        checkTyposquatting(config),
    ]);
};
exports.runSecurityChecks = runSecurityChecks;
|
|
59
|
+
/**
 * Check package.json publish safety.
 *
 * A missing manifest skips the check; a private package passes immediately.
 * Otherwise the manifest should have a name and a 'files' allowlist.
 * A generic "run 'npm publish --dry-run'" tip is always appended to the
 * suggestions, but only real findings affect the status.
 *
 * FIX: the unconditional dry-run tip previously forced suggestions.length > 0,
 * so every non-private package was reported "warn" and the
 * "Publish settings look good" branch was unreachable. Status is now derived
 * from actual issues only.
 *
 * @returns {Promise<object>} Check result ("publish safety").
 */
const checkPackagePublishSafety = async () => {
    try {
        const pkgPath = path.join(process.cwd(), "package.json");
        if (!fs.existsSync(pkgPath)) {
            return {
                name: "publish safety",
                status: "skip",
                severity: "info",
                message: "No package.json found",
            };
        }
        const pkgContent = fs.readFileSync(pkgPath, "utf-8");
        const pkg = safeParseJSON(pkgContent, "package.json");
        if (pkg.private === true) {
            // Private packages cannot be published accidentally.
            return {
                name: "publish safety",
                status: "pass",
                severity: "info",
                message: "Package is marked private",
            };
        }
        // Real findings that should downgrade the status.
        const issues = [];
        if (!pkg.files || (Array.isArray(pkg.files) && pkg.files.length === 0)) {
            issues.push("Add a 'files' allowlist in package.json to control published files");
        }
        if (!pkg.name) {
            issues.push("Set a package name in package.json before publishing");
        }
        // Always-on advisory; intentionally excluded from the status decision.
        const suggestions = [
            ...issues,
            "Run 'npm publish --dry-run' before releasing to verify package contents",
        ];
        return {
            name: "publish safety",
            status: issues.length > 0 ? "warn" : "pass",
            severity: issues.length > 0 ? "warning" : "info",
            message: issues.length > 0 ? "Publish safety checks recommend changes" : "Publish settings look good",
            details: {
                name: pkg.name,
                private: pkg.private,
                files: pkg.files,
            },
            suggestions,
        };
    }
    catch (error) {
        return {
            name: "publish safety",
            status: "fail",
            severity: "error",
            message: "Could not evaluate publish safety",
            details: error.message,
        };
    }
};
|
|
114
|
+
/**
 * Check for the presence of a dependency lockfile in the project root.
 *
 * Looks for package-lock.json, yarn.lock, or pnpm-lock.yaml in cwd and
 * reports pass if at least one exists, warn otherwise.
 *
 * @returns {Promise<object>} Check result ("lockfile").
 */
const checkLockfilePresence = async () => {
    try {
        const candidates = ["package-lock.json", "yarn.lock", "pnpm-lock.yaml"];
        const present = [];
        for (const candidate of candidates) {
            if (fs.existsSync(path.join(process.cwd(), candidate))) {
                present.push(candidate);
            }
        }
        if (present.length > 0) {
            return {
                name: "lockfile",
                status: "pass",
                severity: "info",
                message: `Lockfile present: ${present.join(", ")}`,
                details: { found: present },
            };
        }
        return {
            name: "lockfile",
            status: "warn",
            severity: "warning",
            message: "No lockfile found",
            details: { checked: candidates },
            suggestions: [
                "Commit a lockfile (package-lock.json/yarn.lock/pnpm-lock.yaml)",
                "Use 'npm ci' in CI for reproducible installs",
            ],
        };
    }
    catch (error) {
        return {
            name: "lockfile",
            status: "fail",
            severity: "error",
            message: "Could not check lockfile",
            details: error.message,
        };
    }
};
|
|
152
|
+
/**
 * Flag npm lifecycle scripts that execute automatically on install
 * (preinstall/install/postinstall/prepare) — a common supply-chain vector.
 *
 * @returns {Promise<object>} Check result ("npm scripts").
 */
const checkNpmScripts = async () => {
    try {
        const pkgPath = path.join(process.cwd(), "package.json");
        if (!fs.existsSync(pkgPath)) {
            return {
                name: "npm scripts",
                status: "skip",
                severity: "info",
                message: "No package.json found",
            };
        }
        const pkg = safeParseJSON(fs.readFileSync(pkgPath, "utf-8"), "package.json");
        const scripts = pkg.scripts || {};
        // Lifecycle hooks that npm runs automatically during install.
        const lifecycle = ["preinstall", "install", "postinstall", "prepare"];
        const risky = [];
        for (const scriptName of lifecycle) {
            if (scripts[scriptName]) {
                risky.push(scriptName);
            }
        }
        if (risky.length === 0) {
            return {
                name: "npm scripts",
                status: "pass",
                severity: "info",
                message: "No risky lifecycle scripts detected",
            };
        }
        return {
            name: "npm scripts",
            status: "warn",
            severity: "warning",
            message: `Found lifecycle scripts: ${risky.join(", ")}`,
            details: { scripts: risky },
            suggestions: [
                "Avoid running untrusted scripts during install",
                "Consider using --ignore-scripts in CI or allowlist scripts",
                "Review script contents for unexpected network or file operations",
            ],
        };
    }
    catch (error) {
        return {
            name: "npm scripts",
            status: "fail",
            severity: "error",
            message: "Could not inspect npm scripts",
            details: error.message,
        };
    }
};
|
|
201
|
+
/**
 * FIX #1: Run npm audit with spawn instead of exec for security
 * (no shell, so no command injection).
 *
 * Runs `npm audit --json` and summarizes the vulnerability counts.
 * npm audit exits non-zero when vulnerabilities exist, so the catch path
 * also attempts to parse JSON from the failed process's stdout.
 *
 * FIX: the catch path previously summed Object.values(metadata.vulnerabilities),
 * which double-counts on npm v7+ where that object also carries a "total"
 * key alongside the severity buckets. Both paths now sum only the five
 * severity buckets, matching each other.
 *
 * @param {object} config - Security configuration (currently unused here).
 * @returns {Promise<object>} Check result ("npm audit").
 */
const checkNpmAudit = async (config) => {
    var _a, _b;
    // Sum only the recognized severity buckets; ignore any extra keys
    // (e.g. npm v7+'s "total") so the count is never inflated.
    const countVulnerabilities = (v) => (v.info || 0) +
        (v.low || 0) +
        (v.moderate || 0) +
        (v.high || 0) +
        (v.critical || 0);
    try {
        const result = await spawnCommand("npm", ["audit", "--json"], {
            timeout: 30000,
            maxBuffer: 10 * 1024 * 1024,
        });
        const audit = JSON.parse(result.stdout);
        const vulnerabilities = ((_a = audit.metadata) === null || _a === void 0 ? void 0 : _a.vulnerabilities) || {};
        const total = countVulnerabilities(vulnerabilities);
        if (total === 0) {
            return {
                name: "npm audit",
                status: "pass",
                severity: "info",
                message: "No vulnerabilities found",
            };
        }
        const hasHighOrCritical = vulnerabilities.high > 0 || vulnerabilities.critical > 0;
        return {
            name: "npm audit",
            status: hasHighOrCritical ? "fail" : "warn",
            severity: hasHighOrCritical ? "critical" : "warning",
            message: `Found ${total} vulnerabilities`,
            details: vulnerabilities,
            suggestions: ["Run 'npm audit fix' to fix vulnerabilities"],
        };
    }
    catch (error) {
        // npm audit exits with code 1 if vulnerabilities found; the JSON
        // report is still delivered on stdout in that case.
        if (error.stdout) {
            try {
                const audit = JSON.parse(error.stdout);
                const vulnerabilities = ((_b = audit.metadata) === null || _b === void 0 ? void 0 : _b.vulnerabilities) || {};
                const total = countVulnerabilities(vulnerabilities);
                const hasHighOrCritical = vulnerabilities.high > 0 || vulnerabilities.critical > 0;
                return {
                    name: "npm audit",
                    status: hasHighOrCritical ? "fail" : "warn",
                    severity: hasHighOrCritical ? "critical" : "warning",
                    message: `Found ${total} vulnerabilities`,
                    details: vulnerabilities,
                    suggestions: ["Run 'npm audit fix' to fix vulnerabilities"],
                };
            }
            catch {
                // stdout was not parseable audit JSON; fall through to generic failure.
            }
        }
        return {
            name: "npm audit",
            status: "fail",
            severity: "error",
            message: "Could not run npm audit",
            details: error.message,
        };
    }
};
|
|
267
|
+
/**
 * FIX #3: Check for common secrets patterns with SAFE regex patterns
 * (length-limited to prevent ReDoS).
 *
 * Scans every .ts/.js/.jsx/.tsx/.json file under ./src (excluding src/core,
 * dot-directories, node_modules, and symlinks) for hardcoded credentials.
 * Patterns can be overridden via config/patterns.json.
 *
 * FIX: path containment previously used String.prototype.startsWith, which a
 * sibling directory can bypass ("/project-evil".startsWith("/project") is
 * true). Containment is now computed with path.relative, which is
 * separator-aware.
 *
 * @param {object} config - Security configuration; checkSecrets === false skips.
 * @returns {Promise<object>} Check result ("secrets scan").
 */
const checkSecrets = async (config) => {
    if (config.checkSecrets === false) {
        return {
            name: "secrets scan",
            status: "skip",
            severity: "info",
            message: "Secrets scanning disabled",
        };
    }
    // True when `child` is `parent` itself or located underneath it.
    const isInside = (parent, child) => {
        const rel = path.relative(parent, child);
        return rel === "" || (!rel.startsWith("..") && !path.isAbsolute(rel));
    };
    // FIX #3: Safe regex patterns with length limits to prevent ReDoS
    const loadPatterns = () => {
        const cfgPath = path.join(process.cwd(), "config", "patterns.json");
        if (fs.existsSync(cfgPath)) {
            try {
                const raw = fs.readFileSync(cfgPath, "utf-8");
                const list = safeParseJSON(raw, "patterns.json");
                return list.map((p) => ({ name: p.name, pattern: new RegExp(p.pattern, p.flags || "i") }));
            }
            catch {
                // fallthrough to defaults
            }
        }
        // SAFE patterns with length limits to prevent ReDoS
        const defaults = [
            { name: "AWS Access Key", pattern: /AKIA[0-9A-Z]{16}/i },
            { name: "AWS Secret Key", pattern: /aws_secret_access_key\s*[=:]\s*['"]?[A-Za-z0-9/+=]{40}['"]?/i },
            { name: "Stripe Live Key", pattern: /sk_live_[0-9a-zA-Z]{24,99}/i },
            { name: "Google API Key", pattern: /AIza[0-9A-Za-z\-_]{35}/i },
            { name: "GitHub PAT", pattern: /ghp_[0-9a-zA-Z]{36}/i },
            { name: "GitHub PAT (alt)", pattern: /github_pat_[0-9a-zA-Z]{22}_[0-9a-zA-Z]{59}/i },
            // FIXED: Limited length to prevent ReDoS
            { name: "Bearer Token", pattern: /Bearer\s+[A-Za-z0-9\-._~+/]{10,500}/i },
            { name: "JWT Token", pattern: /eyJ[A-Za-z0-9_-]{10,500}\.[A-Za-z0-9_-]{10,500}\.[A-Za-z0-9_-]{10,500}/i },
        ];
        return defaults;
    };
    const patterns = loadPatterns();
    const foundSecrets = [];
    const srcDir = path.join(process.cwd(), "src");
    // Our own scanner sources are excluded to avoid matching the pattern list itself.
    const excludeDirs = [path.join(process.cwd(), "src", "core")];
    if (!fs.existsSync(srcDir)) {
        return {
            name: "secrets scan",
            status: "skip",
            severity: "info",
            message: "No src directory found",
        };
    }
    // FIX #2: Safe directory scanning with path traversal protection
    const scanDirectory = (dir) => {
        const normalizedDir = path.resolve(dir);
        const projectRoot = path.resolve(process.cwd());
        // FIX: separator-aware containment check (not a raw string prefix).
        if (!isInside(projectRoot, normalizedDir)) {
            console.warn(`[Security] Attempted to scan outside project: ${dir}`);
            return;
        }
        let entries;
        try {
            entries = fs.readdirSync(dir, { withFileTypes: true });
        }
        catch (err) {
            // Skip inaccessible directories
            return;
        }
        entries.forEach((entry) => {
            const filePath = path.join(dir, entry.name);
            const resolvedPath = path.resolve(filePath);
            // FIX: Skip symlinks to prevent traversal attacks
            if (entry.isSymbolicLink()) {
                return;
            }
            // FIX: Double-check resolved path is still within project
            if (!isInside(projectRoot, resolvedPath)) {
                console.warn(`[Security] Skipping file outside project: ${filePath}`);
                return;
            }
            // Skip scanning our own core scanner files to avoid false positives
            if (excludeDirs.some((d) => isInside(d, filePath))) {
                return;
            }
            if (entry.isDirectory()) {
                if (!entry.name.startsWith(".") && entry.name !== "node_modules") {
                    scanDirectory(filePath);
                }
            }
            else if (entry.name.match(/\.(ts|js|jsx|tsx|json)$/)) {
                scanFile(filePath, patterns, foundSecrets);
            }
        });
    };
    try {
        scanDirectory(srcDir);
        if (foundSecrets.length > 0) {
            // Matches flagged as pattern definitions are reported separately
            // so regex tables don't trigger a critical failure on their own.
            const defs = foundSecrets.filter((s) => s.isDefinition);
            const reals = foundSecrets.filter((s) => !s.isDefinition);
            if (reals.length > 0) {
                return {
                    name: "secrets scan",
                    status: "fail",
                    severity: "critical",
                    message: `Found potential secrets in ${reals.length} locations (plus ${defs.length} possible pattern definitions)`,
                    details: {
                        matches: reals.slice(0, 10), // Limit output
                        possibleDefinitions: defs.slice(0, 5),
                        totalMatches: reals.length,
                        totalDefinitions: defs.length,
                    },
                    suggestions: ["Remove hardcoded secrets", "Use environment variables", "Add files to .gitignore"],
                };
            }
            // Only pattern definitions found
            return {
                name: "secrets scan",
                status: "warn",
                severity: "warning",
                message: `Only pattern/regex definitions found (${defs.length} matches) — possible false positives`,
                details: {
                    possibleDefinitions: defs.slice(0, 5),
                    total: defs.length,
                },
                suggestions: [
                    "Move pattern/regex definitions to a separate config file (e.g. config/patterns.json)",
                    "Or annotate pattern definition lines with a comment like // security-reporter:ignore",
                ],
            };
        }
        return {
            name: "secrets scan",
            status: "pass",
            severity: "info",
            message: "No hardcoded secrets found",
            details: {
                filesScanned: countFilesInDir(srcDir),
                patternsChecked: patterns.length,
            },
        };
    }
    catch (error) {
        return {
            name: "secrets scan",
            status: "fail",
            severity: "error",
            message: "Could not scan for secrets",
            details: error.message,
        };
    }
};
|
|
419
|
+
/**
 * Scan a single file line-by-line against the supplied secret patterns,
 * appending any hits to `foundSecrets` (mutated in place).
 *
 * FIX #15: files larger than MAX_FILE_SIZE are skipped to bound memory use.
 * Lines that look like comments or placeholder material are ignored, and a
 * heuristic marks probable pattern/regex definitions via `isDefinition`.
 *
 * NOTE(review): the definition heuristic's `\/.*\//` alternative matches any
 * line containing a "/.../"-shaped token — including URLs and file paths —
 * which can downgrade genuine findings; confirm this is intended.
 */
const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
const scanFile = (filePath, patterns, foundSecrets) => {
    try {
        const stats = fs.statSync(filePath);
        // FIX: Skip files that are too large
        if (stats.size > MAX_FILE_SIZE) {
            console.warn(`[Security] Skipping large file: ${filePath} (${stats.size} bytes)`);
            return;
        }
        const lines = fs.readFileSync(filePath, "utf-8").split(/\r?\n/);
        // Comment prefixes / placeholder markers that suppress a match.
        const looksIgnorable = (t) => t.startsWith("//") ||
            t.startsWith("#") ||
            t.startsWith("*") ||
            t.includes("example") ||
            t.includes("placeholder") ||
            t.includes("TODO") ||
            t.includes("FIXME");
        for (const { name, pattern } of patterns) {
            for (const [index, line] of lines.entries()) {
                // Reset regex lastIndex to prevent issues with global flag
                pattern.lastIndex = 0;
                if (!pattern.test(line)) {
                    continue;
                }
                const trimmed = line.trim();
                if (looksIgnorable(trimmed)) {
                    continue;
                }
                // Heuristic: does this line look like a pattern/regex definition?
                const isDefinition = /pattern\s*[:=]|new RegExp\(|const\s+patterns\b|let\s+patterns\b|var\s+patterns\b|\/.*\//.test(trimmed);
                foundSecrets.push({
                    file: filePath.replace(process.cwd(), ""),
                    type: name,
                    line: index + 1,
                    snippet: trimmed.slice(0, 200),
                    isDefinition,
                });
            }
        }
    }
    catch (err) {
        // Unreadable files are silently skipped by design.
    }
};
|
|
465
|
+
/**
 * Recursively count source-like files (.ts/.js/.jsx/.tsx/.json) under `dir`,
 * skipping node_modules and dot-directories. Unreadable directories simply
 * contribute zero to the total.
 *
 * @param {string} dir - Directory to count within.
 * @returns {number} Number of matching files.
 */
const countFilesInDir = (dir) => {
    const SOURCE_EXT = /\.(ts|js|jsx|tsx|json)$/;
    let total = 0;
    try {
        for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
            if (entry.isDirectory()) {
                if (entry.name !== "node_modules" && !entry.name.startsWith(".")) {
                    total += countFilesInDir(path.join(dir, entry.name));
                }
            }
            else if (entry.isFile() && SOURCE_EXT.test(entry.name)) {
                total += 1;
            }
        }
    }
    catch (err) {
        // Ignore unreadable directories.
    }
    return total;
};
|
|
486
|
+
/**
 * Check .env file configuration: an existing .env should be listed in
 * .gitignore and documented by a .env.example.
 *
 * FIX: this check had no try/catch, so any fs error (e.g. .gitignore removed
 * between existsSync and readFileSync) rejected the promise and sank the
 * whole Promise.all in runSecurityChecks. It now returns a structured "fail"
 * result like every sibling check.
 *
 * NOTE(review): `gitignore.includes(".env")` also matches entries such as
 * ".envrc" or ".env.example" — confirm whether a stricter per-line match is
 * wanted.
 *
 * @param {string} projectType - Detected project type (currently unused here).
 * @returns {Promise<object>} Check result ("env files").
 */
const checkEnvFiles = async (projectType) => {
    try {
        const hasEnv = fs.existsSync(path.join(process.cwd(), ".env"));
        const hasEnvExample = fs.existsSync(path.join(process.cwd(), ".env.example"));
        const hasGitignore = fs.existsSync(path.join(process.cwd(), ".gitignore"));
        const issues = [];
        if (hasEnv && hasGitignore) {
            const gitignore = fs.readFileSync(path.join(process.cwd(), ".gitignore"), "utf-8");
            if (!gitignore.includes(".env")) {
                issues.push(".env file not in .gitignore");
            }
        }
        if (hasEnv && !hasEnvExample) {
            issues.push("Missing .env.example file for documentation");
        }
        if (issues.length > 0) {
            return {
                name: "env files",
                status: "warn",
                severity: "warning",
                message: "Environment file issues detected",
                details: issues,
                suggestions: ["Add .env to .gitignore", "Create .env.example with dummy values"],
            };
        }
        return {
            name: "env files",
            status: "pass",
            severity: "info",
            message: "Environment files properly configured",
        };
    }
    catch (error) {
        return {
            name: "env files",
            status: "fail",
            severity: "error",
            message: "Could not check env files",
            details: error.message,
        };
    }
};
|
|
520
|
+
/**
 * Enhanced License Compliance Checker.
 *
 * Compares the package.json `license` field against the project's LICENSE
 * file and an allowlist (config.allowedLicenses). Skips entirely when no
 * allowlist is configured. Fails critically when a detected license is not
 * in the allowlist, fails on mismatch/read problems, warns when only one of
 * the two sources is present, and passes otherwise.
 *
 * NOTE(review): LICENSE-file detection is a substring heuristic covering
 * MIT / Apache-2.0 / BSD-3-Clause / ISC / GPL-2.0 / GPL-3.0 only; other
 * licenses leave detectedLicense undefined — confirm that's acceptable.
 */
const checkLicenses = async (config) => {
    // No allowlist configured => the check is informational-skip by design.
    if (!config.allowedLicenses || config.allowedLicenses.length === 0) {
        return {
            name: "licenses",
            status: "skip",
            severity: "info",
            message: "License checking disabled (no allowedLicenses configured)",
            suggestions: [
                'Add "allowedLicenses": ["MIT", "Apache-2.0", "BSD-3-Clause"] to .securityrc.json',
                "Run: security-reporter init to create config",
            ],
        };
    }
    try {
        const issues = []; // Hard problems (mismatch, unreadable manifest).
        const warnings = []; // Soft problems (one source missing).
        // 1. Check package.json license
        let pkgLicense;
        try {
            const pkgContent = fs.readFileSync(path.join(process.cwd(), "package.json"), "utf-8");
            const pkg = safeParseJSON(pkgContent, "package.json");
            pkgLicense = pkg.license;
        }
        catch {
            issues.push("Could not read package.json");
        }
        // 2. Check LICENSE file (common variations); first match wins.
        const licenseFiles = ["LICENSE", "LICENSE.md", "LICENSE.txt", "LICENCE", "LICENCE.md"];
        let licenseFile;
        let licenseFileContent;
        for (const filename of licenseFiles) {
            const filepath = path.join(process.cwd(), filename);
            if (fs.existsSync(filepath)) {
                licenseFile = filename;
                licenseFileContent = fs.readFileSync(filepath, "utf-8");
                break;
            }
        }
        // Detect license type from LICENSE file content (substring heuristic).
        let detectedLicense;
        if (licenseFileContent) {
            if (licenseFileContent.includes("MIT License")) {
                detectedLicense = "MIT";
            }
            else if (licenseFileContent.includes("Apache License")) {
                detectedLicense = "Apache-2.0";
            }
            else if (licenseFileContent.includes("BSD 3-Clause")) {
                detectedLicense = "BSD-3-Clause";
            }
            else if (licenseFileContent.includes("ISC License")) {
                detectedLicense = "ISC";
            }
            else if (licenseFileContent.includes("GNU General Public License")) {
                if (licenseFileContent.includes("version 3")) {
                    detectedLicense = "GPL-3.0";
                }
                else if (licenseFileContent.includes("version 2")) {
                    detectedLicense = "GPL-2.0";
                }
            }
        }
        // 3. Validate and compare: nothing found anywhere is a hard failure.
        if (!pkgLicense && !licenseFile) {
            return {
                name: "licenses",
                status: "fail",
                severity: "error",
                message: "⚠️ No license found in package.json or LICENSE file",
                suggestions: [
                    "Add license field to package.json",
                    "Create a LICENSE file",
                    "Choose from: MIT, Apache-2.0, BSD-3-Clause, ISC",
                    "See: https://choosealicense.com/",
                ],
            };
        }
        if (!pkgLicense) {
            warnings.push("Missing license field in package.json");
        }
        if (!licenseFile) {
            warnings.push("Missing LICENSE file in repository");
        }
        // 4. Check if they differ
        if (pkgLicense && detectedLicense && pkgLicense !== detectedLicense) {
            issues.push(`License mismatch: package.json says "${pkgLicense}" but ${licenseFile} appears to be "${detectedLicense}"`);
        }
        // 5. Check against allowed list (both sources must be allowlisted).
        const licensesToCheck = [pkgLicense, detectedLicense].filter(Boolean);
        const disallowedLicenses = licensesToCheck.filter((lic) => !config.allowedLicenses.includes(lic));
        if (disallowedLicenses.length > 0) {
            return {
                name: "licenses",
                status: "fail",
                severity: "critical",
                message: `🚨 Disallowed license(s) found: ${disallowedLicenses.join(", ")}`,
                details: {
                    packageJson: pkgLicense,
                    licenseFile: detectedLicense,
                    allowed: config.allowedLicenses,
                    disallowed: disallowedLicenses,
                },
                suggestions: [
                    `Change to an allowed license: ${config.allowedLicenses.join(", ")}`,
                    "Update both package.json and LICENSE file",
                    "Consult legal team if needed",
                ],
            };
        }
        // 6. Report results: hard issues => fail, soft warnings => warn, else pass.
        if (issues.length > 0) {
            return {
                name: "licenses",
                status: "fail",
                severity: "error",
                message: "License compliance issues detected",
                details: {
                    issues,
                    warnings,
                    packageJson: pkgLicense,
                    licenseFile: detectedLicense,
                },
                suggestions: [
                    "Ensure package.json and LICENSE file match",
                    "Use the same license identifier in both",
                    "Regenerate LICENSE file if needed",
                ],
            };
        }
        if (warnings.length > 0) {
            return {
                name: "licenses",
                status: "warn",
                severity: "warning",
                message: "License configuration could be improved",
                details: {
                    warnings,
                    packageJson: pkgLicense,
                    licenseFile: licenseFile ? `${licenseFile} (${detectedLicense})` : undefined,
                },
                suggestions: warnings.map((w) => `Fix: ${w}`),
            };
        }
        return {
            name: "licenses",
            status: "pass",
            severity: "info",
            message: `✅ License: ${pkgLicense || detectedLicense} (allowed)`,
            details: {
                packageJson: pkgLicense,
                licenseFile: licenseFile ? `${licenseFile} (${detectedLicense})` : undefined,
                allowed: config.allowedLicenses,
            },
        };
    }
    catch (error) {
        return {
            name: "licenses",
            status: "fail",
            severity: "error",
            message: "Could not check licenses",
            details: error.message,
        };
    }
};
|
|
688
|
+
// --- Additional optional checks ---
/**
 * Optionally run `npm pack --dry-run` so the operator can review exactly
 * what would be published. Opt-in via config.publishDryRun; any failure is
 * reported as a warning rather than an error.
 *
 * @param {object} config - Security configuration.
 * @returns {Promise<object>} Check result ("publish dry-run").
 */
const checkPublishDryRun = async (config) => {
    if (!config.publishDryRun) {
        return {
            name: "publish dry-run",
            status: "skip",
            severity: "info",
            message: "Publish dry-run disabled (set security.publishDryRun=true to enable)",
        };
    }
    try {
        const packResult = await spawnCommand("npm", ["pack", "--dry-run"], {
            timeout: 30000,
            maxBuffer: 10 * 1024 * 1024,
        });
        return {
            name: "publish dry-run",
            status: "pass",
            severity: "info",
            message: "npm pack --dry-run completed",
            // Cap the captured output so reports stay readable.
            details: packResult.stdout.slice(0, 8000),
            suggestions: ["Review pack output before publishing"],
        };
    }
    catch (error) {
        return {
            name: "publish dry-run",
            status: "warn",
            severity: "warning",
            message: "Publish dry-run failed or returned warnings",
            details: error.stdout || error.message,
            suggestions: ["Run 'npm pack --dry-run' locally to inspect package contents"],
        };
    }
};
|
|
723
|
+
/**
 * Opt-in check that writes a basic software bill of materials (SBOM) to
 * reports/sbom-npm-ls.json using the JSON output of `npm ls --all`.
 *
 * @param {object} config - Security config; only `generateSbom` is read.
 * @returns {Promise<object>} Check result object (skip / pass / warn).
 */
const checkSbomGeneration = async (config) => {
    // Disabled by default; skip unless the user opted in explicitly.
    if (!config.generateSbom) {
        return {
            name: "sbom",
            status: "skip",
            severity: "info",
            message: "SBOM generation disabled (set security.generateSbom=true to enable)",
        };
    }
    try {
        const reportsDir = path.join(process.cwd(), "reports");
        // FIX #7: with { recursive: true }, mkdirSync is idempotent and never
        // throws EEXIST, so no extra try/catch is needed around it; any other
        // error (e.g. EACCES) falls through to the outer catch below.
        fs.mkdirSync(reportsDir, { recursive: true });
        const result = await spawnCommand("npm", ["ls", "--all", "--json"], {
            timeout: 30000,
            maxBuffer: 10 * 1024 * 1024,
        });
        const sbomPath = path.join(reportsDir, "sbom-npm-ls.json");
        fs.writeFileSync(sbomPath, result.stdout, "utf-8");
        return {
            name: "sbom",
            status: "pass",
            severity: "info",
            message: "Basic SBOM generated via 'npm ls'",
            details: { path: sbomPath },
            suggestions: ["Consider generating CycloneDX SBOM for standards compliance"],
        };
    }
    catch (error) {
        // Any failure (mkdir, npm, or write) degrades to a warning.
        return {
            name: "sbom",
            status: "warn",
            severity: "warning",
            message: "Could not generate SBOM via 'npm ls'",
            details: error.stdout || error.message,
            suggestions: ["Install CycloneDX tooling or run 'npm ls --all --json' manually"],
        };
    }
};
|
|
769
|
+
/**
 * FIX #5: Use native fetch API with proper error handling
 * FIXED: HTTPS without certificate validation
 *
 * Opt-in registry lookup for the current package so the operator can spot
 * typosquatting (an unexpected near-miss name) by eye. Any network or parse
 * failure degrades to a warning rather than failing the run.
 *
 * @param {object} config - Security config; only `checkRegistry` is read.
 * @returns {Promise<object>} Check result object (skip / pass / warn).
 */
const checkTyposquatting = async (config) => {
    if (!config.checkRegistry) {
        return {
            name: "typosquatting",
            status: "skip",
            severity: "info",
            message: "Registry checks disabled (set security.checkRegistry=true to enable)",
        };
    }
    try {
        const manifestPath = path.join(process.cwd(), "package.json");
        if (!fs.existsSync(manifestPath)) {
            return {
                name: "typosquatting",
                status: "skip",
                severity: "info",
                message: "No package.json found",
            };
        }
        const manifest = safeParseJSON(fs.readFileSync(manifestPath, "utf-8"), "package.json");
        const name = manifest.name;
        if (!name) {
            return {
                name: "typosquatting",
                status: "warn",
                severity: "warning",
                message: "Package name missing in package.json",
            };
        }
        const regUrl = `https://registry.npmjs.org/${encodeURIComponent(name)}`;
        // FIX: Use fetch with proper timeout and headers.
        // Abort the lookup if the registry does not answer within 5 seconds.
        const controller = new AbortController();
        const abortTimer = setTimeout(() => controller.abort(), 5000);
        try {
            const response = await fetch(regUrl, {
                signal: controller.signal,
                headers: {
                    "User-Agent": "security-reporter/1.0.0",
                },
            });
            if (!response.ok) {
                throw new Error(`Registry returned ${response.status}`);
            }
            const info = await response.json();
            return {
                name: "typosquatting",
                status: "pass",
                severity: "info",
                message: `Registry lookup for ${name} succeeded`,
                details: {
                    latest: info["dist-tags"]?.latest,
                    maintainersCount: (info.maintainers || []).length,
                },
                suggestions: ["Verify maintainer list and download counts manually if package is new"],
            };
        }
        finally {
            // Always release the abort timer, on success and on throw alike.
            clearTimeout(abortTimer);
        }
    }
    catch (error) {
        return {
            name: "typosquatting",
            status: "warn",
            severity: "warning",
            message: "Could not query npm registry",
            details: error.message,
            suggestions: ["Run registry checks locally or enable network access"],
        };
    }
};
|
|
846
|
+
// ============================================================================
|
|
847
|
+
// UTILITY FUNCTIONS - Security Helpers
|
|
848
|
+
// ============================================================================
|
|
849
|
+
/**
 * FIX #4: Safe JSON parsing with validation
 * FIXED: Unsafe JSON.parse() from untrusted sources
 */
const MAX_JSON_SIZE = 1024 * 1024; // 1MB
/**
 * Parses JSON from an untrusted source with a size cap and a shape check.
 *
 * @param {string} content - Raw JSON text to parse.
 * @param {string} source - Human-readable origin (e.g. "package.json"),
 *   used only in error messages.
 * @returns {object} The parsed value; arrays also pass the object check.
 * @throws {Error} If the content exceeds MAX_JSON_SIZE, is not valid JSON,
 *   or parses to a non-object (number, string, boolean, or null).
 */
const safeParseJSON = (content, source) => {
    // Reject oversized payloads before handing them to the parser.
    if (content.length > MAX_JSON_SIZE) {
        throw new Error(`${source} is too large (${content.length} bytes, max ${MAX_JSON_SIZE})`);
    }
    let parsed;
    try {
        parsed = JSON.parse(content);
    }
    catch (error) {
        throw new Error(`Failed to parse ${source}: ${error.message}`);
    }
    // BUG FIX: this validation used to live inside the try block above, so its
    // own error was immediately re-caught and double-wrapped as
    // "Failed to parse <source>: <source> must contain a JSON object".
    // Validating after the try/catch keeps the message intact.
    if (typeof parsed !== "object" || parsed === null) {
        throw new Error(`${source} must contain a JSON object`);
    }
    return parsed;
};
|
|
871
|
+
/**
 * Runs a command via child_process.spawn and resolves with its captured
 * output, enforcing a wall-clock timeout and per-stream output caps.
 *
 * Exit codes 0 and 1 both resolve (npm audit exits 1 when vulnerabilities
 * are found); any other exit code rejects with an Error carrying stdout,
 * stderr, and code properties.
 *
 * @param {string} command - Executable to run (e.g. "npm").
 * @param {string[]} args - Argument list passed as-is (no shell involved).
 * @param {object} [options] - { timeout = 30000 ms, maxBuffer = 10 MiB }.
 * @returns {Promise<{stdout: string, stderr: string, code: number}>}
 */
const spawnCommand = (command, args, options = {}) => {
    return new Promise((resolve, reject) => {
        const { timeout = 30000, maxBuffer = 10 * 1024 * 1024 } = options;
        // No shell: args are passed directly, avoiding shell-injection risk.
        // NOTE(review): on Windows, "npm" without shell:true may fail to
        // resolve npm.cmd — confirm if Windows support is required.
        const proc = (0, child_process_1.spawn)(command, args, {
            cwd: process.cwd(),
            env: process.env,
        });
        let stdout = "";
        let stderr = "";
        // Once `killed` is set, the close/error handlers below must not settle
        // the promise again — a reject has already been issued.
        let killed = false;
        // Set timeout
        const timer = setTimeout(() => {
            killed = true;
            proc.kill();
            reject(new Error(`Command timed out after ${timeout}ms`));
        }, timeout);
        proc.stdout.on("data", (data) => {
            stdout += data.toString();
            // Cap accumulated output; kill the child and reject on overflow.
            // (Later reject calls on an already-settled promise are no-ops.)
            if (stdout.length > maxBuffer) {
                killed = true;
                proc.kill();
                reject(new Error(`Output exceeded maxBuffer (${maxBuffer} bytes)`));
            }
        });
        proc.stderr.on("data", (data) => {
            stderr += data.toString();
            if (stderr.length > maxBuffer) {
                killed = true;
                proc.kill();
                reject(new Error(`Error output exceeded maxBuffer (${maxBuffer} bytes)`));
            }
        });
        proc.on("close", (code) => {
            clearTimeout(timer);
            if (!killed) {
                if (code === 0 || code === 1) {
                    // npm audit returns 1 if vulnerabilities found
                    // `code || 0` normalizes a null code (signal exit) to 0.
                    resolve({ stdout, stderr, code: code || 0 });
                }
                else {
                    // Attach captured output so callers can report details.
                    const error = new Error(`Command failed with exit code ${code}`);
                    error.stdout = stdout;
                    error.stderr = stderr;
                    error.code = code;
                    reject(error);
                }
            }
        });
        proc.on("error", (err) => {
            // Spawn-level failure (e.g. command not found).
            clearTimeout(timer);
            if (!killed) {
                reject(err);
            }
        });
    });
};
|
|
927
|
+
//# sourceMappingURL=security.js.map
|