hackmyagent-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/checker/check-skill.d.ts +48 -0
- package/dist/checker/check-skill.d.ts.map +1 -0
- package/dist/checker/check-skill.js +105 -0
- package/dist/checker/check-skill.js.map +1 -0
- package/dist/checker/check-skill.test.d.ts +2 -0
- package/dist/checker/check-skill.test.d.ts.map +1 -0
- package/dist/checker/check-skill.test.js +83 -0
- package/dist/checker/check-skill.test.js.map +1 -0
- package/dist/checker/index.d.ts +12 -0
- package/dist/checker/index.d.ts.map +1 -0
- package/dist/checker/index.js +16 -0
- package/dist/checker/index.js.map +1 -0
- package/dist/checker/permission-analyzer.d.ts +12 -0
- package/dist/checker/permission-analyzer.d.ts.map +1 -0
- package/dist/checker/permission-analyzer.js +84 -0
- package/dist/checker/permission-analyzer.js.map +1 -0
- package/dist/checker/permission-analyzer.test.d.ts +2 -0
- package/dist/checker/permission-analyzer.test.d.ts.map +1 -0
- package/dist/checker/permission-analyzer.test.js +87 -0
- package/dist/checker/permission-analyzer.test.js.map +1 -0
- package/dist/checker/publisher-verifier.d.ts +34 -0
- package/dist/checker/publisher-verifier.d.ts.map +1 -0
- package/dist/checker/publisher-verifier.js +121 -0
- package/dist/checker/publisher-verifier.js.map +1 -0
- package/dist/checker/publisher-verifier.test.d.ts +2 -0
- package/dist/checker/publisher-verifier.test.d.ts.map +1 -0
- package/dist/checker/publisher-verifier.test.js +171 -0
- package/dist/checker/publisher-verifier.test.js.map +1 -0
- package/dist/checker/skill-identifier.d.ts +14 -0
- package/dist/checker/skill-identifier.d.ts.map +1 -0
- package/dist/checker/skill-identifier.js +55 -0
- package/dist/checker/skill-identifier.js.map +1 -0
- package/dist/checker/skill-identifier.test.d.ts +2 -0
- package/dist/checker/skill-identifier.test.d.ts.map +1 -0
- package/dist/checker/skill-identifier.test.js +64 -0
- package/dist/checker/skill-identifier.test.js.map +1 -0
- package/dist/hardening/index.d.ts +7 -0
- package/dist/hardening/index.d.ts.map +1 -0
- package/dist/hardening/index.js +9 -0
- package/dist/hardening/index.js.map +1 -0
- package/dist/hardening/scanner.d.ts +85 -0
- package/dist/hardening/scanner.d.ts.map +1 -0
- package/dist/hardening/scanner.js +3410 -0
- package/dist/hardening/scanner.js.map +1 -0
- package/dist/hardening/scanner.test.d.ts +2 -0
- package/dist/hardening/scanner.test.d.ts.map +1 -0
- package/dist/hardening/scanner.test.js +1103 -0
- package/dist/hardening/scanner.test.js.map +1 -0
- package/dist/hardening/security-check.d.ts +56 -0
- package/dist/hardening/security-check.d.ts.map +1 -0
- package/dist/hardening/security-check.js +6 -0
- package/dist/hardening/security-check.js.map +1 -0
- package/dist/index.d.ts +27 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +35 -0
- package/dist/index.js.map +1 -0
- package/dist/scanner/external-scanner.d.ts +13 -0
- package/dist/scanner/external-scanner.d.ts.map +1 -0
- package/dist/scanner/external-scanner.js +299 -0
- package/dist/scanner/external-scanner.js.map +1 -0
- package/dist/scanner/external-scanner.test.d.ts +2 -0
- package/dist/scanner/external-scanner.test.d.ts.map +1 -0
- package/dist/scanner/external-scanner.test.js +302 -0
- package/dist/scanner/external-scanner.test.js.map +1 -0
- package/dist/scanner/index.d.ts +6 -0
- package/dist/scanner/index.d.ts.map +1 -0
- package/dist/scanner/index.js +9 -0
- package/dist/scanner/index.js.map +1 -0
- package/dist/scanner/types.d.ts +32 -0
- package/dist/scanner/types.d.ts.map +1 -0
- package/dist/scanner/types.js +6 -0
- package/dist/scanner/types.js.map +1 -0
- package/package.json +37 -0
|
@@ -0,0 +1,3410 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Hardening Scanner
|
|
4
|
+
* Scans for security issues and optionally auto-fixes them
|
|
5
|
+
*/
|
|
6
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
7
|
+
if (k2 === undefined) k2 = k;
|
|
8
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
9
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
10
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
11
|
+
}
|
|
12
|
+
Object.defineProperty(o, k2, desc);
|
|
13
|
+
}) : (function(o, m, k, k2) {
|
|
14
|
+
if (k2 === undefined) k2 = k;
|
|
15
|
+
o[k2] = m[k];
|
|
16
|
+
}));
|
|
17
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
18
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
19
|
+
}) : function(o, v) {
|
|
20
|
+
o["default"] = v;
|
|
21
|
+
});
|
|
22
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
23
|
+
var ownKeys = function(o) {
|
|
24
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
25
|
+
var ar = [];
|
|
26
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
27
|
+
return ar;
|
|
28
|
+
};
|
|
29
|
+
return ownKeys(o);
|
|
30
|
+
};
|
|
31
|
+
return function (mod) {
|
|
32
|
+
if (mod && mod.__esModule) return mod;
|
|
33
|
+
var result = {};
|
|
34
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
35
|
+
__setModuleDefault(result, mod);
|
|
36
|
+
return result;
|
|
37
|
+
};
|
|
38
|
+
})();
|
|
39
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
40
|
+
exports.HardeningScanner = void 0;
|
|
41
|
+
const fs = __importStar(require("fs/promises"));
|
|
42
|
+
const path = __importStar(require("path"));
|
|
43
|
+
// Regexes for spotting secrets embedded in configuration files.
// Every pattern is anchored to a vendor-specific prefix so generic
// base64/hex blobs do not produce false positives.
const CREDENTIAL_PATTERNS = [
    // Anthropic: "sk-ant-api" + 2-digit version + 20+ char key body
    { name: 'ANTHROPIC_API_KEY', pattern: /sk-ant-api\d{2}-[a-zA-Z0-9_-]{20,}/ },
    // OpenAI project keys: "sk-proj-" prefix with 20+ chars
    { name: 'OPENAI_API_KEY', pattern: /sk-proj-[a-zA-Z0-9]{20,}/ },
    // OpenAI legacy keys: "sk-" followed by 48+ chars (avoids short matches)
    { name: 'OPENAI_API_KEY', pattern: /sk-[a-zA-Z0-9]{48,}/ },
    // AWS access key ID: "AKIA" prefix, exactly 20 chars total
    { name: 'AWS_ACCESS_KEY', pattern: /AKIA[0-9A-Z]{16}/ },
    // (No AWS secret-key pattern: generic base64 causes false positives.)
    // GitHub classic personal access token ("ghp_" prefix)
    { name: 'GITHUB_TOKEN', pattern: /ghp_[a-zA-Z0-9]{36}/ },
    // GitHub fine-grained PAT ("github_pat_" prefix)
    { name: 'GITHUB_TOKEN', pattern: /github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}/ },
    // Slack tokens: highly specific xox[baprs]-…-…-… shape
    { name: 'SLACK_TOKEN', pattern: /xox[baprs]-[0-9]{10,13}-[0-9]{10,13}-[a-zA-Z0-9]{24}/ },
    // Google API keys: "AIza" prefix
    { name: 'GOOGLE_API_KEY', pattern: /AIza[0-9A-Za-z_-]{35}/ },
    // Stripe live secret keys
    { name: 'STRIPE_KEY', pattern: /sk_live_[0-9a-zA-Z]{24,}/ },
    // SendGrid API keys ("SG." + two dot-separated segments)
    { name: 'SENDGRID_KEY', pattern: /SG\.[a-zA-Z0-9_-]{22}\.[a-zA-Z0-9_-]{43}/ },
];
// Points deducted per failed finding, keyed by severity, when computing
// the overall hardening score.
const SEVERITY_WEIGHTS = {
    critical: 25,
    high: 15,
    medium: 8,
    low: 3,
};
|
|
75
|
+
class HardeningScanner {
|
|
76
|
+
async scan(options) {
|
|
77
|
+
const { targetDir, autoFix = false, dryRun = false, ignore = [] } = options;
|
|
78
|
+
// Normalize ignore list to uppercase for case-insensitive matching
|
|
79
|
+
const ignoredChecks = new Set(ignore.map((id) => id.toUpperCase()));
|
|
80
|
+
// In dry-run mode, we detect what would be fixed but don't modify anything
|
|
81
|
+
const shouldFix = autoFix && !dryRun;
|
|
82
|
+
// Create backup before auto-fix (not in dry-run mode)
|
|
83
|
+
let backupPath;
|
|
84
|
+
if (shouldFix) {
|
|
85
|
+
backupPath = await this.createBackup(targetDir);
|
|
86
|
+
}
|
|
87
|
+
// Track if any fix fails for atomic rollback
|
|
88
|
+
let fixFailed = false;
|
|
89
|
+
// Detect platform
|
|
90
|
+
const platform = await this.detectPlatform(targetDir);
|
|
91
|
+
// Run all checks
|
|
92
|
+
const findings = [];
|
|
93
|
+
// Credential exposure checks
|
|
94
|
+
const credFindings = await this.checkCredentialExposure(targetDir, shouldFix);
|
|
95
|
+
findings.push(...credFindings);
|
|
96
|
+
// CLAUDE.md specific checks
|
|
97
|
+
const claudeFindings = await this.checkClaudeMd(targetDir, shouldFix);
|
|
98
|
+
findings.push(...claudeFindings);
|
|
99
|
+
// MCP configuration checks
|
|
100
|
+
const mcpFindings = await this.checkMcpConfig(targetDir, shouldFix);
|
|
101
|
+
findings.push(...mcpFindings);
|
|
102
|
+
// File permission checks
|
|
103
|
+
const permFindings = await this.checkFilePermissions(targetDir, shouldFix);
|
|
104
|
+
findings.push(...permFindings);
|
|
105
|
+
// Git security checks
|
|
106
|
+
const gitFindings = await this.checkGitSecurity(targetDir, shouldFix);
|
|
107
|
+
findings.push(...gitFindings);
|
|
108
|
+
// Network security checks
|
|
109
|
+
const netFindings = await this.checkNetworkSecurity(targetDir, shouldFix);
|
|
110
|
+
findings.push(...netFindings);
|
|
111
|
+
// Additional MCP checks
|
|
112
|
+
const mcpAdvFindings = await this.checkMcpAdvanced(targetDir, shouldFix);
|
|
113
|
+
findings.push(...mcpAdvFindings);
|
|
114
|
+
// Claude Code advanced checks
|
|
115
|
+
const claudeAdvFindings = await this.checkClaudeAdvanced(targetDir, shouldFix);
|
|
116
|
+
findings.push(...claudeAdvFindings);
|
|
117
|
+
// Cursor configuration checks
|
|
118
|
+
const cursorFindings = await this.checkCursorConfig(targetDir, shouldFix);
|
|
119
|
+
findings.push(...cursorFindings);
|
|
120
|
+
// VSCode configuration checks
|
|
121
|
+
const vscodeFindings = await this.checkVscodeConfig(targetDir, shouldFix);
|
|
122
|
+
findings.push(...vscodeFindings);
|
|
123
|
+
// Additional credential checks
|
|
124
|
+
const credAdvFindings = await this.checkCredentialsAdvanced(targetDir, shouldFix);
|
|
125
|
+
findings.push(...credAdvFindings);
|
|
126
|
+
// Additional permission checks
|
|
127
|
+
const permAdvFindings = await this.checkPermissionsAdvanced(targetDir, shouldFix);
|
|
128
|
+
findings.push(...permAdvFindings);
|
|
129
|
+
// Environment and config checks
|
|
130
|
+
const envFindings = await this.checkEnvironmentSecurity(targetDir, shouldFix);
|
|
131
|
+
findings.push(...envFindings);
|
|
132
|
+
// Logging and audit checks
|
|
133
|
+
const logFindings = await this.checkLoggingSecurity(targetDir, shouldFix);
|
|
134
|
+
findings.push(...logFindings);
|
|
135
|
+
// Dependency checks
|
|
136
|
+
const depFindings = await this.checkDependencySecurity(targetDir, shouldFix);
|
|
137
|
+
findings.push(...depFindings);
|
|
138
|
+
// Session and auth checks
|
|
139
|
+
const authFindings = await this.checkAuthSecurity(targetDir, shouldFix);
|
|
140
|
+
findings.push(...authFindings);
|
|
141
|
+
// Process and runtime checks
|
|
142
|
+
const procFindings = await this.checkProcessSecurity(targetDir, shouldFix);
|
|
143
|
+
findings.push(...procFindings);
|
|
144
|
+
// Additional Claude checks
|
|
145
|
+
const claude3Findings = await this.checkClaudeExtended(targetDir, shouldFix);
|
|
146
|
+
findings.push(...claude3Findings);
|
|
147
|
+
// Additional MCP checks
|
|
148
|
+
const mcp2Findings = await this.checkMcpExtended(targetDir, shouldFix);
|
|
149
|
+
findings.push(...mcp2Findings);
|
|
150
|
+
// Additional network checks
|
|
151
|
+
const net2Findings = await this.checkNetworkExtended(targetDir, shouldFix);
|
|
152
|
+
findings.push(...net2Findings);
|
|
153
|
+
// Input/output security checks
|
|
154
|
+
const ioFindings = await this.checkIOSecurity(targetDir, shouldFix);
|
|
155
|
+
findings.push(...ioFindings);
|
|
156
|
+
// API security checks
|
|
157
|
+
const apiFindings = await this.checkAPISecurity(targetDir, shouldFix);
|
|
158
|
+
findings.push(...apiFindings);
|
|
159
|
+
// Secret management checks
|
|
160
|
+
const secretFindings = await this.checkSecretManagement(targetDir, shouldFix);
|
|
161
|
+
findings.push(...secretFindings);
|
|
162
|
+
// Prompt injection defense checks
|
|
163
|
+
const promptFindings = await this.checkPromptSecurity(targetDir, shouldFix);
|
|
164
|
+
findings.push(...promptFindings);
|
|
165
|
+
// Input validation checks
|
|
166
|
+
const injFindings = await this.checkInputValidation(targetDir, shouldFix);
|
|
167
|
+
findings.push(...injFindings);
|
|
168
|
+
// Rate limiting checks
|
|
169
|
+
const rateFindings = await this.checkRateLimiting(targetDir, shouldFix);
|
|
170
|
+
findings.push(...rateFindings);
|
|
171
|
+
// Session security checks
|
|
172
|
+
const sessionFindings = await this.checkSessionSecurity(targetDir, shouldFix);
|
|
173
|
+
findings.push(...sessionFindings);
|
|
174
|
+
// Encryption checks
|
|
175
|
+
const encryptFindings = await this.checkEncryption(targetDir, shouldFix);
|
|
176
|
+
findings.push(...encryptFindings);
|
|
177
|
+
// Audit trail checks
|
|
178
|
+
const auditFindings = await this.checkAuditTrail(targetDir, shouldFix);
|
|
179
|
+
findings.push(...auditFindings);
|
|
180
|
+
// Sandboxing checks
|
|
181
|
+
const sandboxFindings = await this.checkSandboxing(targetDir, shouldFix);
|
|
182
|
+
findings.push(...sandboxFindings);
|
|
183
|
+
// Tool boundary checks
|
|
184
|
+
const toolFindings = await this.checkToolBoundaries(targetDir, shouldFix);
|
|
185
|
+
findings.push(...toolFindings);
|
|
186
|
+
// Filter out ignored checks
|
|
187
|
+
const filteredFindings = ignoredChecks.size > 0
|
|
188
|
+
? findings.filter((f) => !ignoredChecks.has(f.checkId.toUpperCase()))
|
|
189
|
+
: findings;
|
|
190
|
+
// Calculate score (only on non-ignored findings)
|
|
191
|
+
const { score, maxScore } = this.calculateScore(filteredFindings);
|
|
192
|
+
// In dry-run mode, mark fixable failed findings with wouldFix
|
|
193
|
+
if (dryRun && autoFix) {
|
|
194
|
+
for (const finding of filteredFindings) {
|
|
195
|
+
if (!finding.passed && finding.fixable) {
|
|
196
|
+
finding.wouldFix = true;
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
// Determine if all fixes completed successfully (atomic)
|
|
201
|
+
const hasFixedFindings = filteredFindings.some((f) => f.fixed);
|
|
202
|
+
const atomicFix = shouldFix ? !fixFailed && hasFixedFindings : undefined;
|
|
203
|
+
return {
|
|
204
|
+
timestamp: new Date(),
|
|
205
|
+
platform,
|
|
206
|
+
findings: filteredFindings,
|
|
207
|
+
score,
|
|
208
|
+
maxScore,
|
|
209
|
+
backupPath,
|
|
210
|
+
dryRun: dryRun && autoFix ? true : undefined,
|
|
211
|
+
atomicFix,
|
|
212
|
+
ignored: ignoredChecks.size > 0 ? Array.from(ignoredChecks) : undefined,
|
|
213
|
+
};
|
|
214
|
+
}
|
|
215
|
+
async detectPlatform(targetDir) {
|
|
216
|
+
const platforms = [];
|
|
217
|
+
try {
|
|
218
|
+
await fs.access(path.join(targetDir, 'CLAUDE.md'));
|
|
219
|
+
platforms.push('claude-code');
|
|
220
|
+
}
|
|
221
|
+
catch { }
|
|
222
|
+
try {
|
|
223
|
+
await fs.access(path.join(targetDir, '.cursor'));
|
|
224
|
+
platforms.push('cursor');
|
|
225
|
+
}
|
|
226
|
+
catch { }
|
|
227
|
+
try {
|
|
228
|
+
await fs.access(path.join(targetDir, 'mcp.json'));
|
|
229
|
+
platforms.push('mcp');
|
|
230
|
+
}
|
|
231
|
+
catch { }
|
|
232
|
+
try {
|
|
233
|
+
await fs.access(path.join(targetDir, '.claude'));
|
|
234
|
+
platforms.push('claude-code');
|
|
235
|
+
}
|
|
236
|
+
catch { }
|
|
237
|
+
if (platforms.length === 0) {
|
|
238
|
+
return 'generic';
|
|
239
|
+
}
|
|
240
|
+
return platforms.join('+');
|
|
241
|
+
}
|
|
242
|
+
async checkCredentialExposure(targetDir, autoFix) {
|
|
243
|
+
const findings = [];
|
|
244
|
+
const exposedKeys = [];
|
|
245
|
+
const fixedFiles = [];
|
|
246
|
+
const envVarsToAdd = new Set();
|
|
247
|
+
// Credential patterns with their env var names (stricter to avoid false positives)
|
|
248
|
+
const credentialPatterns = [
|
|
249
|
+
{ name: 'ANTHROPIC_API_KEY', pattern: /sk-ant-api\d{2}-[a-zA-Z0-9_-]{20,}/g, envVar: 'ANTHROPIC_API_KEY' },
|
|
250
|
+
{ name: 'OPENAI_API_KEY', pattern: /sk-proj-[a-zA-Z0-9]{20,}/g, envVar: 'OPENAI_API_KEY' },
|
|
251
|
+
{ name: 'OPENAI_API_KEY', pattern: /sk-[a-zA-Z0-9]{48,}/g, envVar: 'OPENAI_API_KEY' },
|
|
252
|
+
{ name: 'AWS_ACCESS_KEY', pattern: /AKIA[0-9A-Z]{16}/g, envVar: 'AWS_ACCESS_KEY_ID' },
|
|
253
|
+
{ name: 'GITHUB_TOKEN', pattern: /ghp_[a-zA-Z0-9]{36}/g, envVar: 'GITHUB_TOKEN' },
|
|
254
|
+
{ name: 'GITHUB_TOKEN', pattern: /github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}/g, envVar: 'GITHUB_TOKEN' },
|
|
255
|
+
{ name: 'GOOGLE_API_KEY', pattern: /AIza[0-9A-Za-z_-]{35}/g, envVar: 'GOOGLE_API_KEY' },
|
|
256
|
+
{ name: 'STRIPE_KEY', pattern: /sk_live_[0-9a-zA-Z]{24,}/g, envVar: 'STRIPE_SECRET_KEY' },
|
|
257
|
+
];
|
|
258
|
+
// Files to check for credentials
|
|
259
|
+
const filesToCheck = [
|
|
260
|
+
'config.json',
|
|
261
|
+
'config.yaml',
|
|
262
|
+
'config.yml',
|
|
263
|
+
'mcp.json',
|
|
264
|
+
'settings.json',
|
|
265
|
+
'.env',
|
|
266
|
+
'.env.local',
|
|
267
|
+
];
|
|
268
|
+
for (const filename of filesToCheck) {
|
|
269
|
+
const filePath = path.join(targetDir, filename);
|
|
270
|
+
try {
|
|
271
|
+
let content = await fs.readFile(filePath, 'utf-8');
|
|
272
|
+
let fileModified = false;
|
|
273
|
+
for (const { name, pattern, envVar } of credentialPatterns) {
|
|
274
|
+
// Reset pattern lastIndex for global regex
|
|
275
|
+
pattern.lastIndex = 0;
|
|
276
|
+
if (pattern.test(content)) {
|
|
277
|
+
// Check if it's already an env var reference
|
|
278
|
+
pattern.lastIndex = 0;
|
|
279
|
+
const match = content.match(pattern);
|
|
280
|
+
if (match && !content.includes('${' + envVar + '}')) {
|
|
281
|
+
exposedKeys.push(name);
|
|
282
|
+
if (autoFix) {
|
|
283
|
+
// Replace the credential with env var reference
|
|
284
|
+
pattern.lastIndex = 0;
|
|
285
|
+
content = content.replace(pattern, '${' + envVar + '}');
|
|
286
|
+
fileModified = true;
|
|
287
|
+
envVarsToAdd.add(envVar);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
if (fileModified) {
|
|
293
|
+
await fs.writeFile(filePath, content);
|
|
294
|
+
fixedFiles.push(filename);
|
|
295
|
+
}
|
|
296
|
+
}
|
|
297
|
+
catch {
|
|
298
|
+
// File doesn't exist, skip
|
|
299
|
+
}
|
|
300
|
+
}
|
|
301
|
+
// Create .env.example if we fixed any credentials
|
|
302
|
+
if (autoFix && envVarsToAdd.size > 0) {
|
|
303
|
+
const envExamplePath = path.join(targetDir, '.env.example');
|
|
304
|
+
let envExampleContent = '# Environment variables for this project\n# Copy to .env and fill in your values\n\n';
|
|
305
|
+
for (const envVar of envVarsToAdd) {
|
|
306
|
+
envExampleContent += `${envVar}=your_${envVar.toLowerCase()}_here\n`;
|
|
307
|
+
}
|
|
308
|
+
await fs.writeFile(envExamplePath, envExampleContent);
|
|
309
|
+
}
|
|
310
|
+
const passed = exposedKeys.length === 0;
|
|
311
|
+
const fixed = fixedFiles.length > 0;
|
|
312
|
+
findings.push({
|
|
313
|
+
checkId: 'CRED-001',
|
|
314
|
+
name: 'Exposed API Keys',
|
|
315
|
+
description: 'API keys or secrets found in plaintext configuration files',
|
|
316
|
+
category: 'credentials',
|
|
317
|
+
severity: 'critical',
|
|
318
|
+
passed: passed || fixed,
|
|
319
|
+
message: fixed
|
|
320
|
+
? `Replaced credentials with env var references in: ${fixedFiles.join(', ')}`
|
|
321
|
+
: passed
|
|
322
|
+
? 'No exposed API keys detected'
|
|
323
|
+
: `Found exposed credentials: ${[...new Set(exposedKeys)].join(', ')}`,
|
|
324
|
+
fixable: true,
|
|
325
|
+
fixed,
|
|
326
|
+
fixMessage: fixed ? `Created .env.example with: ${[...envVarsToAdd].join(', ')}` : undefined,
|
|
327
|
+
details: passed && !fixed ? undefined : { keys: [...new Set(exposedKeys)], fixedFiles },
|
|
328
|
+
});
|
|
329
|
+
return findings;
|
|
330
|
+
}
|
|
331
|
+
async checkClaudeMd(targetDir, autoFix) {
|
|
332
|
+
const findings = [];
|
|
333
|
+
const claudeMdPath = path.join(targetDir, 'CLAUDE.md');
|
|
334
|
+
try {
|
|
335
|
+
const content = await fs.readFile(claudeMdPath, 'utf-8');
|
|
336
|
+
let hasSecrets = false;
|
|
337
|
+
// Check for credentials in CLAUDE.md
|
|
338
|
+
for (const { pattern } of CREDENTIAL_PATTERNS) {
|
|
339
|
+
if (pattern.test(content)) {
|
|
340
|
+
hasSecrets = true;
|
|
341
|
+
break;
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
findings.push({
|
|
345
|
+
checkId: 'CLAUDE-001',
|
|
346
|
+
name: 'CLAUDE.md Sensitive Content',
|
|
347
|
+
description: 'CLAUDE.md file contains sensitive information like API keys',
|
|
348
|
+
category: 'claude-code',
|
|
349
|
+
severity: 'critical',
|
|
350
|
+
passed: !hasSecrets,
|
|
351
|
+
message: hasSecrets
|
|
352
|
+
? 'CLAUDE.md contains exposed credentials'
|
|
353
|
+
: 'CLAUDE.md does not contain sensitive credentials',
|
|
354
|
+
fixable: false,
|
|
355
|
+
});
|
|
356
|
+
}
|
|
357
|
+
catch {
|
|
358
|
+
// CLAUDE.md doesn't exist, that's fine
|
|
359
|
+
findings.push({
|
|
360
|
+
checkId: 'CLAUDE-001',
|
|
361
|
+
name: 'CLAUDE.md Sensitive Content',
|
|
362
|
+
description: 'CLAUDE.md file contains sensitive information like API keys',
|
|
363
|
+
category: 'claude-code',
|
|
364
|
+
severity: 'critical',
|
|
365
|
+
passed: true,
|
|
366
|
+
message: 'No CLAUDE.md file found (OK)',
|
|
367
|
+
fixable: false,
|
|
368
|
+
});
|
|
369
|
+
}
|
|
370
|
+
return findings;
|
|
371
|
+
}
|
|
372
|
+
async checkMcpConfig(targetDir, autoFix) {
|
|
373
|
+
const findings = [];
|
|
374
|
+
const mcpConfigPath = path.join(targetDir, 'mcp.json');
|
|
375
|
+
try {
|
|
376
|
+
const content = await fs.readFile(mcpConfigPath, 'utf-8');
|
|
377
|
+
const config = JSON.parse(content);
|
|
378
|
+
// Check for dangerous filesystem access
|
|
379
|
+
let hasRootAccess = false;
|
|
380
|
+
let hasUnrestrictedShell = false;
|
|
381
|
+
let mcp001Fixed = false;
|
|
382
|
+
if (config.servers) {
|
|
383
|
+
for (const [name, server] of Object.entries(config.servers)) {
|
|
384
|
+
// Check for root filesystem access
|
|
385
|
+
if (server.args) {
|
|
386
|
+
const rootIndex = server.args.findIndex((arg) => arg === '/');
|
|
387
|
+
const homeIndex = server.args.findIndex((arg) => arg === '~');
|
|
388
|
+
if (rootIndex !== -1 || homeIndex !== -1) {
|
|
389
|
+
hasRootAccess = true;
|
|
390
|
+
if (autoFix) {
|
|
391
|
+
// Replace "/" with "./data" and "~" with "./"
|
|
392
|
+
if (rootIndex !== -1) {
|
|
393
|
+
server.args[rootIndex] = './data';
|
|
394
|
+
}
|
|
395
|
+
if (homeIndex !== -1) {
|
|
396
|
+
server.args[homeIndex] = './';
|
|
397
|
+
}
|
|
398
|
+
mcp001Fixed = true;
|
|
399
|
+
}
|
|
400
|
+
}
|
|
401
|
+
}
|
|
402
|
+
// Check for unrestricted shell access
|
|
403
|
+
if (name.includes('shell') ||
|
|
404
|
+
server.command?.includes('shell')) {
|
|
405
|
+
// Shell server without allowedCommands is dangerous
|
|
406
|
+
if (!server.args?.some((arg) => arg.includes('allowed'))) {
|
|
407
|
+
hasUnrestrictedShell = true;
|
|
408
|
+
}
|
|
409
|
+
}
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
// Save fixed config
|
|
413
|
+
if (mcp001Fixed) {
|
|
414
|
+
await fs.writeFile(mcpConfigPath, JSON.stringify(config, null, 2));
|
|
415
|
+
}
|
|
416
|
+
findings.push({
|
|
417
|
+
checkId: 'MCP-001',
|
|
418
|
+
name: 'MCP Root Filesystem Access',
|
|
419
|
+
description: 'MCP server configured with root or home directory access',
|
|
420
|
+
category: 'mcp',
|
|
421
|
+
severity: 'high',
|
|
422
|
+
passed: !hasRootAccess || mcp001Fixed,
|
|
423
|
+
message: mcp001Fixed
|
|
424
|
+
? 'Changed dangerous filesystem paths to scoped directories'
|
|
425
|
+
: hasRootAccess
|
|
426
|
+
? 'MCP server has dangerous filesystem access (/ or ~)'
|
|
427
|
+
: 'MCP filesystem access is scoped appropriately',
|
|
428
|
+
fixable: true,
|
|
429
|
+
fixed: mcp001Fixed,
|
|
430
|
+
fixMessage: mcp001Fixed ? 'Replaced "/" with "./data" and "~" with "./"' : undefined,
|
|
431
|
+
});
|
|
432
|
+
findings.push({
|
|
433
|
+
checkId: 'MCP-002',
|
|
434
|
+
name: 'MCP Unrestricted Shell',
|
|
435
|
+
description: 'MCP shell server without command restrictions',
|
|
436
|
+
category: 'mcp',
|
|
437
|
+
severity: 'critical',
|
|
438
|
+
passed: !hasUnrestrictedShell,
|
|
439
|
+
message: hasUnrestrictedShell
|
|
440
|
+
? 'MCP shell server has no command restrictions'
|
|
441
|
+
: 'MCP shell server is properly restricted or not present',
|
|
442
|
+
fixable: false,
|
|
443
|
+
});
|
|
444
|
+
}
|
|
445
|
+
catch {
|
|
446
|
+
// mcp.json doesn't exist or is invalid
|
|
447
|
+
findings.push({
|
|
448
|
+
checkId: 'MCP-001',
|
|
449
|
+
name: 'MCP Root Filesystem Access',
|
|
450
|
+
description: 'MCP server configured with root or home directory access',
|
|
451
|
+
category: 'mcp',
|
|
452
|
+
severity: 'high',
|
|
453
|
+
passed: true,
|
|
454
|
+
message: 'No mcp.json found (OK)',
|
|
455
|
+
fixable: true,
|
|
456
|
+
});
|
|
457
|
+
findings.push({
|
|
458
|
+
checkId: 'MCP-002',
|
|
459
|
+
name: 'MCP Unrestricted Shell',
|
|
460
|
+
description: 'MCP shell server without command restrictions',
|
|
461
|
+
category: 'mcp',
|
|
462
|
+
severity: 'critical',
|
|
463
|
+
passed: true,
|
|
464
|
+
message: 'No mcp.json found (OK)',
|
|
465
|
+
fixable: false,
|
|
466
|
+
});
|
|
467
|
+
}
|
|
468
|
+
return findings;
|
|
469
|
+
}
|
|
470
|
+
async checkFilePermissions(targetDir, autoFix) {
|
|
471
|
+
const findings = [];
|
|
472
|
+
// Files that should have restricted permissions
|
|
473
|
+
const sensitiveFiles = [
|
|
474
|
+
'secrets.json',
|
|
475
|
+
'.env',
|
|
476
|
+
'.env.local',
|
|
477
|
+
'credentials.json',
|
|
478
|
+
'auth.json',
|
|
479
|
+
];
|
|
480
|
+
const permissionIssues = [];
|
|
481
|
+
for (const filename of sensitiveFiles) {
|
|
482
|
+
const filePath = path.join(targetDir, filename);
|
|
483
|
+
try {
|
|
484
|
+
const stats = await fs.stat(filePath);
|
|
485
|
+
const mode = stats.mode & 0o777;
|
|
486
|
+
// Check if world-readable (others have read permission)
|
|
487
|
+
if (mode & 0o004) {
|
|
488
|
+
permissionIssues.push(filename);
|
|
489
|
+
if (autoFix) {
|
|
490
|
+
await fs.chmod(filePath, 0o600);
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
}
|
|
494
|
+
catch {
|
|
495
|
+
// File doesn't exist, skip
|
|
496
|
+
}
|
|
497
|
+
}
|
|
498
|
+
const passed = permissionIssues.length === 0;
|
|
499
|
+
findings.push({
|
|
500
|
+
checkId: 'PERM-001',
|
|
501
|
+
name: 'Sensitive File Permissions',
|
|
502
|
+
description: 'Sensitive files have overly permissive permissions',
|
|
503
|
+
category: 'permissions',
|
|
504
|
+
severity: 'high',
|
|
505
|
+
passed,
|
|
506
|
+
message: passed
|
|
507
|
+
? 'All sensitive files have appropriate permissions'
|
|
508
|
+
: `Files with overly permissive permissions: ${permissionIssues.join(', ')}`,
|
|
509
|
+
fixable: true,
|
|
510
|
+
fixed: autoFix && !passed,
|
|
511
|
+
fixMessage: autoFix && !passed ? 'Changed permissions to 600' : undefined,
|
|
512
|
+
details: passed ? undefined : { files: permissionIssues },
|
|
513
|
+
});
|
|
514
|
+
return findings;
|
|
515
|
+
}
|
|
516
|
+
async checkGitSecurity(targetDir, autoFix) {
|
|
517
|
+
const findings = [];
|
|
518
|
+
// GIT-001: Check for missing .gitignore
|
|
519
|
+
const gitignorePath = path.join(targetDir, '.gitignore');
|
|
520
|
+
let gitignoreExists = false;
|
|
521
|
+
let gitignoreContent = '';
|
|
522
|
+
try {
|
|
523
|
+
gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');
|
|
524
|
+
gitignoreExists = true;
|
|
525
|
+
}
|
|
526
|
+
catch { }
|
|
527
|
+
// Default .gitignore content
|
|
528
|
+
const defaultGitignore = `# Secrets and credentials
|
|
529
|
+
.env
|
|
530
|
+
.env.*
|
|
531
|
+
secrets.json
|
|
532
|
+
credentials.json
|
|
533
|
+
*.pem
|
|
534
|
+
*.key
|
|
535
|
+
|
|
536
|
+
# IDE
|
|
537
|
+
.idea/
|
|
538
|
+
.vscode/
|
|
539
|
+
|
|
540
|
+
# Dependencies
|
|
541
|
+
node_modules/
|
|
542
|
+
|
|
543
|
+
# Build
|
|
544
|
+
dist/
|
|
545
|
+
`;
|
|
546
|
+
let git001Fixed = false;
|
|
547
|
+
if (!gitignoreExists && autoFix) {
|
|
548
|
+
await fs.writeFile(gitignorePath, defaultGitignore);
|
|
549
|
+
gitignoreContent = defaultGitignore;
|
|
550
|
+
gitignoreExists = true;
|
|
551
|
+
git001Fixed = true;
|
|
552
|
+
}
|
|
553
|
+
findings.push({
|
|
554
|
+
checkId: 'GIT-001',
|
|
555
|
+
name: 'Missing .gitignore',
|
|
556
|
+
description: 'No .gitignore file found to prevent accidental commits of sensitive files',
|
|
557
|
+
category: 'git',
|
|
558
|
+
severity: 'medium',
|
|
559
|
+
passed: gitignoreExists,
|
|
560
|
+
message: git001Fixed
|
|
561
|
+
? '.gitignore file created with recommended patterns'
|
|
562
|
+
: gitignoreExists
|
|
563
|
+
? '.gitignore file present'
|
|
564
|
+
: 'No .gitignore file found - sensitive files may be accidentally committed',
|
|
565
|
+
fixable: true,
|
|
566
|
+
fixed: git001Fixed,
|
|
567
|
+
fixMessage: git001Fixed ? 'Created .gitignore with secure defaults' : undefined,
|
|
568
|
+
});
|
|
569
|
+
// GIT-002: Check for missing sensitive patterns in .gitignore
|
|
570
|
+
const sensitivePatterns = ['.env', 'secrets.json', '*.pem', '*.key'];
|
|
571
|
+
const missingPatterns = [];
|
|
572
|
+
for (const pattern of sensitivePatterns) {
|
|
573
|
+
if (!gitignoreContent.includes(pattern) && !gitignoreContent.includes(pattern.replace('*', ''))) {
|
|
574
|
+
missingPatterns.push(pattern);
|
|
575
|
+
}
|
|
576
|
+
}
|
|
577
|
+
let git002Fixed = false;
|
|
578
|
+
if (missingPatterns.length > 0 && autoFix) {
|
|
579
|
+
const patternsToAdd = '\n# Security patterns (auto-added)\n' + missingPatterns.join('\n') + '\n';
|
|
580
|
+
gitignoreContent += patternsToAdd;
|
|
581
|
+
await fs.writeFile(gitignorePath, gitignoreContent);
|
|
582
|
+
git002Fixed = true;
|
|
583
|
+
}
|
|
584
|
+
findings.push({
|
|
585
|
+
checkId: 'GIT-002',
|
|
586
|
+
name: 'Incomplete .gitignore',
|
|
587
|
+
description: '.gitignore missing patterns for sensitive files',
|
|
588
|
+
category: 'git',
|
|
589
|
+
severity: 'high',
|
|
590
|
+
passed: missingPatterns.length === 0 || git002Fixed,
|
|
591
|
+
message: git002Fixed
|
|
592
|
+
? `Added missing patterns to .gitignore: ${missingPatterns.join(', ')}`
|
|
593
|
+
: missingPatterns.length === 0
|
|
594
|
+
? '.gitignore has all recommended sensitive file patterns'
|
|
595
|
+
: `Missing patterns in .gitignore: ${missingPatterns.join(', ')}`,
|
|
596
|
+
fixable: true,
|
|
597
|
+
fixed: git002Fixed,
|
|
598
|
+
fixMessage: git002Fixed ? `Added: ${missingPatterns.join(', ')}` : undefined,
|
|
599
|
+
details: missingPatterns.length > 0 && !git002Fixed ? { missing: missingPatterns } : undefined,
|
|
600
|
+
});
|
|
601
|
+
// GIT-003: Check if .env exists but not in .gitignore
|
|
602
|
+
let envExists = false;
|
|
603
|
+
try {
|
|
604
|
+
await fs.access(path.join(targetDir, '.env'));
|
|
605
|
+
envExists = true;
|
|
606
|
+
}
|
|
607
|
+
catch { }
|
|
608
|
+
// Re-read gitignore in case we modified it
|
|
609
|
+
try {
|
|
610
|
+
gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');
|
|
611
|
+
}
|
|
612
|
+
catch { }
|
|
613
|
+
const envIgnored = gitignoreContent.includes('.env');
|
|
614
|
+
const envAtRisk = envExists && !envIgnored;
|
|
615
|
+
let git003Fixed = false;
|
|
616
|
+
if (envAtRisk && autoFix) {
|
|
617
|
+
gitignoreContent += '\n.env\n';
|
|
618
|
+
await fs.writeFile(gitignorePath, gitignoreContent);
|
|
619
|
+
git003Fixed = true;
|
|
620
|
+
}
|
|
621
|
+
findings.push({
|
|
622
|
+
checkId: 'GIT-003',
|
|
623
|
+
name: '.env File at Risk',
|
|
624
|
+
description: '.env file exists but may not be ignored by git',
|
|
625
|
+
category: 'git',
|
|
626
|
+
severity: 'critical',
|
|
627
|
+
passed: !envAtRisk || git003Fixed,
|
|
628
|
+
message: git003Fixed
|
|
629
|
+
? 'Added .env to .gitignore'
|
|
630
|
+
: envAtRisk
|
|
631
|
+
? '.env file exists but is not in .gitignore - secrets may be committed!'
|
|
632
|
+
: envExists
|
|
633
|
+
? '.env file is properly ignored'
|
|
634
|
+
: 'No .env file present',
|
|
635
|
+
fixable: true,
|
|
636
|
+
fixed: git003Fixed,
|
|
637
|
+
});
|
|
638
|
+
return findings;
|
|
639
|
+
}
|
|
640
|
+
async checkNetworkSecurity(targetDir, autoFix) {
|
|
641
|
+
const findings = [];
|
|
642
|
+
const mcpConfigPath = path.join(targetDir, 'mcp.json');
|
|
643
|
+
let mcpConfig = null;
|
|
644
|
+
let mcpContent = '';
|
|
645
|
+
try {
|
|
646
|
+
mcpContent = await fs.readFile(mcpConfigPath, 'utf-8');
|
|
647
|
+
mcpConfig = JSON.parse(mcpContent);
|
|
648
|
+
}
|
|
649
|
+
catch { }
|
|
650
|
+
// NET-001: Check for servers bound to 0.0.0.0
|
|
651
|
+
let boundToAllInterfaces = false;
|
|
652
|
+
if (mcpConfig?.servers) {
|
|
653
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
654
|
+
if (server.args?.some((arg) => arg.includes('0.0.0.0'))) {
|
|
655
|
+
boundToAllInterfaces = true;
|
|
656
|
+
break;
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
let net001Fixed = false;
|
|
661
|
+
if (boundToAllInterfaces && autoFix && mcpContent) {
|
|
662
|
+
// Replace 0.0.0.0 with 127.0.0.1 in the file
|
|
663
|
+
const fixedContent = mcpContent.replace(/0\.0\.0\.0/g, '127.0.0.1');
|
|
664
|
+
await fs.writeFile(mcpConfigPath, fixedContent);
|
|
665
|
+
net001Fixed = true;
|
|
666
|
+
}
|
|
667
|
+
findings.push({
|
|
668
|
+
checkId: 'NET-001',
|
|
669
|
+
name: 'Server Bound to All Interfaces',
|
|
670
|
+
description: 'MCP server bound to 0.0.0.0 exposes it to all network interfaces',
|
|
671
|
+
category: 'network',
|
|
672
|
+
severity: 'critical',
|
|
673
|
+
passed: !boundToAllInterfaces || net001Fixed,
|
|
674
|
+
message: net001Fixed
|
|
675
|
+
? 'Changed 0.0.0.0 to 127.0.0.1 in mcp.json'
|
|
676
|
+
: boundToAllInterfaces
|
|
677
|
+
? 'MCP server bound to 0.0.0.0 - accessible from any network interface'
|
|
678
|
+
: 'No servers bound to 0.0.0.0',
|
|
679
|
+
fixable: true,
|
|
680
|
+
fixed: net001Fixed,
|
|
681
|
+
fixMessage: net001Fixed ? 'Replaced 0.0.0.0 with 127.0.0.1' : undefined,
|
|
682
|
+
});
|
|
683
|
+
// NET-002: Check for remote MCP servers without TLS
|
|
684
|
+
let hasInsecureRemote = false;
|
|
685
|
+
if (mcpConfig?.servers) {
|
|
686
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
687
|
+
if (server.url && server.url.startsWith('http://')) {
|
|
688
|
+
hasInsecureRemote = true;
|
|
689
|
+
break;
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
}
|
|
693
|
+
findings.push({
|
|
694
|
+
checkId: 'NET-002',
|
|
695
|
+
name: 'Remote MCP Without TLS',
|
|
696
|
+
description: 'Remote MCP server configured without HTTPS',
|
|
697
|
+
category: 'network',
|
|
698
|
+
severity: 'high',
|
|
699
|
+
passed: !hasInsecureRemote,
|
|
700
|
+
message: hasInsecureRemote
|
|
701
|
+
? 'Remote MCP server using HTTP instead of HTTPS - traffic is unencrypted'
|
|
702
|
+
: 'All remote MCP servers use HTTPS or no remote servers configured',
|
|
703
|
+
fixable: false,
|
|
704
|
+
});
|
|
705
|
+
return findings;
|
|
706
|
+
}
|
|
707
|
+
async checkMcpAdvanced(targetDir, autoFix) {
|
|
708
|
+
const findings = [];
|
|
709
|
+
const mcpConfigPath = path.join(targetDir, 'mcp.json');
|
|
710
|
+
let mcpConfig = null;
|
|
711
|
+
try {
|
|
712
|
+
const content = await fs.readFile(mcpConfigPath, 'utf-8');
|
|
713
|
+
mcpConfig = JSON.parse(content);
|
|
714
|
+
}
|
|
715
|
+
catch { }
|
|
716
|
+
// Credential patterns with their env var names for auto-fix (stricter patterns to reduce false positives)
|
|
717
|
+
const credPatterns = [
|
|
718
|
+
{ name: 'ANTHROPIC_API_KEY', pattern: /sk-ant-api\d{2}-[a-zA-Z0-9_-]{20,}/, envVar: 'ANTHROPIC_API_KEY' },
|
|
719
|
+
{ name: 'OPENAI_API_KEY', pattern: /sk-proj-[a-zA-Z0-9]{20,}/, envVar: 'OPENAI_API_KEY' },
|
|
720
|
+
{ name: 'OPENAI_API_KEY', pattern: /sk-[a-zA-Z0-9]{48,}/, envVar: 'OPENAI_API_KEY' },
|
|
721
|
+
{ name: 'GITHUB_TOKEN', pattern: /ghp_[a-zA-Z0-9]{36}/, envVar: 'GITHUB_TOKEN' },
|
|
722
|
+
{ name: 'GITHUB_TOKEN', pattern: /github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}/, envVar: 'GITHUB_TOKEN' },
|
|
723
|
+
{ name: 'GOOGLE_API_KEY', pattern: /AIza[0-9A-Za-z_-]{35}/, envVar: 'GOOGLE_API_KEY' },
|
|
724
|
+
{ name: 'STRIPE_KEY', pattern: /sk_live_[0-9a-zA-Z]{24,}/, envVar: 'STRIPE_SECRET_KEY' },
|
|
725
|
+
{ name: 'SLACK_TOKEN', pattern: /xox[baprs]-[0-9]{10,13}-[0-9]{10,13}-[a-zA-Z0-9]{24}/, envVar: 'SLACK_TOKEN' },
|
|
726
|
+
{ name: 'SENDGRID_KEY', pattern: /SG\.[a-zA-Z0-9_-]{22}\.[a-zA-Z0-9_-]{43}/, envVar: 'SENDGRID_API_KEY' },
|
|
727
|
+
];
|
|
728
|
+
// MCP-003: Check for secrets in env vars
|
|
729
|
+
let hasHardcodedSecrets = false;
|
|
730
|
+
let mcp003Fixed = false;
|
|
731
|
+
if (mcpConfig?.servers) {
|
|
732
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
733
|
+
if (server.env) {
|
|
734
|
+
for (const [key, value] of Object.entries(server.env)) {
|
|
735
|
+
// Check if value is a hardcoded secret (not a reference)
|
|
736
|
+
if (typeof value === 'string' && !value.includes('${')) {
|
|
737
|
+
for (const { pattern, envVar } of credPatterns) {
|
|
738
|
+
if (pattern.test(value)) {
|
|
739
|
+
hasHardcodedSecrets = true;
|
|
740
|
+
if (autoFix) {
|
|
741
|
+
// Replace with env var reference
|
|
742
|
+
server.env[key] = '${' + envVar + '}';
|
|
743
|
+
mcp003Fixed = true;
|
|
744
|
+
}
|
|
745
|
+
break;
|
|
746
|
+
}
|
|
747
|
+
}
|
|
748
|
+
}
|
|
749
|
+
}
|
|
750
|
+
}
|
|
751
|
+
}
|
|
752
|
+
// Save fixed config
|
|
753
|
+
if (mcp003Fixed) {
|
|
754
|
+
await fs.writeFile(mcpConfigPath, JSON.stringify(mcpConfig, null, 2));
|
|
755
|
+
}
|
|
756
|
+
}
|
|
757
|
+
findings.push({
|
|
758
|
+
checkId: 'MCP-003',
|
|
759
|
+
name: 'MCP Hardcoded Secrets',
|
|
760
|
+
description: 'MCP server configuration contains hardcoded secrets in environment variables',
|
|
761
|
+
category: 'mcp',
|
|
762
|
+
severity: 'critical',
|
|
763
|
+
passed: !hasHardcodedSecrets || mcp003Fixed,
|
|
764
|
+
message: mcp003Fixed
|
|
765
|
+
? 'Replaced hardcoded secrets with environment variable references'
|
|
766
|
+
: hasHardcodedSecrets
|
|
767
|
+
? 'MCP server has hardcoded secrets in env vars - use environment variable references instead'
|
|
768
|
+
: 'No hardcoded secrets in MCP env vars',
|
|
769
|
+
fixable: true,
|
|
770
|
+
fixed: mcp003Fixed,
|
|
771
|
+
fixMessage: mcp003Fixed ? 'Replaced with ${ENV_VAR} references' : undefined,
|
|
772
|
+
});
|
|
773
|
+
// MCP-004: Check for default credentials
|
|
774
|
+
const defaultPasswords = ['postgres', 'password', 'admin', 'root', '123456', 'default'];
|
|
775
|
+
let hasDefaultCreds = false;
|
|
776
|
+
if (mcpConfig?.servers) {
|
|
777
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
778
|
+
if (server.args) {
|
|
779
|
+
const argsStr = server.args.join(' ').toLowerCase();
|
|
780
|
+
for (const pwd of defaultPasswords) {
|
|
781
|
+
if (argsStr.includes(`password`) && argsStr.includes(pwd)) {
|
|
782
|
+
hasDefaultCreds = true;
|
|
783
|
+
break;
|
|
784
|
+
}
|
|
785
|
+
}
|
|
786
|
+
}
|
|
787
|
+
}
|
|
788
|
+
}
|
|
789
|
+
findings.push({
|
|
790
|
+
checkId: 'MCP-004',
|
|
791
|
+
name: 'MCP Default Credentials',
|
|
792
|
+
description: 'MCP server using default or weak credentials',
|
|
793
|
+
category: 'mcp',
|
|
794
|
+
severity: 'critical',
|
|
795
|
+
passed: !hasDefaultCreds,
|
|
796
|
+
message: hasDefaultCreds
|
|
797
|
+
? 'MCP server using default credentials - change to strong unique passwords'
|
|
798
|
+
: 'No default credentials detected in MCP config',
|
|
799
|
+
fixable: false,
|
|
800
|
+
});
|
|
801
|
+
// MCP-005: Check for wildcard tool access
|
|
802
|
+
let hasWildcardTools = false;
|
|
803
|
+
if (mcpConfig?.servers) {
|
|
804
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
805
|
+
if (server.allowedTools?.includes('*')) {
|
|
806
|
+
hasWildcardTools = true;
|
|
807
|
+
break;
|
|
808
|
+
}
|
|
809
|
+
}
|
|
810
|
+
}
|
|
811
|
+
findings.push({
|
|
812
|
+
checkId: 'MCP-005',
|
|
813
|
+
name: 'MCP Wildcard Tools',
|
|
814
|
+
description: 'MCP server allows all tools without restrictions',
|
|
815
|
+
category: 'mcp',
|
|
816
|
+
severity: 'high',
|
|
817
|
+
passed: !hasWildcardTools,
|
|
818
|
+
message: hasWildcardTools
|
|
819
|
+
? 'MCP server allows all tools (*) - restrict to specific tools needed'
|
|
820
|
+
: 'MCP tools are properly scoped',
|
|
821
|
+
fixable: false,
|
|
822
|
+
});
|
|
823
|
+
return findings;
|
|
824
|
+
}
|
|
825
|
+
async checkClaudeAdvanced(targetDir, autoFix) {
|
|
826
|
+
const findings = [];
|
|
827
|
+
const claudeSettingsPath = path.join(targetDir, '.claude', 'settings.json');
|
|
828
|
+
let claudeSettings = null;
|
|
829
|
+
try {
|
|
830
|
+
const content = await fs.readFile(claudeSettingsPath, 'utf-8');
|
|
831
|
+
claudeSettings = JSON.parse(content);
|
|
832
|
+
}
|
|
833
|
+
catch { }
|
|
834
|
+
// CLAUDE-002: Check for overly permissive allowed commands
|
|
835
|
+
let hasOverlyPermissive = false;
|
|
836
|
+
const permissions = claudeSettings?.permissions;
|
|
837
|
+
if (permissions?.allow) {
|
|
838
|
+
for (const perm of permissions.allow) {
|
|
839
|
+
if (perm.includes('(*)') || perm === 'Bash(*)' || perm === 'Read(*)' || perm === 'Write(*)') {
|
|
840
|
+
hasOverlyPermissive = true;
|
|
841
|
+
break;
|
|
842
|
+
}
|
|
843
|
+
}
|
|
844
|
+
}
|
|
845
|
+
findings.push({
|
|
846
|
+
checkId: 'CLAUDE-002',
|
|
847
|
+
name: 'Overly Permissive Claude Permissions',
|
|
848
|
+
description: 'Claude Code settings allow unrestricted tool access',
|
|
849
|
+
category: 'claude-code',
|
|
850
|
+
severity: 'high',
|
|
851
|
+
passed: !hasOverlyPermissive,
|
|
852
|
+
message: hasOverlyPermissive
|
|
853
|
+
? 'Claude Code has overly permissive permissions (wildcards) - scope to specific paths/commands'
|
|
854
|
+
: 'Claude Code permissions are appropriately scoped',
|
|
855
|
+
fixable: false,
|
|
856
|
+
});
|
|
857
|
+
// CLAUDE-003: Check for dangerous Bash patterns
|
|
858
|
+
let hasDangerousBash = false;
|
|
859
|
+
const dangerousPatterns = ['rm -rf', 'rm -r', 'chmod 777', 'curl | sh', 'wget | sh', 'sudo'];
|
|
860
|
+
if (permissions?.allow) {
|
|
861
|
+
for (const perm of permissions.allow) {
|
|
862
|
+
if (perm.startsWith('Bash(')) {
|
|
863
|
+
for (const dangerous of dangerousPatterns) {
|
|
864
|
+
if (perm.includes(dangerous)) {
|
|
865
|
+
hasDangerousBash = true;
|
|
866
|
+
break;
|
|
867
|
+
}
|
|
868
|
+
}
|
|
869
|
+
}
|
|
870
|
+
}
|
|
871
|
+
}
|
|
872
|
+
findings.push({
|
|
873
|
+
checkId: 'CLAUDE-003',
|
|
874
|
+
name: 'Dangerous Bash Permissions',
|
|
875
|
+
description: 'Claude Code allows dangerous shell commands',
|
|
876
|
+
category: 'claude-code',
|
|
877
|
+
severity: 'critical',
|
|
878
|
+
passed: !hasDangerousBash,
|
|
879
|
+
message: hasDangerousBash
|
|
880
|
+
? 'Claude Code allows dangerous Bash commands (rm -rf, sudo, etc.) - remove or deny these'
|
|
881
|
+
: 'No dangerous Bash patterns in Claude permissions',
|
|
882
|
+
fixable: false,
|
|
883
|
+
});
|
|
884
|
+
return findings;
|
|
885
|
+
}
|
|
886
|
+
async checkCursorConfig(targetDir, autoFix) {
|
|
887
|
+
const findings = [];
|
|
888
|
+
// Check multiple Cursor config locations
|
|
889
|
+
const cursorPaths = [
|
|
890
|
+
path.join(targetDir, '.cursor', 'rules'),
|
|
891
|
+
path.join(targetDir, '.cursorrules'),
|
|
892
|
+
];
|
|
893
|
+
let hasCredentialsInRules = false;
|
|
894
|
+
for (const cursorPath of cursorPaths) {
|
|
895
|
+
try {
|
|
896
|
+
const content = await fs.readFile(cursorPath, 'utf-8');
|
|
897
|
+
for (const { pattern } of CREDENTIAL_PATTERNS) {
|
|
898
|
+
if (pattern.test(content)) {
|
|
899
|
+
hasCredentialsInRules = true;
|
|
900
|
+
break;
|
|
901
|
+
}
|
|
902
|
+
}
|
|
903
|
+
}
|
|
904
|
+
catch { }
|
|
905
|
+
}
|
|
906
|
+
findings.push({
|
|
907
|
+
checkId: 'CURSOR-001',
|
|
908
|
+
name: 'Cursor Rules Contain Credentials',
|
|
909
|
+
description: 'Cursor configuration files contain exposed credentials',
|
|
910
|
+
category: 'cursor',
|
|
911
|
+
severity: 'critical',
|
|
912
|
+
passed: !hasCredentialsInRules,
|
|
913
|
+
message: hasCredentialsInRules
|
|
914
|
+
? 'Cursor rules contain exposed credentials - remove and use environment variables'
|
|
915
|
+
: 'No credentials found in Cursor rules',
|
|
916
|
+
fixable: false,
|
|
917
|
+
});
|
|
918
|
+
return findings;
|
|
919
|
+
}
|
|
920
|
+
async checkVscodeConfig(targetDir, autoFix) {
|
|
921
|
+
const findings = [];
|
|
922
|
+
const vscodeMcpPath = path.join(targetDir, '.vscode', 'mcp.json');
|
|
923
|
+
let vscodeConfig = null;
|
|
924
|
+
let vscodeContent = '';
|
|
925
|
+
try {
|
|
926
|
+
vscodeContent = await fs.readFile(vscodeMcpPath, 'utf-8');
|
|
927
|
+
vscodeConfig = JSON.parse(vscodeContent);
|
|
928
|
+
}
|
|
929
|
+
catch { }
|
|
930
|
+
// VSCODE-001: Check for credentials in VSCode MCP config
|
|
931
|
+
let hasCredentials = false;
|
|
932
|
+
for (const { pattern } of CREDENTIAL_PATTERNS) {
|
|
933
|
+
if (pattern.test(vscodeContent)) {
|
|
934
|
+
hasCredentials = true;
|
|
935
|
+
break;
|
|
936
|
+
}
|
|
937
|
+
}
|
|
938
|
+
findings.push({
|
|
939
|
+
checkId: 'VSCODE-001',
|
|
940
|
+
name: 'VSCode MCP Config Credentials',
|
|
941
|
+
description: 'VSCode MCP configuration contains exposed credentials',
|
|
942
|
+
category: 'vscode',
|
|
943
|
+
severity: 'critical',
|
|
944
|
+
passed: !hasCredentials,
|
|
945
|
+
message: hasCredentials
|
|
946
|
+
? 'VSCode MCP config contains exposed credentials'
|
|
947
|
+
: 'No credentials in VSCode MCP config',
|
|
948
|
+
fixable: false,
|
|
949
|
+
});
|
|
950
|
+
// VSCODE-002: Check for overly permissive paths
|
|
951
|
+
let hasRootAccess = false;
|
|
952
|
+
if (vscodeConfig?.servers) {
|
|
953
|
+
for (const [, server] of Object.entries(vscodeConfig.servers)) {
|
|
954
|
+
if (server.args?.some((arg) => arg === '/' || arg === '~')) {
|
|
955
|
+
hasRootAccess = true;
|
|
956
|
+
break;
|
|
957
|
+
}
|
|
958
|
+
}
|
|
959
|
+
}
|
|
960
|
+
findings.push({
|
|
961
|
+
checkId: 'VSCODE-002',
|
|
962
|
+
name: 'VSCode MCP Root Access',
|
|
963
|
+
description: 'VSCode MCP server has root or home directory access',
|
|
964
|
+
category: 'vscode',
|
|
965
|
+
severity: 'high',
|
|
966
|
+
passed: !hasRootAccess,
|
|
967
|
+
message: hasRootAccess
|
|
968
|
+
? 'VSCode MCP server has dangerous filesystem access'
|
|
969
|
+
: 'VSCode MCP filesystem access is scoped',
|
|
970
|
+
fixable: false,
|
|
971
|
+
});
|
|
972
|
+
return findings;
|
|
973
|
+
}
|
|
974
|
+
async checkCredentialsAdvanced(targetDir, autoFix) {
|
|
975
|
+
const findings = [];
|
|
976
|
+
// CRED-002: Check for private key files
|
|
977
|
+
const keyExtensions = ['.key', '.pem'];
|
|
978
|
+
const foundKeys = [];
|
|
979
|
+
try {
|
|
980
|
+
const files = await fs.readdir(targetDir);
|
|
981
|
+
for (const file of files) {
|
|
982
|
+
if (keyExtensions.some((ext) => file.endsWith(ext))) {
|
|
983
|
+
foundKeys.push(file);
|
|
984
|
+
}
|
|
985
|
+
}
|
|
986
|
+
}
|
|
987
|
+
catch { }
|
|
988
|
+
findings.push({
|
|
989
|
+
checkId: 'CRED-002',
|
|
990
|
+
name: 'Private Key Files',
|
|
991
|
+
description: 'Private key or certificate files found in project directory',
|
|
992
|
+
category: 'credentials',
|
|
993
|
+
severity: 'critical',
|
|
994
|
+
passed: foundKeys.length === 0,
|
|
995
|
+
message: foundKeys.length === 0
|
|
996
|
+
? 'No private key files found in project root'
|
|
997
|
+
: `Private key files found: ${foundKeys.join(', ')} - move to secure location`,
|
|
998
|
+
fixable: false,
|
|
999
|
+
details: foundKeys.length > 0 ? { files: foundKeys } : undefined,
|
|
1000
|
+
});
|
|
1001
|
+
// CRED-003: Check package.json for hardcoded secrets
|
|
1002
|
+
let hasSecretsInPackageJson = false;
|
|
1003
|
+
try {
|
|
1004
|
+
const content = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1005
|
+
for (const { pattern } of CREDENTIAL_PATTERNS) {
|
|
1006
|
+
if (pattern.test(content)) {
|
|
1007
|
+
hasSecretsInPackageJson = true;
|
|
1008
|
+
break;
|
|
1009
|
+
}
|
|
1010
|
+
}
|
|
1011
|
+
}
|
|
1012
|
+
catch { }
|
|
1013
|
+
findings.push({
|
|
1014
|
+
checkId: 'CRED-003',
|
|
1015
|
+
name: 'Secrets in package.json',
|
|
1016
|
+
description: 'package.json contains hardcoded secrets',
|
|
1017
|
+
category: 'credentials',
|
|
1018
|
+
severity: 'critical',
|
|
1019
|
+
passed: !hasSecretsInPackageJson,
|
|
1020
|
+
message: hasSecretsInPackageJson
|
|
1021
|
+
? 'package.json contains hardcoded secrets - move to environment variables'
|
|
1022
|
+
: 'No secrets found in package.json',
|
|
1023
|
+
fixable: false,
|
|
1024
|
+
});
|
|
1025
|
+
// CRED-004: Check for JWT secrets in config
|
|
1026
|
+
let hasJwtSecret = false;
|
|
1027
|
+
const configFiles = ['config.json', 'config.yaml', 'config.yml', 'settings.json'];
|
|
1028
|
+
for (const file of configFiles) {
|
|
1029
|
+
try {
|
|
1030
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1031
|
+
if (content.includes('jwt') && (content.includes('secret') || content.includes('key'))) {
|
|
1032
|
+
// Check if it's a hardcoded value (not env reference)
|
|
1033
|
+
if (!content.includes('${') && !content.includes('process.env')) {
|
|
1034
|
+
hasJwtSecret = true;
|
|
1035
|
+
break;
|
|
1036
|
+
}
|
|
1037
|
+
}
|
|
1038
|
+
}
|
|
1039
|
+
catch { }
|
|
1040
|
+
}
|
|
1041
|
+
findings.push({
|
|
1042
|
+
checkId: 'CRED-004',
|
|
1043
|
+
name: 'JWT Secret in Config',
|
|
1044
|
+
description: 'JWT secret found hardcoded in configuration file',
|
|
1045
|
+
category: 'credentials',
|
|
1046
|
+
severity: 'critical',
|
|
1047
|
+
passed: !hasJwtSecret,
|
|
1048
|
+
message: hasJwtSecret
|
|
1049
|
+
? 'JWT secret hardcoded in config - use environment variable'
|
|
1050
|
+
: 'No hardcoded JWT secrets found',
|
|
1051
|
+
fixable: false,
|
|
1052
|
+
});
|
|
1053
|
+
return findings;
|
|
1054
|
+
}
|
|
1055
|
+
async checkPermissionsAdvanced(targetDir, autoFix) {
|
|
1056
|
+
const findings = [];
|
|
1057
|
+
// PERM-002: Check for executable config files
|
|
1058
|
+
const configFiles = ['config.json', 'mcp.json', 'settings.json', '.env'];
|
|
1059
|
+
const executableConfigs = [];
|
|
1060
|
+
for (const file of configFiles) {
|
|
1061
|
+
try {
|
|
1062
|
+
const stats = await fs.stat(path.join(targetDir, file));
|
|
1063
|
+
const mode = stats.mode & 0o777;
|
|
1064
|
+
if (mode & 0o111) {
|
|
1065
|
+
executableConfigs.push(file);
|
|
1066
|
+
}
|
|
1067
|
+
}
|
|
1068
|
+
catch { }
|
|
1069
|
+
}
|
|
1070
|
+
findings.push({
|
|
1071
|
+
checkId: 'PERM-002',
|
|
1072
|
+
name: 'Executable Config Files',
|
|
1073
|
+
description: 'Configuration files have executable permission',
|
|
1074
|
+
category: 'permissions',
|
|
1075
|
+
severity: 'medium',
|
|
1076
|
+
passed: executableConfigs.length === 0,
|
|
1077
|
+
message: executableConfigs.length === 0
|
|
1078
|
+
? 'No config files have executable permissions'
|
|
1079
|
+
: `Config files with executable permission: ${executableConfigs.join(', ')}`,
|
|
1080
|
+
fixable: true,
|
|
1081
|
+
fixed: false,
|
|
1082
|
+
details: executableConfigs.length > 0 ? { files: executableConfigs } : undefined,
|
|
1083
|
+
});
|
|
1084
|
+
// PERM-003: Check for group-writable sensitive files
|
|
1085
|
+
const sensitiveFiles = ['.env', '.env.local', 'secrets.json', 'credentials.json'];
|
|
1086
|
+
const groupWritable = [];
|
|
1087
|
+
for (const file of sensitiveFiles) {
|
|
1088
|
+
try {
|
|
1089
|
+
const stats = await fs.stat(path.join(targetDir, file));
|
|
1090
|
+
const mode = stats.mode & 0o777;
|
|
1091
|
+
if (mode & 0o020) {
|
|
1092
|
+
groupWritable.push(file);
|
|
1093
|
+
}
|
|
1094
|
+
}
|
|
1095
|
+
catch { }
|
|
1096
|
+
}
|
|
1097
|
+
findings.push({
|
|
1098
|
+
checkId: 'PERM-003',
|
|
1099
|
+
name: 'Group-Writable Sensitive Files',
|
|
1100
|
+
description: 'Sensitive files have group write permission',
|
|
1101
|
+
category: 'permissions',
|
|
1102
|
+
severity: 'high',
|
|
1103
|
+
passed: groupWritable.length === 0,
|
|
1104
|
+
message: groupWritable.length === 0
|
|
1105
|
+
? 'No sensitive files have group write permission'
|
|
1106
|
+
: `Group-writable sensitive files: ${groupWritable.join(', ')}`,
|
|
1107
|
+
fixable: true,
|
|
1108
|
+
fixed: false,
|
|
1109
|
+
details: groupWritable.length > 0 ? { files: groupWritable } : undefined,
|
|
1110
|
+
});
|
|
1111
|
+
return findings;
|
|
1112
|
+
}
|
|
1113
|
+
async checkEnvironmentSecurity(targetDir, autoFix) {
|
|
1114
|
+
const findings = [];
|
|
1115
|
+
// ENV-001: Check for development mode indicators
|
|
1116
|
+
let devModeEnabled = false;
|
|
1117
|
+
const envIndicators = ['NODE_ENV=development', 'DEBUG=true', 'DEV_MODE=true'];
|
|
1118
|
+
const envFiles = ['.env', '.env.local', 'config.json'];
|
|
1119
|
+
for (const file of envFiles) {
|
|
1120
|
+
try {
|
|
1121
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1122
|
+
for (const indicator of envIndicators) {
|
|
1123
|
+
if (content.includes(indicator)) {
|
|
1124
|
+
devModeEnabled = true;
|
|
1125
|
+
break;
|
|
1126
|
+
}
|
|
1127
|
+
}
|
|
1128
|
+
}
|
|
1129
|
+
catch { }
|
|
1130
|
+
}
|
|
1131
|
+
findings.push({
|
|
1132
|
+
checkId: 'ENV-001',
|
|
1133
|
+
name: 'Development Mode Enabled',
|
|
1134
|
+
description: 'Development mode indicators found in configuration',
|
|
1135
|
+
category: 'environment',
|
|
1136
|
+
severity: 'medium',
|
|
1137
|
+
passed: !devModeEnabled,
|
|
1138
|
+
message: devModeEnabled
|
|
1139
|
+
? 'Development mode enabled - ensure this is disabled in production'
|
|
1140
|
+
: 'No development mode indicators found',
|
|
1141
|
+
fixable: false,
|
|
1142
|
+
});
|
|
1143
|
+
// ENV-002: Check for debug flags
|
|
1144
|
+
let hasDebugFlags = false;
|
|
1145
|
+
const debugPatterns = ['DEBUG=', 'VERBOSE=true', 'LOG_LEVEL=debug', 'TRACE=true'];
|
|
1146
|
+
for (const file of envFiles) {
|
|
1147
|
+
try {
|
|
1148
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1149
|
+
for (const pattern of debugPatterns) {
|
|
1150
|
+
if (content.includes(pattern)) {
|
|
1151
|
+
hasDebugFlags = true;
|
|
1152
|
+
break;
|
|
1153
|
+
}
|
|
1154
|
+
}
|
|
1155
|
+
}
|
|
1156
|
+
catch { }
|
|
1157
|
+
}
|
|
1158
|
+
findings.push({
|
|
1159
|
+
checkId: 'ENV-002',
|
|
1160
|
+
name: 'Debug Flags Enabled',
|
|
1161
|
+
description: 'Debug or verbose logging flags are enabled',
|
|
1162
|
+
category: 'environment',
|
|
1163
|
+
severity: 'low',
|
|
1164
|
+
passed: !hasDebugFlags,
|
|
1165
|
+
message: hasDebugFlags
|
|
1166
|
+
? 'Debug flags enabled - may expose sensitive information in logs'
|
|
1167
|
+
: 'No debug flags detected',
|
|
1168
|
+
fixable: false,
|
|
1169
|
+
});
|
|
1170
|
+
// ENV-003: Check for error verbosity settings
|
|
1171
|
+
let verboseErrors = false;
|
|
1172
|
+
const errorPatterns = ['SHOW_ERRORS=true', 'DISPLAY_ERRORS=true', 'STACK_TRACE=true'];
|
|
1173
|
+
for (const file of envFiles) {
|
|
1174
|
+
try {
|
|
1175
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1176
|
+
for (const pattern of errorPatterns) {
|
|
1177
|
+
if (content.includes(pattern)) {
|
|
1178
|
+
verboseErrors = true;
|
|
1179
|
+
break;
|
|
1180
|
+
}
|
|
1181
|
+
}
|
|
1182
|
+
}
|
|
1183
|
+
catch { }
|
|
1184
|
+
}
|
|
1185
|
+
findings.push({
|
|
1186
|
+
checkId: 'ENV-003',
|
|
1187
|
+
name: 'Verbose Error Messages',
|
|
1188
|
+
description: 'Configuration enables verbose error messages',
|
|
1189
|
+
category: 'environment',
|
|
1190
|
+
severity: 'medium',
|
|
1191
|
+
passed: !verboseErrors,
|
|
1192
|
+
message: verboseErrors
|
|
1193
|
+
? 'Verbose error messages enabled - may leak sensitive information'
|
|
1194
|
+
: 'Error verbosity settings are appropriate',
|
|
1195
|
+
fixable: false,
|
|
1196
|
+
});
|
|
1197
|
+
// ENV-004: Check for production environment validation
|
|
1198
|
+
let hasEnvValidation = false;
|
|
1199
|
+
try {
|
|
1200
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1201
|
+
hasEnvValidation = pkgJson.includes('dotenv') || pkgJson.includes('env-var') || pkgJson.includes('envalid');
|
|
1202
|
+
}
|
|
1203
|
+
catch { }
|
|
1204
|
+
findings.push({
|
|
1205
|
+
checkId: 'ENV-004',
|
|
1206
|
+
name: 'Environment Validation',
|
|
1207
|
+
description: 'No environment variable validation library detected',
|
|
1208
|
+
category: 'environment',
|
|
1209
|
+
severity: 'low',
|
|
1210
|
+
passed: hasEnvValidation,
|
|
1211
|
+
message: hasEnvValidation
|
|
1212
|
+
? 'Environment validation library detected'
|
|
1213
|
+
: 'Consider using env validation (dotenv, envalid) to catch misconfigurations',
|
|
1214
|
+
fixable: false,
|
|
1215
|
+
});
|
|
1216
|
+
return findings;
|
|
1217
|
+
}
|
|
1218
|
+
async checkLoggingSecurity(targetDir, autoFix) {
|
|
1219
|
+
const findings = [];
|
|
1220
|
+
// LOG-001: Check for logging configuration
|
|
1221
|
+
let hasLoggingConfig = false;
|
|
1222
|
+
try {
|
|
1223
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1224
|
+
hasLoggingConfig = pkgJson.includes('winston') || pkgJson.includes('pino') || pkgJson.includes('bunyan');
|
|
1225
|
+
}
|
|
1226
|
+
catch { }
|
|
1227
|
+
findings.push({
|
|
1228
|
+
checkId: 'LOG-001',
|
|
1229
|
+
name: 'Structured Logging',
|
|
1230
|
+
description: 'No structured logging library detected',
|
|
1231
|
+
category: 'logging',
|
|
1232
|
+
severity: 'low',
|
|
1233
|
+
passed: hasLoggingConfig,
|
|
1234
|
+
message: hasLoggingConfig
|
|
1235
|
+
? 'Structured logging library detected'
|
|
1236
|
+
: 'Consider using structured logging (winston, pino) for better security auditing',
|
|
1237
|
+
fixable: false,
|
|
1238
|
+
});
|
|
1239
|
+
// LOG-002: Check for sensitive data in log patterns
|
|
1240
|
+
let sensitiveInLogs = false;
|
|
1241
|
+
const logPatterns = ['console.log(password', 'console.log(apiKey', 'console.log(secret', 'console.log(token'];
|
|
1242
|
+
try {
|
|
1243
|
+
const files = await fs.readdir(targetDir);
|
|
1244
|
+
for (const file of files) {
|
|
1245
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
1246
|
+
try {
|
|
1247
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1248
|
+
for (const pattern of logPatterns) {
|
|
1249
|
+
if (content.toLowerCase().includes(pattern.toLowerCase())) {
|
|
1250
|
+
sensitiveInLogs = true;
|
|
1251
|
+
break;
|
|
1252
|
+
}
|
|
1253
|
+
}
|
|
1254
|
+
}
|
|
1255
|
+
catch { }
|
|
1256
|
+
}
|
|
1257
|
+
}
|
|
1258
|
+
}
|
|
1259
|
+
catch { }
|
|
1260
|
+
findings.push({
|
|
1261
|
+
checkId: 'LOG-002',
|
|
1262
|
+
name: 'Sensitive Data in Logs',
|
|
1263
|
+
description: 'Potential sensitive data being logged',
|
|
1264
|
+
category: 'logging',
|
|
1265
|
+
severity: 'high',
|
|
1266
|
+
passed: !sensitiveInLogs,
|
|
1267
|
+
message: sensitiveInLogs
|
|
1268
|
+
? 'Code may be logging sensitive data - review console.log statements'
|
|
1269
|
+
: 'No obvious sensitive data logging patterns found',
|
|
1270
|
+
fixable: false,
|
|
1271
|
+
});
|
|
1272
|
+
// LOG-003: Check for log file permissions
|
|
1273
|
+
const logFiles = ['app.log', 'error.log', 'debug.log', 'access.log'];
|
|
1274
|
+
const worldReadableLogs = [];
|
|
1275
|
+
for (const logFile of logFiles) {
|
|
1276
|
+
try {
|
|
1277
|
+
const stats = await fs.stat(path.join(targetDir, logFile));
|
|
1278
|
+
const mode = stats.mode & 0o777;
|
|
1279
|
+
if (mode & 0o004) {
|
|
1280
|
+
worldReadableLogs.push(logFile);
|
|
1281
|
+
}
|
|
1282
|
+
}
|
|
1283
|
+
catch { }
|
|
1284
|
+
}
|
|
1285
|
+
findings.push({
|
|
1286
|
+
checkId: 'LOG-003',
|
|
1287
|
+
name: 'Log File Permissions',
|
|
1288
|
+
description: 'Log files have overly permissive permissions',
|
|
1289
|
+
category: 'logging',
|
|
1290
|
+
severity: 'medium',
|
|
1291
|
+
passed: worldReadableLogs.length === 0,
|
|
1292
|
+
message: worldReadableLogs.length === 0
|
|
1293
|
+
? 'No world-readable log files found'
|
|
1294
|
+
: `World-readable log files: ${worldReadableLogs.join(', ')}`,
|
|
1295
|
+
fixable: true,
|
|
1296
|
+
fixed: false,
|
|
1297
|
+
});
|
|
1298
|
+
// LOG-004: Check for audit logging capability
|
|
1299
|
+
let hasAuditLogging = false;
|
|
1300
|
+
try {
|
|
1301
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1302
|
+
hasAuditLogging = pkgJson.includes('audit') || pkgJson.includes('morgan') || pkgJson.includes('express-winston');
|
|
1303
|
+
}
|
|
1304
|
+
catch { }
|
|
1305
|
+
findings.push({
|
|
1306
|
+
checkId: 'LOG-004',
|
|
1307
|
+
name: 'Audit Logging',
|
|
1308
|
+
description: 'No audit logging capability detected',
|
|
1309
|
+
category: 'logging',
|
|
1310
|
+
severity: 'medium',
|
|
1311
|
+
passed: hasAuditLogging,
|
|
1312
|
+
message: hasAuditLogging
|
|
1313
|
+
? 'Audit logging capability detected'
|
|
1314
|
+
: 'Consider implementing audit logging for security events',
|
|
1315
|
+
fixable: false,
|
|
1316
|
+
});
|
|
1317
|
+
return findings;
|
|
1318
|
+
}
|
|
1319
|
+
async checkDependencySecurity(targetDir, autoFix) {
|
|
1320
|
+
const findings = [];
|
|
1321
|
+
// DEP-001: Check for package-lock.json
|
|
1322
|
+
let hasLockFile = false;
|
|
1323
|
+
try {
|
|
1324
|
+
await fs.access(path.join(targetDir, 'package-lock.json'));
|
|
1325
|
+
hasLockFile = true;
|
|
1326
|
+
}
|
|
1327
|
+
catch {
|
|
1328
|
+
try {
|
|
1329
|
+
await fs.access(path.join(targetDir, 'yarn.lock'));
|
|
1330
|
+
hasLockFile = true;
|
|
1331
|
+
}
|
|
1332
|
+
catch {
|
|
1333
|
+
try {
|
|
1334
|
+
await fs.access(path.join(targetDir, 'pnpm-lock.yaml'));
|
|
1335
|
+
hasLockFile = true;
|
|
1336
|
+
}
|
|
1337
|
+
catch { }
|
|
1338
|
+
}
|
|
1339
|
+
}
|
|
1340
|
+
findings.push({
|
|
1341
|
+
checkId: 'DEP-001',
|
|
1342
|
+
name: 'Dependency Lock File',
|
|
1343
|
+
description: 'No dependency lock file found',
|
|
1344
|
+
category: 'dependencies',
|
|
1345
|
+
severity: 'medium',
|
|
1346
|
+
passed: hasLockFile,
|
|
1347
|
+
message: hasLockFile
|
|
1348
|
+
? 'Dependency lock file present'
|
|
1349
|
+
: 'No lock file found - dependency versions may vary between installs',
|
|
1350
|
+
fixable: false,
|
|
1351
|
+
});
|
|
1352
|
+
// DEP-002: Check for known vulnerable packages
|
|
1353
|
+
const vulnerablePackages = ['event-stream', 'flatmap-stream', 'eslint-scope@3.7.2'];
|
|
1354
|
+
let hasVulnerablePackage = false;
|
|
1355
|
+
try {
|
|
1356
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1357
|
+
for (const pkg of vulnerablePackages) {
|
|
1358
|
+
if (pkgJson.includes(pkg.split('@')[0])) {
|
|
1359
|
+
hasVulnerablePackage = true;
|
|
1360
|
+
break;
|
|
1361
|
+
}
|
|
1362
|
+
}
|
|
1363
|
+
}
|
|
1364
|
+
catch { }
|
|
1365
|
+
findings.push({
|
|
1366
|
+
checkId: 'DEP-002',
|
|
1367
|
+
name: 'Known Vulnerable Packages',
|
|
1368
|
+
description: 'Package.json may contain known vulnerable packages',
|
|
1369
|
+
category: 'dependencies',
|
|
1370
|
+
severity: 'critical',
|
|
1371
|
+
passed: !hasVulnerablePackage,
|
|
1372
|
+
message: hasVulnerablePackage
|
|
1373
|
+
? 'Potentially vulnerable package detected - run npm audit'
|
|
1374
|
+
: 'No known vulnerable packages in direct dependencies',
|
|
1375
|
+
fixable: false,
|
|
1376
|
+
});
|
|
1377
|
+
// DEP-003: Check for wildcard versions
|
|
1378
|
+
let hasWildcardVersions = false;
|
|
1379
|
+
try {
|
|
1380
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1381
|
+
const pkg = JSON.parse(pkgJson);
|
|
1382
|
+
const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
1383
|
+
for (const [, version] of Object.entries(allDeps)) {
|
|
1384
|
+
if (version === '*' || version === 'latest') {
|
|
1385
|
+
hasWildcardVersions = true;
|
|
1386
|
+
break;
|
|
1387
|
+
}
|
|
1388
|
+
}
|
|
1389
|
+
}
|
|
1390
|
+
catch { }
|
|
1391
|
+
findings.push({
|
|
1392
|
+
checkId: 'DEP-003',
|
|
1393
|
+
name: 'Wildcard Dependency Versions',
|
|
1394
|
+
description: 'Package.json uses wildcard or latest versions',
|
|
1395
|
+
category: 'dependencies',
|
|
1396
|
+
severity: 'high',
|
|
1397
|
+
passed: !hasWildcardVersions,
|
|
1398
|
+
message: hasWildcardVersions
|
|
1399
|
+
? 'Wildcard versions detected - pin dependencies for reproducible builds'
|
|
1400
|
+
: 'All dependency versions are properly specified',
|
|
1401
|
+
fixable: false,
|
|
1402
|
+
});
|
|
1403
|
+
// DEP-004: Check for npm scripts security
|
|
1404
|
+
let hasDangerousScripts = false;
|
|
1405
|
+
const dangerousScriptPatterns = ['curl | sh', 'wget | bash', 'eval(', '$(curl'];
|
|
1406
|
+
try {
|
|
1407
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1408
|
+
const pkg = JSON.parse(pkgJson);
|
|
1409
|
+
if (pkg.scripts) {
|
|
1410
|
+
for (const [, script] of Object.entries(pkg.scripts)) {
|
|
1411
|
+
if (typeof script === 'string') {
|
|
1412
|
+
for (const pattern of dangerousScriptPatterns) {
|
|
1413
|
+
if (script.includes(pattern)) {
|
|
1414
|
+
hasDangerousScripts = true;
|
|
1415
|
+
break;
|
|
1416
|
+
}
|
|
1417
|
+
}
|
|
1418
|
+
}
|
|
1419
|
+
}
|
|
1420
|
+
}
|
|
1421
|
+
}
|
|
1422
|
+
catch { }
|
|
1423
|
+
findings.push({
|
|
1424
|
+
checkId: 'DEP-004',
|
|
1425
|
+
name: 'Dangerous npm Scripts',
|
|
1426
|
+
description: 'npm scripts contain potentially dangerous commands',
|
|
1427
|
+
category: 'dependencies',
|
|
1428
|
+
severity: 'critical',
|
|
1429
|
+
passed: !hasDangerousScripts,
|
|
1430
|
+
message: hasDangerousScripts
|
|
1431
|
+
? 'Dangerous patterns in npm scripts (curl|sh, eval) - review carefully'
|
|
1432
|
+
: 'npm scripts appear safe',
|
|
1433
|
+
fixable: false,
|
|
1434
|
+
});
|
|
1435
|
+
return findings;
|
|
1436
|
+
}
|
|
1437
|
+
async checkAuthSecurity(targetDir, autoFix) {
|
|
1438
|
+
const findings = [];
|
|
1439
|
+
// AUTH-001: Check for auth configuration
|
|
1440
|
+
let hasAuthConfig = false;
|
|
1441
|
+
const authIndicators = ['auth', 'authentication', 'passport', 'jwt', 'session'];
|
|
1442
|
+
try {
|
|
1443
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1444
|
+
for (const indicator of authIndicators) {
|
|
1445
|
+
if (pkgJson.toLowerCase().includes(indicator)) {
|
|
1446
|
+
hasAuthConfig = true;
|
|
1447
|
+
break;
|
|
1448
|
+
}
|
|
1449
|
+
}
|
|
1450
|
+
}
|
|
1451
|
+
catch { }
|
|
1452
|
+
findings.push({
|
|
1453
|
+
checkId: 'AUTH-001',
|
|
1454
|
+
name: 'Authentication Configuration',
|
|
1455
|
+
description: 'No authentication library or configuration detected',
|
|
1456
|
+
category: 'authentication',
|
|
1457
|
+
severity: 'medium',
|
|
1458
|
+
passed: hasAuthConfig,
|
|
1459
|
+
message: hasAuthConfig
|
|
1460
|
+
? 'Authentication configuration detected'
|
|
1461
|
+
: 'No authentication library detected - ensure endpoints are protected',
|
|
1462
|
+
fixable: false,
|
|
1463
|
+
});
|
|
1464
|
+
// AUTH-002: Check for rate limiting
|
|
1465
|
+
let hasRateLimiting = false;
|
|
1466
|
+
try {
|
|
1467
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1468
|
+
hasRateLimiting = pkgJson.includes('rate-limit') || pkgJson.includes('express-rate-limit') || pkgJson.includes('bottleneck');
|
|
1469
|
+
}
|
|
1470
|
+
catch { }
|
|
1471
|
+
findings.push({
|
|
1472
|
+
checkId: 'AUTH-002',
|
|
1473
|
+
name: 'Rate Limiting',
|
|
1474
|
+
description: 'No rate limiting library detected',
|
|
1475
|
+
category: 'authentication',
|
|
1476
|
+
severity: 'high',
|
|
1477
|
+
passed: hasRateLimiting,
|
|
1478
|
+
message: hasRateLimiting
|
|
1479
|
+
? 'Rate limiting library detected'
|
|
1480
|
+
: 'No rate limiting detected - API may be vulnerable to abuse',
|
|
1481
|
+
fixable: false,
|
|
1482
|
+
});
|
|
1483
|
+
// AUTH-003: Check for session security
|
|
1484
|
+
let hasSecureSessions = false;
|
|
1485
|
+
const sessionIndicators = ['express-session', 'cookie-session', 'secure: true', 'httpOnly: true'];
|
|
1486
|
+
try {
|
|
1487
|
+
const files = await fs.readdir(targetDir);
|
|
1488
|
+
for (const file of files) {
|
|
1489
|
+
if (file.endsWith('.ts') || file.endsWith('.js') || file.endsWith('.json')) {
|
|
1490
|
+
try {
|
|
1491
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1492
|
+
for (const indicator of sessionIndicators) {
|
|
1493
|
+
if (content.includes(indicator)) {
|
|
1494
|
+
hasSecureSessions = true;
|
|
1495
|
+
break;
|
|
1496
|
+
}
|
|
1497
|
+
}
|
|
1498
|
+
}
|
|
1499
|
+
catch { }
|
|
1500
|
+
}
|
|
1501
|
+
}
|
|
1502
|
+
}
|
|
1503
|
+
catch { }
|
|
1504
|
+
findings.push({
|
|
1505
|
+
checkId: 'AUTH-003',
|
|
1506
|
+
name: 'Secure Session Configuration',
|
|
1507
|
+
description: 'Session security configuration not detected',
|
|
1508
|
+
category: 'authentication',
|
|
1509
|
+
severity: 'medium',
|
|
1510
|
+
passed: hasSecureSessions,
|
|
1511
|
+
message: hasSecureSessions
|
|
1512
|
+
? 'Secure session configuration detected'
|
|
1513
|
+
: 'Ensure sessions use secure, httpOnly cookies',
|
|
1514
|
+
fixable: false,
|
|
1515
|
+
});
|
|
1516
|
+
// AUTH-004: Check for CORS configuration
|
|
1517
|
+
let hasCorsConfig = false;
|
|
1518
|
+
try {
|
|
1519
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1520
|
+
hasCorsConfig = pkgJson.includes('cors');
|
|
1521
|
+
}
|
|
1522
|
+
catch { }
|
|
1523
|
+
findings.push({
|
|
1524
|
+
checkId: 'AUTH-004',
|
|
1525
|
+
name: 'CORS Configuration',
|
|
1526
|
+
description: 'No CORS library detected',
|
|
1527
|
+
category: 'authentication',
|
|
1528
|
+
severity: 'medium',
|
|
1529
|
+
passed: hasCorsConfig,
|
|
1530
|
+
message: hasCorsConfig
|
|
1531
|
+
? 'CORS library detected'
|
|
1532
|
+
: 'No CORS configuration detected - ensure cross-origin requests are properly handled',
|
|
1533
|
+
fixable: false,
|
|
1534
|
+
});
|
|
1535
|
+
return findings;
|
|
1536
|
+
}
|
|
1537
|
+
async checkProcessSecurity(targetDir, autoFix) {
|
|
1538
|
+
const findings = [];
|
|
1539
|
+
// PROC-001: Check for Dockerfile security
|
|
1540
|
+
let hasSecureDockerfile = true;
|
|
1541
|
+
try {
|
|
1542
|
+
const dockerfile = await fs.readFile(path.join(targetDir, 'Dockerfile'), 'utf-8');
|
|
1543
|
+
if (dockerfile.includes('USER root') || !dockerfile.includes('USER ')) {
|
|
1544
|
+
hasSecureDockerfile = false;
|
|
1545
|
+
}
|
|
1546
|
+
}
|
|
1547
|
+
catch {
|
|
1548
|
+
// No Dockerfile, that's fine
|
|
1549
|
+
}
|
|
1550
|
+
findings.push({
|
|
1551
|
+
checkId: 'PROC-001',
|
|
1552
|
+
name: 'Container User',
|
|
1553
|
+
description: 'Dockerfile runs as root or has no USER directive',
|
|
1554
|
+
category: 'process',
|
|
1555
|
+
severity: 'high',
|
|
1556
|
+
passed: hasSecureDockerfile,
|
|
1557
|
+
message: hasSecureDockerfile
|
|
1558
|
+
? 'Container runs as non-root user or no Dockerfile present'
|
|
1559
|
+
: 'Dockerfile runs as root - add USER directive for non-root user',
|
|
1560
|
+
fixable: false,
|
|
1561
|
+
});
|
|
1562
|
+
// PROC-002: Check for security headers middleware
|
|
1563
|
+
let hasSecurityHeaders = false;
|
|
1564
|
+
try {
|
|
1565
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1566
|
+
hasSecurityHeaders = pkgJson.includes('helmet') || pkgJson.includes('security-headers');
|
|
1567
|
+
}
|
|
1568
|
+
catch { }
|
|
1569
|
+
findings.push({
|
|
1570
|
+
checkId: 'PROC-002',
|
|
1571
|
+
name: 'Security Headers',
|
|
1572
|
+
description: 'No security headers middleware detected',
|
|
1573
|
+
category: 'process',
|
|
1574
|
+
severity: 'medium',
|
|
1575
|
+
passed: hasSecurityHeaders,
|
|
1576
|
+
message: hasSecurityHeaders
|
|
1577
|
+
? 'Security headers middleware detected (helmet)'
|
|
1578
|
+
: 'Consider using helmet or similar for security headers',
|
|
1579
|
+
fixable: false,
|
|
1580
|
+
});
|
|
1581
|
+
// PROC-003: Check for input validation
|
|
1582
|
+
let hasInputValidation = false;
|
|
1583
|
+
try {
|
|
1584
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1585
|
+
hasInputValidation = pkgJson.includes('joi') || pkgJson.includes('zod') || pkgJson.includes('yup') || pkgJson.includes('class-validator');
|
|
1586
|
+
}
|
|
1587
|
+
catch { }
|
|
1588
|
+
findings.push({
|
|
1589
|
+
checkId: 'PROC-003',
|
|
1590
|
+
name: 'Input Validation',
|
|
1591
|
+
description: 'No input validation library detected',
|
|
1592
|
+
category: 'process',
|
|
1593
|
+
severity: 'high',
|
|
1594
|
+
passed: hasInputValidation,
|
|
1595
|
+
message: hasInputValidation
|
|
1596
|
+
? 'Input validation library detected'
|
|
1597
|
+
: 'No input validation library found - validate all user inputs',
|
|
1598
|
+
fixable: false,
|
|
1599
|
+
});
|
|
1600
|
+
// PROC-004: Check for error handling
|
|
1601
|
+
let hasErrorHandling = false;
|
|
1602
|
+
try {
|
|
1603
|
+
const files = await fs.readdir(targetDir);
|
|
1604
|
+
for (const file of files) {
|
|
1605
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
1606
|
+
try {
|
|
1607
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1608
|
+
if (content.includes('try') && content.includes('catch')) {
|
|
1609
|
+
hasErrorHandling = true;
|
|
1610
|
+
break;
|
|
1611
|
+
}
|
|
1612
|
+
}
|
|
1613
|
+
catch { }
|
|
1614
|
+
}
|
|
1615
|
+
}
|
|
1616
|
+
}
|
|
1617
|
+
catch { }
|
|
1618
|
+
findings.push({
|
|
1619
|
+
checkId: 'PROC-004',
|
|
1620
|
+
name: 'Error Handling',
|
|
1621
|
+
description: 'No error handling patterns detected',
|
|
1622
|
+
category: 'process',
|
|
1623
|
+
severity: 'medium',
|
|
1624
|
+
passed: hasErrorHandling,
|
|
1625
|
+
message: hasErrorHandling
|
|
1626
|
+
? 'Error handling patterns detected'
|
|
1627
|
+
: 'Ensure proper error handling to prevent information disclosure',
|
|
1628
|
+
fixable: false,
|
|
1629
|
+
});
|
|
1630
|
+
return findings;
|
|
1631
|
+
}
|
|
1632
|
+
async checkClaudeExtended(targetDir, autoFix) {
|
|
1633
|
+
const findings = [];
|
|
1634
|
+
const claudeSettingsPath = path.join(targetDir, '.claude', 'settings.json');
|
|
1635
|
+
let claudeSettings = null;
|
|
1636
|
+
try {
|
|
1637
|
+
const content = await fs.readFile(claudeSettingsPath, 'utf-8');
|
|
1638
|
+
claudeSettings = JSON.parse(content);
|
|
1639
|
+
}
|
|
1640
|
+
catch { }
|
|
1641
|
+
// CLAUDE-004: Check for deny rules
|
|
1642
|
+
const permissions = claudeSettings?.permissions;
|
|
1643
|
+
const hasDenyRules = permissions?.deny && permissions.deny.length > 0;
|
|
1644
|
+
findings.push({
|
|
1645
|
+
checkId: 'CLAUDE-004',
|
|
1646
|
+
name: 'Claude Deny Rules',
|
|
1647
|
+
description: 'No deny rules configured for Claude Code',
|
|
1648
|
+
category: 'claude-code',
|
|
1649
|
+
severity: 'medium',
|
|
1650
|
+
passed: hasDenyRules || !claudeSettings,
|
|
1651
|
+
message: hasDenyRules
|
|
1652
|
+
? 'Claude Code has deny rules configured'
|
|
1653
|
+
: claudeSettings
|
|
1654
|
+
? 'Consider adding deny rules to block dangerous operations'
|
|
1655
|
+
: 'No Claude settings file found',
|
|
1656
|
+
fixable: false,
|
|
1657
|
+
});
|
|
1658
|
+
// CLAUDE-005: Check for memory/context persistence
|
|
1659
|
+
const memorySettings = claudeSettings?.memory;
|
|
1660
|
+
const hasMemoryEnabled = memorySettings?.enabled === true;
|
|
1661
|
+
findings.push({
|
|
1662
|
+
checkId: 'CLAUDE-005',
|
|
1663
|
+
name: 'Claude Memory Persistence',
|
|
1664
|
+
description: 'Claude memory persistence may store sensitive context',
|
|
1665
|
+
category: 'claude-code',
|
|
1666
|
+
severity: 'low',
|
|
1667
|
+
passed: !hasMemoryEnabled,
|
|
1668
|
+
message: hasMemoryEnabled
|
|
1669
|
+
? 'Claude memory enabled - be aware sensitive data may persist'
|
|
1670
|
+
: 'Claude memory not explicitly enabled',
|
|
1671
|
+
fixable: false,
|
|
1672
|
+
});
|
|
1673
|
+
// CLAUDE-006: Check CLAUDE.md for sensitive instructions
|
|
1674
|
+
let hasSensitiveInstructions = false;
|
|
1675
|
+
const sensitivePatterns = ['never share', 'confidential', 'internal only', 'do not disclose'];
|
|
1676
|
+
try {
|
|
1677
|
+
const claudeMd = await fs.readFile(path.join(targetDir, 'CLAUDE.md'), 'utf-8');
|
|
1678
|
+
for (const pattern of sensitivePatterns) {
|
|
1679
|
+
if (claudeMd.toLowerCase().includes(pattern)) {
|
|
1680
|
+
hasSensitiveInstructions = true;
|
|
1681
|
+
break;
|
|
1682
|
+
}
|
|
1683
|
+
}
|
|
1684
|
+
}
|
|
1685
|
+
catch { }
|
|
1686
|
+
findings.push({
|
|
1687
|
+
checkId: 'CLAUDE-006',
|
|
1688
|
+
name: 'Sensitive Instructions in CLAUDE.md',
|
|
1689
|
+
description: 'CLAUDE.md may contain sensitive instructions that could be extracted',
|
|
1690
|
+
category: 'claude-code',
|
|
1691
|
+
severity: 'medium',
|
|
1692
|
+
passed: !hasSensitiveInstructions,
|
|
1693
|
+
message: hasSensitiveInstructions
|
|
1694
|
+
? 'CLAUDE.md contains sensitive instructions - these may be extractable via prompt injection'
|
|
1695
|
+
: 'No obviously sensitive instructions detected in CLAUDE.md',
|
|
1696
|
+
fixable: false,
|
|
1697
|
+
});
|
|
1698
|
+
// CLAUDE-007: Check for tool timeout configuration
|
|
1699
|
+
const hasToolTimeout = claudeSettings?.toolTimeout !== undefined;
|
|
1700
|
+
findings.push({
|
|
1701
|
+
checkId: 'CLAUDE-007',
|
|
1702
|
+
name: 'Tool Timeout Configuration',
|
|
1703
|
+
description: 'No tool timeout configured for Claude operations',
|
|
1704
|
+
category: 'claude-code',
|
|
1705
|
+
severity: 'low',
|
|
1706
|
+
passed: hasToolTimeout || !claudeSettings,
|
|
1707
|
+
message: hasToolTimeout
|
|
1708
|
+
? 'Tool timeout is configured'
|
|
1709
|
+
: claudeSettings
|
|
1710
|
+
? 'Consider setting tool timeouts to prevent runaway operations'
|
|
1711
|
+
: 'No Claude settings found',
|
|
1712
|
+
fixable: false,
|
|
1713
|
+
});
|
|
1714
|
+
return findings;
|
|
1715
|
+
}
|
|
1716
|
+
async checkMcpExtended(targetDir, autoFix) {
|
|
1717
|
+
const findings = [];
|
|
1718
|
+
const mcpConfigPath = path.join(targetDir, 'mcp.json');
|
|
1719
|
+
let mcpConfig = null;
|
|
1720
|
+
try {
|
|
1721
|
+
const content = await fs.readFile(mcpConfigPath, 'utf-8');
|
|
1722
|
+
mcpConfig = JSON.parse(content);
|
|
1723
|
+
}
|
|
1724
|
+
catch { }
|
|
1725
|
+
// MCP-006: Check for request timeout
|
|
1726
|
+
const hasTimeout = mcpConfig?.timeout !== undefined;
|
|
1727
|
+
findings.push({
|
|
1728
|
+
checkId: 'MCP-006',
|
|
1729
|
+
name: 'MCP Request Timeout',
|
|
1730
|
+
description: 'No request timeout configured for MCP servers',
|
|
1731
|
+
category: 'mcp',
|
|
1732
|
+
severity: 'medium',
|
|
1733
|
+
passed: hasTimeout || !mcpConfig,
|
|
1734
|
+
message: hasTimeout
|
|
1735
|
+
? 'MCP timeout is configured'
|
|
1736
|
+
: mcpConfig
|
|
1737
|
+
? 'Consider setting request timeouts for MCP servers'
|
|
1738
|
+
: 'No MCP config found',
|
|
1739
|
+
fixable: false,
|
|
1740
|
+
});
|
|
1741
|
+
// MCP-007: Check for retry limits
|
|
1742
|
+
const hasRetryConfig = mcpConfig?.retries !== undefined;
|
|
1743
|
+
findings.push({
|
|
1744
|
+
checkId: 'MCP-007',
|
|
1745
|
+
name: 'MCP Retry Limits',
|
|
1746
|
+
description: 'No retry limits configured for MCP servers',
|
|
1747
|
+
category: 'mcp',
|
|
1748
|
+
severity: 'low',
|
|
1749
|
+
passed: hasRetryConfig || !mcpConfig,
|
|
1750
|
+
message: hasRetryConfig
|
|
1751
|
+
? 'MCP retry limits configured'
|
|
1752
|
+
: mcpConfig
|
|
1753
|
+
? 'Consider setting retry limits to prevent infinite loops'
|
|
1754
|
+
: 'No MCP config found',
|
|
1755
|
+
fixable: false,
|
|
1756
|
+
});
|
|
1757
|
+
// MCP-008: Check for localhost binding
|
|
1758
|
+
let allLocalhostBound = true;
|
|
1759
|
+
if (mcpConfig?.servers) {
|
|
1760
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
1761
|
+
if (server.url && !server.url.includes('localhost') && !server.url.includes('127.0.0.1')) {
|
|
1762
|
+
// Remote server is fine if using HTTPS
|
|
1763
|
+
continue;
|
|
1764
|
+
}
|
|
1765
|
+
if (server.args?.some((arg) => arg.includes('0.0.0.0'))) {
|
|
1766
|
+
allLocalhostBound = false;
|
|
1767
|
+
}
|
|
1768
|
+
}
|
|
1769
|
+
}
|
|
1770
|
+
findings.push({
|
|
1771
|
+
checkId: 'MCP-008',
|
|
1772
|
+
name: 'MCP Localhost Binding',
|
|
1773
|
+
description: 'MCP servers should bind to localhost only',
|
|
1774
|
+
category: 'mcp',
|
|
1775
|
+
severity: 'high',
|
|
1776
|
+
passed: allLocalhostBound,
|
|
1777
|
+
message: allLocalhostBound
|
|
1778
|
+
? 'MCP servers properly bound to localhost'
|
|
1779
|
+
: 'Some MCP servers not bound to localhost - may be network accessible',
|
|
1780
|
+
fixable: false,
|
|
1781
|
+
});
|
|
1782
|
+
// MCP-009: Check for sensitive tool names
|
|
1783
|
+
const sensitiveTools = ['execute', 'shell', 'eval', 'system', 'exec', 'spawn'];
|
|
1784
|
+
let hasSensitiveTools = false;
|
|
1785
|
+
if (mcpConfig?.servers) {
|
|
1786
|
+
for (const [name] of Object.entries(mcpConfig.servers)) {
|
|
1787
|
+
for (const tool of sensitiveTools) {
|
|
1788
|
+
if (name.toLowerCase().includes(tool)) {
|
|
1789
|
+
hasSensitiveTools = true;
|
|
1790
|
+
break;
|
|
1791
|
+
}
|
|
1792
|
+
}
|
|
1793
|
+
}
|
|
1794
|
+
}
|
|
1795
|
+
findings.push({
|
|
1796
|
+
checkId: 'MCP-009',
|
|
1797
|
+
name: 'Sensitive MCP Tools',
|
|
1798
|
+
description: 'MCP configuration includes potentially dangerous tools',
|
|
1799
|
+
category: 'mcp',
|
|
1800
|
+
severity: 'high',
|
|
1801
|
+
passed: !hasSensitiveTools,
|
|
1802
|
+
message: hasSensitiveTools
|
|
1803
|
+
? 'Sensitive tool names detected (shell, exec, eval) - ensure proper restrictions'
|
|
1804
|
+
: 'No obviously sensitive tool names in MCP config',
|
|
1805
|
+
fixable: false,
|
|
1806
|
+
});
|
|
1807
|
+
// MCP-010: Check for logging configuration
|
|
1808
|
+
let hasLogging = false;
|
|
1809
|
+
if (mcpConfig?.servers) {
|
|
1810
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
1811
|
+
if (server.args?.some((arg) => arg.includes('log') || arg.includes('verbose'))) {
|
|
1812
|
+
hasLogging = true;
|
|
1813
|
+
break;
|
|
1814
|
+
}
|
|
1815
|
+
}
|
|
1816
|
+
}
|
|
1817
|
+
findings.push({
|
|
1818
|
+
checkId: 'MCP-010',
|
|
1819
|
+
name: 'MCP Logging',
|
|
1820
|
+
description: 'MCP server logging configuration',
|
|
1821
|
+
category: 'mcp',
|
|
1822
|
+
severity: 'low',
|
|
1823
|
+
passed: true, // Informational
|
|
1824
|
+
message: hasLogging
|
|
1825
|
+
? 'MCP logging appears to be configured - ensure sensitive data is not logged'
|
|
1826
|
+
: 'No explicit MCP logging configuration detected',
|
|
1827
|
+
fixable: false,
|
|
1828
|
+
});
|
|
1829
|
+
return findings;
|
|
1830
|
+
}
|
|
1831
|
+
async checkNetworkExtended(targetDir, autoFix) {
|
|
1832
|
+
const findings = [];
|
|
1833
|
+
// NET-003: Check for HTTPS enforcement
|
|
1834
|
+
let hasHttpsEnforcement = false;
|
|
1835
|
+
try {
|
|
1836
|
+
const files = await fs.readdir(targetDir);
|
|
1837
|
+
for (const file of files) {
|
|
1838
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
1839
|
+
try {
|
|
1840
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1841
|
+
if (content.includes('https') || content.includes('SSL') || content.includes('TLS')) {
|
|
1842
|
+
hasHttpsEnforcement = true;
|
|
1843
|
+
break;
|
|
1844
|
+
}
|
|
1845
|
+
}
|
|
1846
|
+
catch { }
|
|
1847
|
+
}
|
|
1848
|
+
}
|
|
1849
|
+
}
|
|
1850
|
+
catch { }
|
|
1851
|
+
findings.push({
|
|
1852
|
+
checkId: 'NET-003',
|
|
1853
|
+
name: 'HTTPS Configuration',
|
|
1854
|
+
description: 'No HTTPS/TLS configuration detected',
|
|
1855
|
+
category: 'network',
|
|
1856
|
+
severity: 'high',
|
|
1857
|
+
passed: hasHttpsEnforcement,
|
|
1858
|
+
message: hasHttpsEnforcement
|
|
1859
|
+
? 'HTTPS/TLS configuration detected'
|
|
1860
|
+
: 'No HTTPS configuration found - ensure production uses TLS',
|
|
1861
|
+
fixable: false,
|
|
1862
|
+
});
|
|
1863
|
+
// NET-004: Check for exposed debug endpoints
|
|
1864
|
+
let hasDebugEndpoints = false;
|
|
1865
|
+
const debugEndpoints = ['/debug', '/admin', '/metrics', '/health', '/status', '/__debug'];
|
|
1866
|
+
try {
|
|
1867
|
+
const files = await fs.readdir(targetDir);
|
|
1868
|
+
for (const file of files) {
|
|
1869
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
1870
|
+
try {
|
|
1871
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1872
|
+
for (const endpoint of debugEndpoints) {
|
|
1873
|
+
if (content.includes(endpoint)) {
|
|
1874
|
+
hasDebugEndpoints = true;
|
|
1875
|
+
break;
|
|
1876
|
+
}
|
|
1877
|
+
}
|
|
1878
|
+
}
|
|
1879
|
+
catch { }
|
|
1880
|
+
}
|
|
1881
|
+
}
|
|
1882
|
+
}
|
|
1883
|
+
catch { }
|
|
1884
|
+
findings.push({
|
|
1885
|
+
checkId: 'NET-004',
|
|
1886
|
+
name: 'Debug Endpoints',
|
|
1887
|
+
description: 'Debug or admin endpoints may be exposed',
|
|
1888
|
+
category: 'network',
|
|
1889
|
+
severity: 'medium',
|
|
1890
|
+
passed: !hasDebugEndpoints,
|
|
1891
|
+
message: hasDebugEndpoints
|
|
1892
|
+
? 'Debug/admin endpoints detected - ensure they are protected or disabled in production'
|
|
1893
|
+
: 'No obvious debug endpoints found',
|
|
1894
|
+
fixable: false,
|
|
1895
|
+
});
|
|
1896
|
+
// NET-005: Check for WebSocket security
|
|
1897
|
+
let hasWebsocket = false;
|
|
1898
|
+
let hasWsAuth = false;
|
|
1899
|
+
try {
|
|
1900
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1901
|
+
hasWebsocket = pkgJson.includes('ws') || pkgJson.includes('socket.io') || pkgJson.includes('websocket');
|
|
1902
|
+
if (hasWebsocket) {
|
|
1903
|
+
const files = await fs.readdir(targetDir);
|
|
1904
|
+
for (const file of files) {
|
|
1905
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
1906
|
+
try {
|
|
1907
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1908
|
+
if (content.includes('verifyClient') || content.includes('handleUpgrade') || (content.includes('connection') && content.includes('auth'))) {
|
|
1909
|
+
hasWsAuth = true;
|
|
1910
|
+
break;
|
|
1911
|
+
}
|
|
1912
|
+
}
|
|
1913
|
+
catch { }
|
|
1914
|
+
}
|
|
1915
|
+
}
|
|
1916
|
+
}
|
|
1917
|
+
}
|
|
1918
|
+
catch { }
|
|
1919
|
+
findings.push({
|
|
1920
|
+
checkId: 'NET-005',
|
|
1921
|
+
name: 'WebSocket Security',
|
|
1922
|
+
description: 'WebSocket connections may lack authentication',
|
|
1923
|
+
category: 'network',
|
|
1924
|
+
severity: 'high',
|
|
1925
|
+
passed: !hasWebsocket || hasWsAuth,
|
|
1926
|
+
message: !hasWebsocket
|
|
1927
|
+
? 'No WebSocket usage detected'
|
|
1928
|
+
: hasWsAuth
|
|
1929
|
+
? 'WebSocket authentication detected'
|
|
1930
|
+
: 'WebSocket without obvious authentication - ensure connections are verified',
|
|
1931
|
+
fixable: false,
|
|
1932
|
+
});
|
|
1933
|
+
// NET-006: Check for proxy configuration
|
|
1934
|
+
let hasProxyConfig = false;
|
|
1935
|
+
try {
|
|
1936
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1937
|
+
hasProxyConfig = pkgJson.includes('http-proxy') || pkgJson.includes('express-http-proxy');
|
|
1938
|
+
}
|
|
1939
|
+
catch { }
|
|
1940
|
+
if (hasProxyConfig) {
|
|
1941
|
+
findings.push({
|
|
1942
|
+
checkId: 'NET-006',
|
|
1943
|
+
name: 'Proxy Configuration',
|
|
1944
|
+
description: 'HTTP proxy detected - ensure proper access controls',
|
|
1945
|
+
category: 'network',
|
|
1946
|
+
severity: 'medium',
|
|
1947
|
+
passed: true, // Informational
|
|
1948
|
+
message: 'HTTP proxy library detected - verify SSRF protections are in place',
|
|
1949
|
+
fixable: false,
|
|
1950
|
+
});
|
|
1951
|
+
}
|
|
1952
|
+
return findings;
|
|
1953
|
+
}
|
|
1954
|
+
async checkAPISecurity(targetDir, autoFix) {
|
|
1955
|
+
const findings = [];
|
|
1956
|
+
// API-001: Check for API versioning
|
|
1957
|
+
let hasApiVersioning = false;
|
|
1958
|
+
try {
|
|
1959
|
+
const files = await fs.readdir(targetDir);
|
|
1960
|
+
for (const file of files) {
|
|
1961
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
1962
|
+
try {
|
|
1963
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
1964
|
+
if (content.includes('/api/v1') || content.includes('/api/v2') || content.includes('version')) {
|
|
1965
|
+
hasApiVersioning = true;
|
|
1966
|
+
break;
|
|
1967
|
+
}
|
|
1968
|
+
}
|
|
1969
|
+
catch { }
|
|
1970
|
+
}
|
|
1971
|
+
}
|
|
1972
|
+
}
|
|
1973
|
+
catch { }
|
|
1974
|
+
findings.push({
|
|
1975
|
+
checkId: 'API-001',
|
|
1976
|
+
name: 'API Versioning',
|
|
1977
|
+
description: 'API versioning not detected',
|
|
1978
|
+
category: 'api',
|
|
1979
|
+
severity: 'low',
|
|
1980
|
+
passed: hasApiVersioning,
|
|
1981
|
+
message: hasApiVersioning
|
|
1982
|
+
? 'API versioning pattern detected'
|
|
1983
|
+
: 'Consider implementing API versioning for backwards compatibility',
|
|
1984
|
+
fixable: false,
|
|
1985
|
+
});
|
|
1986
|
+
// API-002: Check for API documentation
|
|
1987
|
+
let hasApiDocs = false;
|
|
1988
|
+
try {
|
|
1989
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
1990
|
+
hasApiDocs = pkgJson.includes('swagger') || pkgJson.includes('openapi') || pkgJson.includes('@apidevtools');
|
|
1991
|
+
}
|
|
1992
|
+
catch { }
|
|
1993
|
+
findings.push({
|
|
1994
|
+
checkId: 'API-002',
|
|
1995
|
+
name: 'API Documentation',
|
|
1996
|
+
description: 'No API documentation library detected',
|
|
1997
|
+
category: 'api',
|
|
1998
|
+
severity: 'low',
|
|
1999
|
+
passed: hasApiDocs,
|
|
2000
|
+
message: hasApiDocs
|
|
2001
|
+
? 'API documentation library detected'
|
|
2002
|
+
: 'Consider adding OpenAPI/Swagger documentation',
|
|
2003
|
+
fixable: false,
|
|
2004
|
+
});
|
|
2005
|
+
// API-003: Check for API key in URL
|
|
2006
|
+
let hasKeyInUrl = false;
|
|
2007
|
+
try {
|
|
2008
|
+
const files = await fs.readdir(targetDir);
|
|
2009
|
+
for (const file of files) {
|
|
2010
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2011
|
+
try {
|
|
2012
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2013
|
+
if (content.includes('apiKey=') || content.includes('api_key=') || content.includes('key=')) {
|
|
2014
|
+
if (content.includes('query') || content.includes('req.query')) {
|
|
2015
|
+
hasKeyInUrl = true;
|
|
2016
|
+
break;
|
|
2017
|
+
}
|
|
2018
|
+
}
|
|
2019
|
+
}
|
|
2020
|
+
catch { }
|
|
2021
|
+
}
|
|
2022
|
+
}
|
|
2023
|
+
}
|
|
2024
|
+
catch { }
|
|
2025
|
+
findings.push({
|
|
2026
|
+
checkId: 'API-003',
|
|
2027
|
+
name: 'API Key in URL',
|
|
2028
|
+
description: 'API keys may be passed in URL query parameters',
|
|
2029
|
+
category: 'api',
|
|
2030
|
+
severity: 'high',
|
|
2031
|
+
passed: !hasKeyInUrl,
|
|
2032
|
+
message: hasKeyInUrl
|
|
2033
|
+
? 'API key in URL pattern detected - use headers instead'
|
|
2034
|
+
: 'No obvious API key in URL patterns found',
|
|
2035
|
+
fixable: false,
|
|
2036
|
+
});
|
|
2037
|
+
// API-004: Check for response headers security
|
|
2038
|
+
let hasSecurityHeaders = false;
|
|
2039
|
+
try {
|
|
2040
|
+
const files = await fs.readdir(targetDir);
|
|
2041
|
+
for (const file of files) {
|
|
2042
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2043
|
+
try {
|
|
2044
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2045
|
+
if (content.includes('X-Content-Type-Options') || content.includes('X-Frame-Options') || content.includes('Content-Security-Policy')) {
|
|
2046
|
+
hasSecurityHeaders = true;
|
|
2047
|
+
break;
|
|
2048
|
+
}
|
|
2049
|
+
}
|
|
2050
|
+
catch { }
|
|
2051
|
+
}
|
|
2052
|
+
}
|
|
2053
|
+
}
|
|
2054
|
+
catch { }
|
|
2055
|
+
findings.push({
|
|
2056
|
+
checkId: 'API-004',
|
|
2057
|
+
name: 'API Security Headers',
|
|
2058
|
+
description: 'Security headers not explicitly set',
|
|
2059
|
+
category: 'api',
|
|
2060
|
+
severity: 'medium',
|
|
2061
|
+
passed: hasSecurityHeaders,
|
|
2062
|
+
message: hasSecurityHeaders
|
|
2063
|
+
? 'Security headers detected in responses'
|
|
2064
|
+
: 'Add security headers (X-Content-Type-Options, X-Frame-Options, CSP)',
|
|
2065
|
+
fixable: false,
|
|
2066
|
+
});
|
|
2067
|
+
return findings;
|
|
2068
|
+
}
|
|
2069
|
+
async checkSecretManagement(targetDir, autoFix) {
|
|
2070
|
+
const findings = [];
|
|
2071
|
+
// SEC-001: Check for secret management tools
|
|
2072
|
+
let hasSecretManager = false;
|
|
2073
|
+
try {
|
|
2074
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
2075
|
+
hasSecretManager = pkgJson.includes('vault') || pkgJson.includes('aws-sdk') || pkgJson.includes('dotenv-vault') || pkgJson.includes('1password');
|
|
2076
|
+
}
|
|
2077
|
+
catch { }
|
|
2078
|
+
findings.push({
|
|
2079
|
+
checkId: 'SEC-001',
|
|
2080
|
+
name: 'Secret Management',
|
|
2081
|
+
description: 'No secret management tool detected',
|
|
2082
|
+
category: 'secrets',
|
|
2083
|
+
severity: 'medium',
|
|
2084
|
+
passed: hasSecretManager,
|
|
2085
|
+
message: hasSecretManager
|
|
2086
|
+
? 'Secret management capability detected'
|
|
2087
|
+
: 'Consider using a secret manager (Vault, AWS Secrets Manager, doppler)',
|
|
2088
|
+
fixable: false,
|
|
2089
|
+
});
|
|
2090
|
+
// SEC-002: Check for encryption library
|
|
2091
|
+
let hasEncryption = false;
|
|
2092
|
+
try {
|
|
2093
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
2094
|
+
hasEncryption = pkgJson.includes('crypto') || pkgJson.includes('bcrypt') || pkgJson.includes('argon2') || pkgJson.includes('sodium');
|
|
2095
|
+
}
|
|
2096
|
+
catch { }
|
|
2097
|
+
findings.push({
|
|
2098
|
+
checkId: 'SEC-002',
|
|
2099
|
+
name: 'Encryption Library',
|
|
2100
|
+
description: 'No encryption library detected',
|
|
2101
|
+
category: 'secrets',
|
|
2102
|
+
severity: 'medium',
|
|
2103
|
+
passed: hasEncryption,
|
|
2104
|
+
message: hasEncryption
|
|
2105
|
+
? 'Encryption library detected'
|
|
2106
|
+
: 'Consider using encryption for sensitive data (bcrypt, argon2)',
|
|
2107
|
+
fixable: false,
|
|
2108
|
+
});
|
|
2109
|
+
// SEC-003: Check for key rotation support
|
|
2110
|
+
let hasKeyRotation = false;
|
|
2111
|
+
try {
|
|
2112
|
+
const files = await fs.readdir(targetDir);
|
|
2113
|
+
for (const file of files) {
|
|
2114
|
+
try {
|
|
2115
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2116
|
+
if (content.includes('rotation') || content.includes('rotate') || content.includes('KEY_VERSION')) {
|
|
2117
|
+
hasKeyRotation = true;
|
|
2118
|
+
break;
|
|
2119
|
+
}
|
|
2120
|
+
}
|
|
2121
|
+
catch { }
|
|
2122
|
+
}
|
|
2123
|
+
}
|
|
2124
|
+
catch { }
|
|
2125
|
+
findings.push({
|
|
2126
|
+
checkId: 'SEC-003',
|
|
2127
|
+
name: 'Key Rotation Support',
|
|
2128
|
+
description: 'No key rotation mechanism detected',
|
|
2129
|
+
category: 'secrets',
|
|
2130
|
+
severity: 'low',
|
|
2131
|
+
passed: hasKeyRotation,
|
|
2132
|
+
message: hasKeyRotation
|
|
2133
|
+
? 'Key rotation support detected'
|
|
2134
|
+
: 'Consider implementing key rotation for long-lived secrets',
|
|
2135
|
+
fixable: false,
|
|
2136
|
+
});
|
|
2137
|
+
// SEC-004: Check for hardcoded connection strings
|
|
2138
|
+
let hasHardcodedConnStr = false;
|
|
2139
|
+
const connPatterns = ['mongodb://', 'postgres://', 'mysql://', 'redis://', 'amqp://'];
|
|
2140
|
+
try {
|
|
2141
|
+
const files = await fs.readdir(targetDir);
|
|
2142
|
+
for (const file of files) {
|
|
2143
|
+
if (file.endsWith('.ts') || file.endsWith('.js') || file.endsWith('.json')) {
|
|
2144
|
+
try {
|
|
2145
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2146
|
+
for (const pattern of connPatterns) {
|
|
2147
|
+
if (content.includes(pattern) && !content.includes('${') && !content.includes('process.env')) {
|
|
2148
|
+
hasHardcodedConnStr = true;
|
|
2149
|
+
break;
|
|
2150
|
+
}
|
|
2151
|
+
}
|
|
2152
|
+
}
|
|
2153
|
+
catch { }
|
|
2154
|
+
}
|
|
2155
|
+
}
|
|
2156
|
+
}
|
|
2157
|
+
catch { }
|
|
2158
|
+
findings.push({
|
|
2159
|
+
checkId: 'SEC-004',
|
|
2160
|
+
name: 'Hardcoded Connection Strings',
|
|
2161
|
+
description: 'Connection strings may be hardcoded',
|
|
2162
|
+
category: 'secrets',
|
|
2163
|
+
severity: 'critical',
|
|
2164
|
+
passed: !hasHardcodedConnStr,
|
|
2165
|
+
message: hasHardcodedConnStr
|
|
2166
|
+
? 'Hardcoded connection strings detected - use environment variables'
|
|
2167
|
+
: 'No hardcoded connection strings found',
|
|
2168
|
+
fixable: false,
|
|
2169
|
+
});
|
|
2170
|
+
return findings;
|
|
2171
|
+
}
|
|
2172
|
+
async checkIOSecurity(targetDir, autoFix) {
|
|
2173
|
+
const findings = [];
|
|
2174
|
+
// IO-001: Check for file upload handling
|
|
2175
|
+
let hasFileUpload = false;
|
|
2176
|
+
let hasUploadSecurity = false;
|
|
2177
|
+
try {
|
|
2178
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
2179
|
+
hasFileUpload = pkgJson.includes('multer') || pkgJson.includes('formidable') || pkgJson.includes('busboy');
|
|
2180
|
+
if (hasFileUpload) {
|
|
2181
|
+
const files = await fs.readdir(targetDir);
|
|
2182
|
+
for (const file of files) {
|
|
2183
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2184
|
+
try {
|
|
2185
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2186
|
+
if (content.includes('fileFilter') || content.includes('limits') || content.includes('mimetype')) {
|
|
2187
|
+
hasUploadSecurity = true;
|
|
2188
|
+
break;
|
|
2189
|
+
}
|
|
2190
|
+
}
|
|
2191
|
+
catch { }
|
|
2192
|
+
}
|
|
2193
|
+
}
|
|
2194
|
+
}
|
|
2195
|
+
}
|
|
2196
|
+
catch { }
|
|
2197
|
+
findings.push({
|
|
2198
|
+
checkId: 'IO-001',
|
|
2199
|
+
name: 'File Upload Security',
|
|
2200
|
+
description: 'File upload without proper validation',
|
|
2201
|
+
category: 'io',
|
|
2202
|
+
severity: 'high',
|
|
2203
|
+
passed: !hasFileUpload || hasUploadSecurity,
|
|
2204
|
+
message: !hasFileUpload
|
|
2205
|
+
? 'No file upload handling detected'
|
|
2206
|
+
: hasUploadSecurity
|
|
2207
|
+
? 'File upload validation detected'
|
|
2208
|
+
: 'File upload without obvious validation - add file type/size limits',
|
|
2209
|
+
fixable: false,
|
|
2210
|
+
});
|
|
2211
|
+
// IO-002: Check for SQL/NoSQL injection protection
|
|
2212
|
+
let hasDbLibrary = false;
|
|
2213
|
+
let hasParameterization = false;
|
|
2214
|
+
try {
|
|
2215
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
2216
|
+
hasDbLibrary = pkgJson.includes('pg') || pkgJson.includes('mysql') || pkgJson.includes('mongodb') || pkgJson.includes('prisma') || pkgJson.includes('sequelize');
|
|
2217
|
+
if (hasDbLibrary) {
|
|
2218
|
+
// ORMs and query builders generally handle parameterization
|
|
2219
|
+
hasParameterization = pkgJson.includes('prisma') || pkgJson.includes('sequelize') || pkgJson.includes('typeorm') || pkgJson.includes('knex');
|
|
2220
|
+
}
|
|
2221
|
+
}
|
|
2222
|
+
catch { }
|
|
2223
|
+
findings.push({
|
|
2224
|
+
checkId: 'IO-002',
|
|
2225
|
+
name: 'Query Parameterization',
|
|
2226
|
+
description: 'Database queries may be vulnerable to injection',
|
|
2227
|
+
category: 'io',
|
|
2228
|
+
severity: 'critical',
|
|
2229
|
+
passed: !hasDbLibrary || hasParameterization,
|
|
2230
|
+
message: !hasDbLibrary
|
|
2231
|
+
? 'No database library detected'
|
|
2232
|
+
: hasParameterization
|
|
2233
|
+
? 'ORM/query builder detected - provides parameterization'
|
|
2234
|
+
: 'Raw database driver detected - ensure parameterized queries are used',
|
|
2235
|
+
fixable: false,
|
|
2236
|
+
});
|
|
2237
|
+
// IO-003: Check for XSS protection
|
|
2238
|
+
let hasXssProtection = false;
|
|
2239
|
+
try {
|
|
2240
|
+
const pkgJson = await fs.readFile(path.join(targetDir, 'package.json'), 'utf-8');
|
|
2241
|
+
hasXssProtection = pkgJson.includes('xss') || pkgJson.includes('sanitize') || pkgJson.includes('DOMPurify') || pkgJson.includes('helmet');
|
|
2242
|
+
}
|
|
2243
|
+
catch { }
|
|
2244
|
+
findings.push({
|
|
2245
|
+
checkId: 'IO-003',
|
|
2246
|
+
name: 'XSS Protection',
|
|
2247
|
+
description: 'No XSS protection library detected',
|
|
2248
|
+
category: 'io',
|
|
2249
|
+
severity: 'high',
|
|
2250
|
+
passed: hasXssProtection,
|
|
2251
|
+
message: hasXssProtection
|
|
2252
|
+
? 'XSS protection library detected'
|
|
2253
|
+
: 'No XSS protection library found - sanitize user input before rendering',
|
|
2254
|
+
fixable: false,
|
|
2255
|
+
});
|
|
2256
|
+
// IO-004: Check for path traversal protection
|
|
2257
|
+
let hasPathTraversal = false;
|
|
2258
|
+
try {
|
|
2259
|
+
const files = await fs.readdir(targetDir);
|
|
2260
|
+
for (const file of files) {
|
|
2261
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2262
|
+
try {
|
|
2263
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2264
|
+
// Check for dangerous patterns
|
|
2265
|
+
if (content.includes('req.params') && content.includes('readFile')) {
|
|
2266
|
+
if (!content.includes('path.normalize') && !content.includes('path.resolve')) {
|
|
2267
|
+
hasPathTraversal = true;
|
|
2268
|
+
}
|
|
2269
|
+
}
|
|
2270
|
+
}
|
|
2271
|
+
catch { }
|
|
2272
|
+
}
|
|
2273
|
+
}
|
|
2274
|
+
}
|
|
2275
|
+
catch { }
|
|
2276
|
+
findings.push({
|
|
2277
|
+
checkId: 'IO-004',
|
|
2278
|
+
name: 'Path Traversal Protection',
|
|
2279
|
+
description: 'Potential path traversal vulnerability',
|
|
2280
|
+
category: 'io',
|
|
2281
|
+
severity: 'high',
|
|
2282
|
+
passed: !hasPathTraversal,
|
|
2283
|
+
message: hasPathTraversal
|
|
2284
|
+
? 'Potential path traversal detected - use path.resolve/normalize'
|
|
2285
|
+
: 'No obvious path traversal vulnerabilities found',
|
|
2286
|
+
fixable: false,
|
|
2287
|
+
});
|
|
2288
|
+
return findings;
|
|
2289
|
+
}
|
|
2290
|
+
/**
|
|
2291
|
+
* Prompt injection defense checks
|
|
2292
|
+
*/
|
|
2293
|
+
async checkPromptSecurity(targetDir, autoFix) {
|
|
2294
|
+
const findings = [];
|
|
2295
|
+
// PROMPT-001: Check for system prompt boundary markers
|
|
2296
|
+
let hasPromptBoundaries = false;
|
|
2297
|
+
const claudeMdPath = path.join(targetDir, 'CLAUDE.md');
|
|
2298
|
+
try {
|
|
2299
|
+
const content = await fs.readFile(claudeMdPath, 'utf-8');
|
|
2300
|
+
hasPromptBoundaries =
|
|
2301
|
+
content.includes('SYSTEM:') ||
|
|
2302
|
+
content.includes('USER:') ||
|
|
2303
|
+
content.includes('---') ||
|
|
2304
|
+
content.includes('###') ||
|
|
2305
|
+
content.toLowerCase().includes('do not follow instructions') ||
|
|
2306
|
+
content.toLowerCase().includes('ignore attempts to');
|
|
2307
|
+
}
|
|
2308
|
+
catch { }
|
|
2309
|
+
findings.push({
|
|
2310
|
+
checkId: 'PROMPT-001',
|
|
2311
|
+
name: 'Prompt Boundary Markers',
|
|
2312
|
+
description: 'System prompts should have clear boundary markers to prevent injection',
|
|
2313
|
+
category: 'prompt-security',
|
|
2314
|
+
severity: 'high',
|
|
2315
|
+
passed: hasPromptBoundaries,
|
|
2316
|
+
message: hasPromptBoundaries
|
|
2317
|
+
? 'Prompt boundaries detected in CLAUDE.md'
|
|
2318
|
+
: 'Consider adding prompt boundary markers to prevent injection attacks',
|
|
2319
|
+
fixable: false,
|
|
2320
|
+
});
|
|
2321
|
+
// PROMPT-002: Check for injection defense instructions
|
|
2322
|
+
let hasInjectionDefense = false;
|
|
2323
|
+
try {
|
|
2324
|
+
const content = await fs.readFile(claudeMdPath, 'utf-8');
|
|
2325
|
+
hasInjectionDefense =
|
|
2326
|
+
content.toLowerCase().includes('injection') ||
|
|
2327
|
+
content.toLowerCase().includes('malicious') ||
|
|
2328
|
+
content.toLowerCase().includes('untrusted') ||
|
|
2329
|
+
content.toLowerCase().includes('sanitize') ||
|
|
2330
|
+
content.toLowerCase().includes('validate input');
|
|
2331
|
+
}
|
|
2332
|
+
catch { }
|
|
2333
|
+
findings.push({
|
|
2334
|
+
checkId: 'PROMPT-002',
|
|
2335
|
+
name: 'Injection Defense Instructions',
|
|
2336
|
+
description: 'System prompts should include injection defense guidance',
|
|
2337
|
+
category: 'prompt-security',
|
|
2338
|
+
severity: 'medium',
|
|
2339
|
+
passed: hasInjectionDefense,
|
|
2340
|
+
message: hasInjectionDefense
|
|
2341
|
+
? 'Injection defense instructions found'
|
|
2342
|
+
: 'Consider adding injection defense instructions to system prompts',
|
|
2343
|
+
fixable: false,
|
|
2344
|
+
});
|
|
2345
|
+
// PROMPT-003: Check for output constraints
|
|
2346
|
+
let hasOutputConstraints = false;
|
|
2347
|
+
try {
|
|
2348
|
+
const content = await fs.readFile(claudeMdPath, 'utf-8');
|
|
2349
|
+
hasOutputConstraints =
|
|
2350
|
+
content.toLowerCase().includes('never output') ||
|
|
2351
|
+
content.toLowerCase().includes('do not reveal') ||
|
|
2352
|
+
content.toLowerCase().includes('do not disclose') ||
|
|
2353
|
+
content.toLowerCase().includes('keep confidential') ||
|
|
2354
|
+
content.toLowerCase().includes('do not share');
|
|
2355
|
+
}
|
|
2356
|
+
catch { }
|
|
2357
|
+
findings.push({
|
|
2358
|
+
checkId: 'PROMPT-003',
|
|
2359
|
+
name: 'Output Confidentiality Rules',
|
|
2360
|
+
description: 'System prompts should define output confidentiality constraints',
|
|
2361
|
+
category: 'prompt-security',
|
|
2362
|
+
severity: 'medium',
|
|
2363
|
+
passed: hasOutputConstraints,
|
|
2364
|
+
message: hasOutputConstraints
|
|
2365
|
+
? 'Output confidentiality rules defined'
|
|
2366
|
+
: 'Consider defining what information should not be disclosed',
|
|
2367
|
+
fixable: false,
|
|
2368
|
+
});
|
|
2369
|
+
// PROMPT-004: Check for role confusion protection
|
|
2370
|
+
let hasRoleProtection = false;
|
|
2371
|
+
try {
|
|
2372
|
+
const content = await fs.readFile(claudeMdPath, 'utf-8');
|
|
2373
|
+
hasRoleProtection =
|
|
2374
|
+
content.toLowerCase().includes('you are') ||
|
|
2375
|
+
content.toLowerCase().includes('your role') ||
|
|
2376
|
+
content.toLowerCase().includes('as an assistant') ||
|
|
2377
|
+
content.toLowerCase().includes('maintain your role');
|
|
2378
|
+
}
|
|
2379
|
+
catch { }
|
|
2380
|
+
findings.push({
|
|
2381
|
+
checkId: 'PROMPT-004',
|
|
2382
|
+
name: 'Role Definition Protection',
|
|
2383
|
+
description: 'System prompts should clearly define the AI role to prevent confusion attacks',
|
|
2384
|
+
category: 'prompt-security',
|
|
2385
|
+
severity: 'low',
|
|
2386
|
+
passed: hasRoleProtection,
|
|
2387
|
+
message: hasRoleProtection
|
|
2388
|
+
? 'Role definition found in prompts'
|
|
2389
|
+
: 'Consider clearly defining the AI role to prevent role confusion attacks',
|
|
2390
|
+
fixable: false,
|
|
2391
|
+
});
|
|
2392
|
+
return findings;
|
|
2393
|
+
}
|
|
2394
|
+
/**
|
|
2395
|
+
* Input validation and sanitization checks
|
|
2396
|
+
*/
|
|
2397
|
+
async checkInputValidation(targetDir, autoFix) {
|
|
2398
|
+
const findings = [];
|
|
2399
|
+
// INJ-001: Check for input validation in MCP handlers
|
|
2400
|
+
let hasInputValidation = false;
|
|
2401
|
+
try {
|
|
2402
|
+
const files = await fs.readdir(targetDir);
|
|
2403
|
+
for (const file of files) {
|
|
2404
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2405
|
+
try {
|
|
2406
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2407
|
+
if (content.includes('zod') ||
|
|
2408
|
+
content.includes('joi') ||
|
|
2409
|
+
content.includes('yup') ||
|
|
2410
|
+
content.includes('validate(') ||
|
|
2411
|
+
content.includes('sanitize(') ||
|
|
2412
|
+
content.includes('schema.')) {
|
|
2413
|
+
hasInputValidation = true;
|
|
2414
|
+
break;
|
|
2415
|
+
}
|
|
2416
|
+
}
|
|
2417
|
+
catch { }
|
|
2418
|
+
}
|
|
2419
|
+
}
|
|
2420
|
+
}
|
|
2421
|
+
catch { }
|
|
2422
|
+
findings.push({
|
|
2423
|
+
checkId: 'INJ-001',
|
|
2424
|
+
name: 'Input Validation Library',
|
|
2425
|
+
description: 'Applications should use schema validation for inputs',
|
|
2426
|
+
category: 'input-validation',
|
|
2427
|
+
severity: 'high',
|
|
2428
|
+
passed: hasInputValidation,
|
|
2429
|
+
message: hasInputValidation
|
|
2430
|
+
? 'Input validation library detected'
|
|
2431
|
+
: 'Consider using zod, joi, or similar for input validation',
|
|
2432
|
+
fixable: false,
|
|
2433
|
+
});
|
|
2434
|
+
// INJ-002: Check for XSS protection patterns
|
|
2435
|
+
let hasXssProtection = false;
|
|
2436
|
+
try {
|
|
2437
|
+
const files = await fs.readdir(targetDir);
|
|
2438
|
+
for (const file of files) {
|
|
2439
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2440
|
+
try {
|
|
2441
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2442
|
+
if (content.includes('escapeHtml') ||
|
|
2443
|
+
content.includes('sanitizeHtml') ||
|
|
2444
|
+
content.includes('DOMPurify') ||
|
|
2445
|
+
content.includes('xss(') ||
|
|
2446
|
+
content.includes('encode(')) {
|
|
2447
|
+
hasXssProtection = true;
|
|
2448
|
+
break;
|
|
2449
|
+
}
|
|
2450
|
+
}
|
|
2451
|
+
catch { }
|
|
2452
|
+
}
|
|
2453
|
+
}
|
|
2454
|
+
}
|
|
2455
|
+
catch { }
|
|
2456
|
+
findings.push({
|
|
2457
|
+
checkId: 'INJ-002',
|
|
2458
|
+
name: 'XSS Protection',
|
|
2459
|
+
description: 'Output should be properly escaped to prevent XSS',
|
|
2460
|
+
category: 'input-validation',
|
|
2461
|
+
severity: 'high',
|
|
2462
|
+
passed: hasXssProtection,
|
|
2463
|
+
message: hasXssProtection
|
|
2464
|
+
? 'XSS protection patterns detected'
|
|
2465
|
+
: 'Consider implementing output escaping for user-facing content',
|
|
2466
|
+
fixable: false,
|
|
2467
|
+
});
|
|
2468
|
+
// INJ-003: Check for SQL injection protection
|
|
2469
|
+
let hasSqlProtection = false;
|
|
2470
|
+
try {
|
|
2471
|
+
const files = await fs.readdir(targetDir);
|
|
2472
|
+
for (const file of files) {
|
|
2473
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2474
|
+
try {
|
|
2475
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2476
|
+
if (content.includes('parameterized') ||
|
|
2477
|
+
content.includes('prepared') ||
|
|
2478
|
+
content.includes('$1') ||
|
|
2479
|
+
content.includes('?') && content.includes('query(') ||
|
|
2480
|
+
content.includes('prisma') ||
|
|
2481
|
+
content.includes('knex') ||
|
|
2482
|
+
content.includes('sequelize')) {
|
|
2483
|
+
hasSqlProtection = true;
|
|
2484
|
+
break;
|
|
2485
|
+
}
|
|
2486
|
+
}
|
|
2487
|
+
catch { }
|
|
2488
|
+
}
|
|
2489
|
+
}
|
|
2490
|
+
}
|
|
2491
|
+
catch { }
|
|
2492
|
+
findings.push({
|
|
2493
|
+
checkId: 'INJ-003',
|
|
2494
|
+
name: 'SQL Injection Protection',
|
|
2495
|
+
description: 'Database queries should use parameterized statements',
|
|
2496
|
+
category: 'input-validation',
|
|
2497
|
+
severity: 'critical',
|
|
2498
|
+
passed: hasSqlProtection,
|
|
2499
|
+
message: hasSqlProtection
|
|
2500
|
+
? 'Parameterized queries or ORM detected'
|
|
2501
|
+
: 'Ensure all database queries use parameterized statements',
|
|
2502
|
+
fixable: false,
|
|
2503
|
+
});
|
|
2504
|
+
// INJ-004: Check for command injection protection
|
|
2505
|
+
let hasCmdProtection = false;
|
|
2506
|
+
try {
|
|
2507
|
+
const files = await fs.readdir(targetDir);
|
|
2508
|
+
let hasExec = false;
|
|
2509
|
+
for (const file of files) {
|
|
2510
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2511
|
+
try {
|
|
2512
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2513
|
+
if (content.includes('exec(') || content.includes('spawn(')) {
|
|
2514
|
+
hasExec = true;
|
|
2515
|
+
if (content.includes('execFile') ||
|
|
2516
|
+
content.includes('shell: false') ||
|
|
2517
|
+
content.includes('shellEscape') ||
|
|
2518
|
+
!content.includes('${')) {
|
|
2519
|
+
hasCmdProtection = true;
|
|
2520
|
+
}
|
|
2521
|
+
}
|
|
2522
|
+
}
|
|
2523
|
+
catch { }
|
|
2524
|
+
}
|
|
2525
|
+
}
|
|
2526
|
+
if (!hasExec)
|
|
2527
|
+
hasCmdProtection = true; // No exec calls found
|
|
2528
|
+
}
|
|
2529
|
+
catch {
|
|
2530
|
+
hasCmdProtection = true;
|
|
2531
|
+
}
|
|
2532
|
+
findings.push({
|
|
2533
|
+
checkId: 'INJ-004',
|
|
2534
|
+
name: 'Command Injection Protection',
|
|
2535
|
+
description: 'Shell commands should use safe execution patterns',
|
|
2536
|
+
category: 'input-validation',
|
|
2537
|
+
severity: 'critical',
|
|
2538
|
+
passed: hasCmdProtection,
|
|
2539
|
+
message: hasCmdProtection
|
|
2540
|
+
? 'Safe command execution patterns detected or no shell commands found'
|
|
2541
|
+
: 'Use execFile instead of exec, or disable shell interpolation',
|
|
2542
|
+
fixable: false,
|
|
2543
|
+
});
|
|
2544
|
+
return findings;
|
|
2545
|
+
}
|
|
2546
|
+
/**
|
|
2547
|
+
* Rate limiting and throttling checks
|
|
2548
|
+
*/
|
|
2549
|
+
async checkRateLimiting(targetDir, autoFix) {
|
|
2550
|
+
const findings = [];
|
|
2551
|
+
// RATE-001: Check for rate limiting configuration
|
|
2552
|
+
let hasRateLimiting = false;
|
|
2553
|
+
try {
|
|
2554
|
+
const pkgPath = path.join(targetDir, 'package.json');
|
|
2555
|
+
const content = await fs.readFile(pkgPath, 'utf-8');
|
|
2556
|
+
const pkg = JSON.parse(content);
|
|
2557
|
+
const deps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
2558
|
+
hasRateLimiting =
|
|
2559
|
+
'express-rate-limit' in deps ||
|
|
2560
|
+
'rate-limiter-flexible' in deps ||
|
|
2561
|
+
'bottleneck' in deps ||
|
|
2562
|
+
'@upstash/ratelimit' in deps;
|
|
2563
|
+
}
|
|
2564
|
+
catch { }
|
|
2565
|
+
findings.push({
|
|
2566
|
+
checkId: 'RATE-001',
|
|
2567
|
+
name: 'Rate Limiting Configuration',
|
|
2568
|
+
description: 'API endpoints should have rate limiting',
|
|
2569
|
+
category: 'rate-limiting',
|
|
2570
|
+
severity: 'medium',
|
|
2571
|
+
passed: hasRateLimiting,
|
|
2572
|
+
message: hasRateLimiting
|
|
2573
|
+
? 'Rate limiting library detected'
|
|
2574
|
+
: 'Consider implementing rate limiting to prevent abuse',
|
|
2575
|
+
fixable: false,
|
|
2576
|
+
});
|
|
2577
|
+
// RATE-002: Check for retry/backoff patterns
|
|
2578
|
+
let hasBackoff = false;
|
|
2579
|
+
try {
|
|
2580
|
+
const files = await fs.readdir(targetDir);
|
|
2581
|
+
for (const file of files) {
|
|
2582
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2583
|
+
try {
|
|
2584
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2585
|
+
if (content.includes('retry') ||
|
|
2586
|
+
content.includes('backoff') ||
|
|
2587
|
+
content.includes('exponential') ||
|
|
2588
|
+
content.includes('p-retry')) {
|
|
2589
|
+
hasBackoff = true;
|
|
2590
|
+
break;
|
|
2591
|
+
}
|
|
2592
|
+
}
|
|
2593
|
+
catch { }
|
|
2594
|
+
}
|
|
2595
|
+
}
|
|
2596
|
+
}
|
|
2597
|
+
catch { }
|
|
2598
|
+
findings.push({
|
|
2599
|
+
checkId: 'RATE-002',
|
|
2600
|
+
name: 'Retry with Backoff',
|
|
2601
|
+
description: 'External calls should implement exponential backoff',
|
|
2602
|
+
category: 'rate-limiting',
|
|
2603
|
+
severity: 'low',
|
|
2604
|
+
passed: hasBackoff,
|
|
2605
|
+
message: hasBackoff
|
|
2606
|
+
? 'Retry/backoff patterns detected'
|
|
2607
|
+
: 'Consider implementing exponential backoff for external calls',
|
|
2608
|
+
fixable: false,
|
|
2609
|
+
});
|
|
2610
|
+
// RATE-003: Check for timeout configurations
|
|
2611
|
+
let hasTimeouts = false;
|
|
2612
|
+
try {
|
|
2613
|
+
const files = await fs.readdir(targetDir);
|
|
2614
|
+
for (const file of files) {
|
|
2615
|
+
if (file.endsWith('.ts') || file.endsWith('.js') || file.endsWith('.json')) {
|
|
2616
|
+
try {
|
|
2617
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2618
|
+
if (content.includes('timeout') ||
|
|
2619
|
+
content.includes('Timeout') ||
|
|
2620
|
+
content.includes('TIMEOUT')) {
|
|
2621
|
+
hasTimeouts = true;
|
|
2622
|
+
break;
|
|
2623
|
+
}
|
|
2624
|
+
}
|
|
2625
|
+
catch { }
|
|
2626
|
+
}
|
|
2627
|
+
}
|
|
2628
|
+
}
|
|
2629
|
+
catch { }
|
|
2630
|
+
findings.push({
|
|
2631
|
+
checkId: 'RATE-003',
|
|
2632
|
+
name: 'Timeout Configuration',
|
|
2633
|
+
description: 'Operations should have appropriate timeouts',
|
|
2634
|
+
category: 'rate-limiting',
|
|
2635
|
+
severity: 'medium',
|
|
2636
|
+
passed: hasTimeouts,
|
|
2637
|
+
message: hasTimeouts
|
|
2638
|
+
? 'Timeout configurations detected'
|
|
2639
|
+
: 'Consider setting timeouts for external calls and long-running operations',
|
|
2640
|
+
fixable: false,
|
|
2641
|
+
});
|
|
2642
|
+
// RATE-004: Check for concurrent request limiting
|
|
2643
|
+
let hasConcurrencyLimit = false;
|
|
2644
|
+
try {
|
|
2645
|
+
const files = await fs.readdir(targetDir);
|
|
2646
|
+
for (const file of files) {
|
|
2647
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2648
|
+
try {
|
|
2649
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2650
|
+
if (content.includes('p-limit') ||
|
|
2651
|
+
content.includes('semaphore') ||
|
|
2652
|
+
content.includes('concurrency') ||
|
|
2653
|
+
content.includes('maxConcurrent')) {
|
|
2654
|
+
hasConcurrencyLimit = true;
|
|
2655
|
+
break;
|
|
2656
|
+
}
|
|
2657
|
+
}
|
|
2658
|
+
catch { }
|
|
2659
|
+
}
|
|
2660
|
+
}
|
|
2661
|
+
}
|
|
2662
|
+
catch { }
|
|
2663
|
+
findings.push({
|
|
2664
|
+
checkId: 'RATE-004',
|
|
2665
|
+
name: 'Concurrency Limits',
|
|
2666
|
+
description: 'Concurrent operations should be limited',
|
|
2667
|
+
category: 'rate-limiting',
|
|
2668
|
+
severity: 'low',
|
|
2669
|
+
passed: hasConcurrencyLimit,
|
|
2670
|
+
message: hasConcurrencyLimit
|
|
2671
|
+
? 'Concurrency limiting detected'
|
|
2672
|
+
: 'Consider limiting concurrent operations to prevent resource exhaustion',
|
|
2673
|
+
fixable: false,
|
|
2674
|
+
});
|
|
2675
|
+
return findings;
|
|
2676
|
+
}
|
|
2677
|
+
/**
|
|
2678
|
+
* Session and timeout security checks
|
|
2679
|
+
*/
|
|
2680
|
+
async checkSessionSecurity(targetDir, autoFix) {
|
|
2681
|
+
const findings = [];
|
|
2682
|
+
// SESSION-001: Check for secure session configuration
|
|
2683
|
+
let hasSecureSessions = false;
|
|
2684
|
+
try {
|
|
2685
|
+
const files = await fs.readdir(targetDir);
|
|
2686
|
+
for (const file of files) {
|
|
2687
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2688
|
+
try {
|
|
2689
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2690
|
+
if (content.includes('httpOnly') ||
|
|
2691
|
+
content.includes('secure: true') ||
|
|
2692
|
+
content.includes('sameSite')) {
|
|
2693
|
+
hasSecureSessions = true;
|
|
2694
|
+
break;
|
|
2695
|
+
}
|
|
2696
|
+
}
|
|
2697
|
+
catch { }
|
|
2698
|
+
}
|
|
2699
|
+
}
|
|
2700
|
+
}
|
|
2701
|
+
catch { }
|
|
2702
|
+
findings.push({
|
|
2703
|
+
checkId: 'SESSION-001',
|
|
2704
|
+
name: 'Secure Cookie Settings',
|
|
2705
|
+
description: 'Session cookies should have secure flags',
|
|
2706
|
+
category: 'session-security',
|
|
2707
|
+
severity: 'high',
|
|
2708
|
+
passed: hasSecureSessions,
|
|
2709
|
+
message: hasSecureSessions
|
|
2710
|
+
? 'Secure cookie flags detected'
|
|
2711
|
+
: 'Set httpOnly, secure, and sameSite on session cookies',
|
|
2712
|
+
fixable: false,
|
|
2713
|
+
});
|
|
2714
|
+
// SESSION-002: Check for session expiry
|
|
2715
|
+
let hasSessionExpiry = false;
|
|
2716
|
+
try {
|
|
2717
|
+
const files = await fs.readdir(targetDir);
|
|
2718
|
+
for (const file of files) {
|
|
2719
|
+
if (file.endsWith('.ts') || file.endsWith('.js') || file.endsWith('.json')) {
|
|
2720
|
+
try {
|
|
2721
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2722
|
+
if (content.includes('maxAge') ||
|
|
2723
|
+
content.includes('expiresIn') ||
|
|
2724
|
+
content.includes('ttl') ||
|
|
2725
|
+
content.includes('sessionTimeout')) {
|
|
2726
|
+
hasSessionExpiry = true;
|
|
2727
|
+
break;
|
|
2728
|
+
}
|
|
2729
|
+
}
|
|
2730
|
+
catch { }
|
|
2731
|
+
}
|
|
2732
|
+
}
|
|
2733
|
+
}
|
|
2734
|
+
catch { }
|
|
2735
|
+
findings.push({
|
|
2736
|
+
checkId: 'SESSION-002',
|
|
2737
|
+
name: 'Session Expiry',
|
|
2738
|
+
description: 'Sessions should have appropriate expiry times',
|
|
2739
|
+
category: 'session-security',
|
|
2740
|
+
severity: 'medium',
|
|
2741
|
+
passed: hasSessionExpiry,
|
|
2742
|
+
message: hasSessionExpiry
|
|
2743
|
+
? 'Session expiry configuration detected'
|
|
2744
|
+
: 'Configure appropriate session expiry times',
|
|
2745
|
+
fixable: false,
|
|
2746
|
+
});
|
|
2747
|
+
// SESSION-003: Check for CSRF protection
|
|
2748
|
+
let hasCsrfProtection = false;
|
|
2749
|
+
try {
|
|
2750
|
+
const pkgPath = path.join(targetDir, 'package.json');
|
|
2751
|
+
const content = await fs.readFile(pkgPath, 'utf-8');
|
|
2752
|
+
const pkg = JSON.parse(content);
|
|
2753
|
+
const deps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
2754
|
+
hasCsrfProtection = 'csurf' in deps || 'csrf' in deps || '@fastify/csrf-protection' in deps;
|
|
2755
|
+
}
|
|
2756
|
+
catch { }
|
|
2757
|
+
findings.push({
|
|
2758
|
+
checkId: 'SESSION-003',
|
|
2759
|
+
name: 'CSRF Protection',
|
|
2760
|
+
description: 'Forms should have CSRF protection',
|
|
2761
|
+
category: 'session-security',
|
|
2762
|
+
severity: 'high',
|
|
2763
|
+
passed: hasCsrfProtection,
|
|
2764
|
+
message: hasCsrfProtection
|
|
2765
|
+
? 'CSRF protection library detected'
|
|
2766
|
+
: 'Consider implementing CSRF protection for state-changing operations',
|
|
2767
|
+
fixable: false,
|
|
2768
|
+
});
|
|
2769
|
+
// SESSION-004: Check for secure token storage
|
|
2770
|
+
let hasSecureStorage = false;
|
|
2771
|
+
try {
|
|
2772
|
+
const files = await fs.readdir(targetDir);
|
|
2773
|
+
for (const file of files) {
|
|
2774
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2775
|
+
try {
|
|
2776
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2777
|
+
if (content.includes('keytar') ||
|
|
2778
|
+
content.includes('secure-store') ||
|
|
2779
|
+
content.includes('keychain') ||
|
|
2780
|
+
content.includes('credential-store')) {
|
|
2781
|
+
hasSecureStorage = true;
|
|
2782
|
+
break;
|
|
2783
|
+
}
|
|
2784
|
+
}
|
|
2785
|
+
catch { }
|
|
2786
|
+
}
|
|
2787
|
+
}
|
|
2788
|
+
}
|
|
2789
|
+
catch { }
|
|
2790
|
+
findings.push({
|
|
2791
|
+
checkId: 'SESSION-004',
|
|
2792
|
+
name: 'Secure Token Storage',
|
|
2793
|
+
description: 'Tokens should be stored securely',
|
|
2794
|
+
category: 'session-security',
|
|
2795
|
+
severity: 'medium',
|
|
2796
|
+
passed: hasSecureStorage,
|
|
2797
|
+
message: hasSecureStorage
|
|
2798
|
+
? 'Secure token storage detected'
|
|
2799
|
+
: 'Consider using secure storage for sensitive tokens',
|
|
2800
|
+
fixable: false,
|
|
2801
|
+
});
|
|
2802
|
+
return findings;
|
|
2803
|
+
}
|
|
2804
|
+
/**
|
|
2805
|
+
* Data encryption checks
|
|
2806
|
+
*/
|
|
2807
|
+
async checkEncryption(targetDir, autoFix) {
|
|
2808
|
+
const findings = [];
|
|
2809
|
+
// ENCRYPT-001: Check for encryption at rest
|
|
2810
|
+
let hasEncryption = false;
|
|
2811
|
+
try {
|
|
2812
|
+
const files = await fs.readdir(targetDir);
|
|
2813
|
+
for (const file of files) {
|
|
2814
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2815
|
+
try {
|
|
2816
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2817
|
+
if (content.includes('crypto') ||
|
|
2818
|
+
content.includes('encrypt') ||
|
|
2819
|
+
content.includes('aes-') ||
|
|
2820
|
+
content.includes('sodium')) {
|
|
2821
|
+
hasEncryption = true;
|
|
2822
|
+
break;
|
|
2823
|
+
}
|
|
2824
|
+
}
|
|
2825
|
+
catch { }
|
|
2826
|
+
}
|
|
2827
|
+
}
|
|
2828
|
+
}
|
|
2829
|
+
catch { }
|
|
2830
|
+
findings.push({
|
|
2831
|
+
checkId: 'ENCRYPT-001',
|
|
2832
|
+
name: 'Encryption Implementation',
|
|
2833
|
+
description: 'Sensitive data should be encrypted at rest',
|
|
2834
|
+
category: 'encryption',
|
|
2835
|
+
severity: 'high',
|
|
2836
|
+
passed: hasEncryption,
|
|
2837
|
+
message: hasEncryption
|
|
2838
|
+
? 'Encryption implementation detected'
|
|
2839
|
+
: 'Consider encrypting sensitive data at rest',
|
|
2840
|
+
fixable: false,
|
|
2841
|
+
});
|
|
2842
|
+
// ENCRYPT-002: Check for secure hashing
|
|
2843
|
+
let hasSecureHashing = false;
|
|
2844
|
+
try {
|
|
2845
|
+
const files = await fs.readdir(targetDir);
|
|
2846
|
+
for (const file of files) {
|
|
2847
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2848
|
+
try {
|
|
2849
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2850
|
+
if (content.includes('bcrypt') ||
|
|
2851
|
+
content.includes('argon2') ||
|
|
2852
|
+
content.includes('scrypt') ||
|
|
2853
|
+
content.includes('pbkdf2')) {
|
|
2854
|
+
hasSecureHashing = true;
|
|
2855
|
+
break;
|
|
2856
|
+
}
|
|
2857
|
+
}
|
|
2858
|
+
catch { }
|
|
2859
|
+
}
|
|
2860
|
+
}
|
|
2861
|
+
}
|
|
2862
|
+
catch { }
|
|
2863
|
+
findings.push({
|
|
2864
|
+
checkId: 'ENCRYPT-002',
|
|
2865
|
+
name: 'Secure Password Hashing',
|
|
2866
|
+
description: 'Passwords should use secure hashing algorithms',
|
|
2867
|
+
category: 'encryption',
|
|
2868
|
+
severity: 'critical',
|
|
2869
|
+
passed: hasSecureHashing,
|
|
2870
|
+
message: hasSecureHashing
|
|
2871
|
+
? 'Secure hashing algorithm detected (bcrypt/argon2/scrypt)'
|
|
2872
|
+
: 'Use bcrypt, argon2, or scrypt for password hashing',
|
|
2873
|
+
fixable: false,
|
|
2874
|
+
});
|
|
2875
|
+
// ENCRYPT-003: Check for weak algorithms
|
|
2876
|
+
let hasWeakAlgorithms = false;
|
|
2877
|
+
try {
|
|
2878
|
+
const files = await fs.readdir(targetDir);
|
|
2879
|
+
for (const file of files) {
|
|
2880
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2881
|
+
try {
|
|
2882
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2883
|
+
if (content.includes('md5') ||
|
|
2884
|
+
content.includes('sha1') ||
|
|
2885
|
+
content.includes("'des'") ||
|
|
2886
|
+
content.includes('"des"')) {
|
|
2887
|
+
hasWeakAlgorithms = true;
|
|
2888
|
+
break;
|
|
2889
|
+
}
|
|
2890
|
+
}
|
|
2891
|
+
catch { }
|
|
2892
|
+
}
|
|
2893
|
+
}
|
|
2894
|
+
}
|
|
2895
|
+
catch { }
|
|
2896
|
+
findings.push({
|
|
2897
|
+
checkId: 'ENCRYPT-003',
|
|
2898
|
+
name: 'Weak Cryptographic Algorithms',
|
|
2899
|
+
description: 'Avoid using weak cryptographic algorithms',
|
|
2900
|
+
category: 'encryption',
|
|
2901
|
+
severity: 'high',
|
|
2902
|
+
passed: !hasWeakAlgorithms,
|
|
2903
|
+
message: hasWeakAlgorithms
|
|
2904
|
+
? 'Weak algorithms detected (MD5/SHA1/DES) - use SHA-256+ and AES'
|
|
2905
|
+
: 'No weak cryptographic algorithms detected',
|
|
2906
|
+
fixable: false,
|
|
2907
|
+
});
|
|
2908
|
+
// ENCRYPT-004: Check for TLS configuration
|
|
2909
|
+
let hasTlsConfig = false;
|
|
2910
|
+
try {
|
|
2911
|
+
const files = await fs.readdir(targetDir);
|
|
2912
|
+
for (const file of files) {
|
|
2913
|
+
if (file.endsWith('.ts') || file.endsWith('.js') || file.endsWith('.json')) {
|
|
2914
|
+
try {
|
|
2915
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2916
|
+
if (content.includes('https') ||
|
|
2917
|
+
content.includes('tls') ||
|
|
2918
|
+
content.includes('ssl') ||
|
|
2919
|
+
content.includes('rejectUnauthorized')) {
|
|
2920
|
+
hasTlsConfig = true;
|
|
2921
|
+
break;
|
|
2922
|
+
}
|
|
2923
|
+
}
|
|
2924
|
+
catch { }
|
|
2925
|
+
}
|
|
2926
|
+
}
|
|
2927
|
+
}
|
|
2928
|
+
catch { }
|
|
2929
|
+
findings.push({
|
|
2930
|
+
checkId: 'ENCRYPT-004',
|
|
2931
|
+
name: 'TLS Configuration',
|
|
2932
|
+
description: 'Communications should use TLS',
|
|
2933
|
+
category: 'encryption',
|
|
2934
|
+
severity: 'high',
|
|
2935
|
+
passed: hasTlsConfig,
|
|
2936
|
+
message: hasTlsConfig
|
|
2937
|
+
? 'TLS/HTTPS configuration detected'
|
|
2938
|
+
: 'Ensure all communications use TLS',
|
|
2939
|
+
fixable: false,
|
|
2940
|
+
});
|
|
2941
|
+
return findings;
|
|
2942
|
+
}
|
|
2943
|
+
/**
|
|
2944
|
+
* Audit trail and logging security checks
|
|
2945
|
+
*/
|
|
2946
|
+
async checkAuditTrail(targetDir, autoFix) {
|
|
2947
|
+
const findings = [];
|
|
2948
|
+
// AUDIT-001: Check for audit logging
|
|
2949
|
+
let hasAuditLogging = false;
|
|
2950
|
+
try {
|
|
2951
|
+
const files = await fs.readdir(targetDir);
|
|
2952
|
+
for (const file of files) {
|
|
2953
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
2954
|
+
try {
|
|
2955
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2956
|
+
if (content.includes('audit') ||
|
|
2957
|
+
content.includes('winston') ||
|
|
2958
|
+
content.includes('pino') ||
|
|
2959
|
+
content.includes('bunyan')) {
|
|
2960
|
+
hasAuditLogging = true;
|
|
2961
|
+
break;
|
|
2962
|
+
}
|
|
2963
|
+
}
|
|
2964
|
+
catch { }
|
|
2965
|
+
}
|
|
2966
|
+
}
|
|
2967
|
+
}
|
|
2968
|
+
catch { }
|
|
2969
|
+
findings.push({
|
|
2970
|
+
checkId: 'AUDIT-001',
|
|
2971
|
+
name: 'Audit Logging',
|
|
2972
|
+
description: 'Security-relevant events should be logged',
|
|
2973
|
+
category: 'audit',
|
|
2974
|
+
severity: 'medium',
|
|
2975
|
+
passed: hasAuditLogging,
|
|
2976
|
+
message: hasAuditLogging
|
|
2977
|
+
? 'Audit logging implementation detected'
|
|
2978
|
+
: 'Consider implementing audit logging for security events',
|
|
2979
|
+
fixable: false,
|
|
2980
|
+
});
|
|
2981
|
+
// AUDIT-002: Check for log rotation
|
|
2982
|
+
let hasLogRotation = false;
|
|
2983
|
+
try {
|
|
2984
|
+
const files = await fs.readdir(targetDir);
|
|
2985
|
+
for (const file of files) {
|
|
2986
|
+
if (file.endsWith('.ts') || file.endsWith('.js') || file.endsWith('.json')) {
|
|
2987
|
+
try {
|
|
2988
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
2989
|
+
if (content.includes('rotate') ||
|
|
2990
|
+
content.includes('maxFiles') ||
|
|
2991
|
+
content.includes('maxSize')) {
|
|
2992
|
+
hasLogRotation = true;
|
|
2993
|
+
break;
|
|
2994
|
+
}
|
|
2995
|
+
}
|
|
2996
|
+
catch { }
|
|
2997
|
+
}
|
|
2998
|
+
}
|
|
2999
|
+
}
|
|
3000
|
+
catch { }
|
|
3001
|
+
findings.push({
|
|
3002
|
+
checkId: 'AUDIT-002',
|
|
3003
|
+
name: 'Log Rotation',
|
|
3004
|
+
description: 'Logs should have rotation configured',
|
|
3005
|
+
category: 'audit',
|
|
3006
|
+
severity: 'low',
|
|
3007
|
+
passed: hasLogRotation,
|
|
3008
|
+
message: hasLogRotation
|
|
3009
|
+
? 'Log rotation configuration detected'
|
|
3010
|
+
: 'Consider configuring log rotation to manage disk space',
|
|
3011
|
+
fixable: false,
|
|
3012
|
+
});
|
|
3013
|
+
// AUDIT-003: Check for error tracking
|
|
3014
|
+
let hasErrorTracking = false;
|
|
3015
|
+
try {
|
|
3016
|
+
const pkgPath = path.join(targetDir, 'package.json');
|
|
3017
|
+
const content = await fs.readFile(pkgPath, 'utf-8');
|
|
3018
|
+
const pkg = JSON.parse(content);
|
|
3019
|
+
const deps = { ...pkg.dependencies, ...pkg.devDependencies };
|
|
3020
|
+
hasErrorTracking = '@sentry/node' in deps || 'bugsnag' in deps || 'rollbar' in deps;
|
|
3021
|
+
}
|
|
3022
|
+
catch { }
|
|
3023
|
+
findings.push({
|
|
3024
|
+
checkId: 'AUDIT-003',
|
|
3025
|
+
name: 'Error Tracking',
|
|
3026
|
+
description: 'Errors should be tracked for monitoring',
|
|
3027
|
+
category: 'audit',
|
|
3028
|
+
severity: 'low',
|
|
3029
|
+
passed: hasErrorTracking,
|
|
3030
|
+
message: hasErrorTracking
|
|
3031
|
+
? 'Error tracking service detected'
|
|
3032
|
+
: 'Consider using an error tracking service for production',
|
|
3033
|
+
fixable: false,
|
|
3034
|
+
});
|
|
3035
|
+
// AUDIT-004: Check for no sensitive data in logs
|
|
3036
|
+
let hasLogSanitization = false;
|
|
3037
|
+
try {
|
|
3038
|
+
const files = await fs.readdir(targetDir);
|
|
3039
|
+
for (const file of files) {
|
|
3040
|
+
if (file.endsWith('.ts') || file.endsWith('.js')) {
|
|
3041
|
+
try {
|
|
3042
|
+
const content = await fs.readFile(path.join(targetDir, file), 'utf-8');
|
|
3043
|
+
if (content.includes('redact') ||
|
|
3044
|
+
content.includes('mask') ||
|
|
3045
|
+
content.includes('sanitize')) {
|
|
3046
|
+
hasLogSanitization = true;
|
|
3047
|
+
break;
|
|
3048
|
+
}
|
|
3049
|
+
}
|
|
3050
|
+
catch { }
|
|
3051
|
+
}
|
|
3052
|
+
}
|
|
3053
|
+
}
|
|
3054
|
+
catch { }
|
|
3055
|
+
findings.push({
|
|
3056
|
+
checkId: 'AUDIT-004',
|
|
3057
|
+
name: 'Log Sanitization',
|
|
3058
|
+
description: 'Sensitive data should be redacted from logs',
|
|
3059
|
+
category: 'audit',
|
|
3060
|
+
severity: 'high',
|
|
3061
|
+
passed: hasLogSanitization,
|
|
3062
|
+
message: hasLogSanitization
|
|
3063
|
+
? 'Log sanitization patterns detected'
|
|
3064
|
+
: 'Consider redacting sensitive data (passwords, tokens) from logs',
|
|
3065
|
+
fixable: false,
|
|
3066
|
+
});
|
|
3067
|
+
return findings;
|
|
3068
|
+
}
|
|
3069
|
+
/**
|
|
3070
|
+
* Process isolation and sandboxing checks
|
|
3071
|
+
*/
|
|
3072
|
+
async checkSandboxing(targetDir, autoFix) {
|
|
3073
|
+
const findings = [];
|
|
3074
|
+
// SANDBOX-001: Check for Docker/container usage
|
|
3075
|
+
let hasContainerization = false;
|
|
3076
|
+
try {
|
|
3077
|
+
await fs.access(path.join(targetDir, 'Dockerfile'));
|
|
3078
|
+
hasContainerization = true;
|
|
3079
|
+
}
|
|
3080
|
+
catch { }
|
|
3081
|
+
try {
|
|
3082
|
+
await fs.access(path.join(targetDir, 'docker-compose.yml'));
|
|
3083
|
+
hasContainerization = true;
|
|
3084
|
+
}
|
|
3085
|
+
catch { }
|
|
3086
|
+
try {
|
|
3087
|
+
await fs.access(path.join(targetDir, 'docker-compose.yaml'));
|
|
3088
|
+
hasContainerization = true;
|
|
3089
|
+
}
|
|
3090
|
+
catch { }
|
|
3091
|
+
findings.push({
|
|
3092
|
+
checkId: 'SANDBOX-001',
|
|
3093
|
+
name: 'Container Isolation',
|
|
3094
|
+
description: 'Applications should run in isolated containers',
|
|
3095
|
+
category: 'sandboxing',
|
|
3096
|
+
severity: 'medium',
|
|
3097
|
+
passed: hasContainerization,
|
|
3098
|
+
message: hasContainerization
|
|
3099
|
+
? 'Container configuration detected'
|
|
3100
|
+
: 'Consider running in Docker containers for isolation',
|
|
3101
|
+
fixable: false,
|
|
3102
|
+
});
|
|
3103
|
+
// SANDBOX-002: Check for non-root execution
|
|
3104
|
+
let hasNonRootConfig = false;
|
|
3105
|
+
try {
|
|
3106
|
+
const dockerPath = path.join(targetDir, 'Dockerfile');
|
|
3107
|
+
const content = await fs.readFile(dockerPath, 'utf-8');
|
|
3108
|
+
hasNonRootConfig = content.includes('USER ') && !content.includes('USER root');
|
|
3109
|
+
}
|
|
3110
|
+
catch { }
|
|
3111
|
+
findings.push({
|
|
3112
|
+
checkId: 'SANDBOX-002',
|
|
3113
|
+
name: 'Non-Root Execution',
|
|
3114
|
+
description: 'Containers should not run as root',
|
|
3115
|
+
category: 'sandboxing',
|
|
3116
|
+
severity: 'high',
|
|
3117
|
+
passed: hasNonRootConfig,
|
|
3118
|
+
message: hasNonRootConfig
|
|
3119
|
+
? 'Non-root user configured in Dockerfile'
|
|
3120
|
+
: 'Configure containers to run as non-root user',
|
|
3121
|
+
fixable: false,
|
|
3122
|
+
});
|
|
3123
|
+
// SANDBOX-003: Check for resource limits
|
|
3124
|
+
let hasResourceLimits = false;
|
|
3125
|
+
try {
|
|
3126
|
+
const composePath = path.join(targetDir, 'docker-compose.yml');
|
|
3127
|
+
const content = await fs.readFile(composePath, 'utf-8');
|
|
3128
|
+
hasResourceLimits = content.includes('mem_limit') || content.includes('cpus') || content.includes('deploy:');
|
|
3129
|
+
}
|
|
3130
|
+
catch { }
|
|
3131
|
+
try {
|
|
3132
|
+
const composePath = path.join(targetDir, 'docker-compose.yaml');
|
|
3133
|
+
const content = await fs.readFile(composePath, 'utf-8');
|
|
3134
|
+
hasResourceLimits = content.includes('mem_limit') || content.includes('cpus') || content.includes('deploy:');
|
|
3135
|
+
}
|
|
3136
|
+
catch { }
|
|
3137
|
+
findings.push({
|
|
3138
|
+
checkId: 'SANDBOX-003',
|
|
3139
|
+
name: 'Resource Limits',
|
|
3140
|
+
description: 'Containers should have resource limits',
|
|
3141
|
+
category: 'sandboxing',
|
|
3142
|
+
severity: 'medium',
|
|
3143
|
+
passed: hasResourceLimits,
|
|
3144
|
+
message: hasResourceLimits
|
|
3145
|
+
? 'Resource limits configured'
|
|
3146
|
+
: 'Consider setting CPU and memory limits for containers',
|
|
3147
|
+
fixable: false,
|
|
3148
|
+
});
|
|
3149
|
+
// SANDBOX-004: Check for read-only filesystem
|
|
3150
|
+
let hasReadOnlyFs = false;
|
|
3151
|
+
try {
|
|
3152
|
+
const composePath = path.join(targetDir, 'docker-compose.yml');
|
|
3153
|
+
const content = await fs.readFile(composePath, 'utf-8');
|
|
3154
|
+
hasReadOnlyFs = content.includes('read_only: true');
|
|
3155
|
+
}
|
|
3156
|
+
catch { }
|
|
3157
|
+
findings.push({
|
|
3158
|
+
checkId: 'SANDBOX-004',
|
|
3159
|
+
name: 'Read-Only Filesystem',
|
|
3160
|
+
description: 'Containers should use read-only filesystem where possible',
|
|
3161
|
+
category: 'sandboxing',
|
|
3162
|
+
severity: 'low',
|
|
3163
|
+
passed: hasReadOnlyFs,
|
|
3164
|
+
message: hasReadOnlyFs
|
|
3165
|
+
? 'Read-only filesystem configured'
|
|
3166
|
+
: 'Consider using read-only filesystem for containers',
|
|
3167
|
+
fixable: false,
|
|
3168
|
+
});
|
|
3169
|
+
return findings;
|
|
3170
|
+
}
|
|
3171
|
+
/**
|
|
3172
|
+
* MCP tool permission boundary checks
|
|
3173
|
+
*/
|
|
3174
|
+
async checkToolBoundaries(targetDir, autoFix) {
|
|
3175
|
+
const findings = [];
|
|
3176
|
+
const mcpConfigPath = path.join(targetDir, 'mcp.json');
|
|
3177
|
+
let mcpConfig = null;
|
|
3178
|
+
try {
|
|
3179
|
+
const content = await fs.readFile(mcpConfigPath, 'utf-8');
|
|
3180
|
+
mcpConfig = JSON.parse(content);
|
|
3181
|
+
}
|
|
3182
|
+
catch { }
|
|
3183
|
+
// TOOL-001: Check for tool whitelisting
|
|
3184
|
+
let hasToolWhitelist = false;
|
|
3185
|
+
if (mcpConfig?.servers) {
|
|
3186
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
3187
|
+
if (server.allowedTools && server.allowedTools.length > 0) {
|
|
3188
|
+
hasToolWhitelist = true;
|
|
3189
|
+
break;
|
|
3190
|
+
}
|
|
3191
|
+
}
|
|
3192
|
+
}
|
|
3193
|
+
findings.push({
|
|
3194
|
+
checkId: 'TOOL-001',
|
|
3195
|
+
name: 'Tool Whitelisting',
|
|
3196
|
+
description: 'MCP servers should have explicit tool whitelists',
|
|
3197
|
+
category: 'tool-boundaries',
|
|
3198
|
+
severity: 'high',
|
|
3199
|
+
passed: hasToolWhitelist,
|
|
3200
|
+
message: hasToolWhitelist
|
|
3201
|
+
? 'Tool whitelisting configured'
|
|
3202
|
+
: 'Configure allowedTools to restrict MCP server capabilities',
|
|
3203
|
+
fixable: false,
|
|
3204
|
+
});
|
|
3205
|
+
// TOOL-002: Check for resource constraints
|
|
3206
|
+
let hasResourceConstraints = false;
|
|
3207
|
+
if (mcpConfig?.servers) {
|
|
3208
|
+
for (const [, server] of Object.entries(mcpConfig.servers)) {
|
|
3209
|
+
if (server.maxTokens || server.timeout) {
|
|
3210
|
+
hasResourceConstraints = true;
|
|
3211
|
+
break;
|
|
3212
|
+
}
|
|
3213
|
+
}
|
|
3214
|
+
}
|
|
3215
|
+
findings.push({
|
|
3216
|
+
checkId: 'TOOL-002',
|
|
3217
|
+
name: 'Tool Resource Constraints',
|
|
3218
|
+
description: 'MCP tools should have resource constraints',
|
|
3219
|
+
category: 'tool-boundaries',
|
|
3220
|
+
severity: 'medium',
|
|
3221
|
+
passed: hasResourceConstraints,
|
|
3222
|
+
message: hasResourceConstraints
|
|
3223
|
+
? 'Resource constraints configured'
|
|
3224
|
+
: 'Consider setting maxTokens and timeout for MCP tools',
|
|
3225
|
+
fixable: false,
|
|
3226
|
+
});
|
|
3227
|
+
// TOOL-003: Check for dangerous tool usage
|
|
3228
|
+
let hasDangerousTools = false;
|
|
3229
|
+
if (mcpConfig?.servers) {
|
|
3230
|
+
const dangerousTools = ['shell', 'exec', 'system', 'eval', 'run_command'];
|
|
3231
|
+
for (const [name] of Object.entries(mcpConfig.servers)) {
|
|
3232
|
+
for (const dangerous of dangerousTools) {
|
|
3233
|
+
if (name.toLowerCase().includes(dangerous)) {
|
|
3234
|
+
hasDangerousTools = true;
|
|
3235
|
+
break;
|
|
3236
|
+
}
|
|
3237
|
+
}
|
|
3238
|
+
}
|
|
3239
|
+
}
|
|
3240
|
+
findings.push({
|
|
3241
|
+
checkId: 'TOOL-003',
|
|
3242
|
+
name: 'Dangerous Tool Detection',
|
|
3243
|
+
description: 'Identify potentially dangerous MCP tools',
|
|
3244
|
+
category: 'tool-boundaries',
|
|
3245
|
+
severity: 'high',
|
|
3246
|
+
passed: !hasDangerousTools,
|
|
3247
|
+
message: hasDangerousTools
|
|
3248
|
+
? 'Potentially dangerous tools detected (shell/exec) - ensure proper restrictions'
|
|
3249
|
+
: 'No obvious dangerous tools detected',
|
|
3250
|
+
fixable: false,
|
|
3251
|
+
});
|
|
3252
|
+
// TOOL-004: Check for tool confirmation requirements
|
|
3253
|
+
let hasConfirmation = false;
|
|
3254
|
+
try {
|
|
3255
|
+
const claudePath = path.join(targetDir, 'CLAUDE.md');
|
|
3256
|
+
const content = await fs.readFile(claudePath, 'utf-8');
|
|
3257
|
+
hasConfirmation =
|
|
3258
|
+
content.toLowerCase().includes('confirm') ||
|
|
3259
|
+
content.toLowerCase().includes('approval') ||
|
|
3260
|
+
content.toLowerCase().includes('ask before');
|
|
3261
|
+
}
|
|
3262
|
+
catch { }
|
|
3263
|
+
findings.push({
|
|
3264
|
+
checkId: 'TOOL-004',
|
|
3265
|
+
name: 'Tool Confirmation Requirements',
|
|
3266
|
+
description: 'Dangerous operations should require confirmation',
|
|
3267
|
+
category: 'tool-boundaries',
|
|
3268
|
+
severity: 'medium',
|
|
3269
|
+
passed: hasConfirmation,
|
|
3270
|
+
message: hasConfirmation
|
|
3271
|
+
? 'Tool confirmation instructions detected'
|
|
3272
|
+
: 'Consider requiring confirmation for destructive operations',
|
|
3273
|
+
fixable: false,
|
|
3274
|
+
});
|
|
3275
|
+
return findings;
|
|
3276
|
+
}
|
|
3277
|
+
calculateScore(findings) {
|
|
3278
|
+
let score = 100;
|
|
3279
|
+
let maxDeduction = 0;
|
|
3280
|
+
for (const finding of findings) {
|
|
3281
|
+
const weight = SEVERITY_WEIGHTS[finding.severity];
|
|
3282
|
+
maxDeduction += weight;
|
|
3283
|
+
if (!finding.passed && !finding.fixed) {
|
|
3284
|
+
score -= weight;
|
|
3285
|
+
}
|
|
3286
|
+
}
|
|
3287
|
+
// Normalize to 0-100
|
|
3288
|
+
score = Math.max(0, score);
|
|
3289
|
+
const maxScore = 100;
|
|
3290
|
+
return { score, maxScore };
|
|
3291
|
+
}
|
|
3292
|
+
/**
|
|
3293
|
+
* Create a backup of files that may be modified during auto-fix
|
|
3294
|
+
*/
|
|
3295
|
+
async createBackup(targetDir) {
|
|
3296
|
+
const timestamp = new Date()
|
|
3297
|
+
.toISOString()
|
|
3298
|
+
.replace(/[T:]/g, '-')
|
|
3299
|
+
.replace(/\..+/, '')
|
|
3300
|
+
.replace(/-/g, (m, i) => (i < 10 ? '-' : ''));
|
|
3301
|
+
// Format: YYYY-MM-DD-HHMMSS
|
|
3302
|
+
const formattedTimestamp = new Date()
|
|
3303
|
+
.toISOString()
|
|
3304
|
+
.slice(0, 19)
|
|
3305
|
+
.replace('T', '-')
|
|
3306
|
+
.replace(/:/g, '');
|
|
3307
|
+
const backupDir = path.join(targetDir, '.hackmyagent-backup', formattedTimestamp);
|
|
3308
|
+
// Create backup directory
|
|
3309
|
+
await fs.mkdir(backupDir, { recursive: true });
|
|
3310
|
+
// Create manifest to track what existed before
|
|
3311
|
+
const manifest = {
|
|
3312
|
+
existingFiles: [],
|
|
3313
|
+
createdFiles: [],
|
|
3314
|
+
};
|
|
3315
|
+
// Backup each file that exists
|
|
3316
|
+
for (const file of HardeningScanner.BACKUP_FILES) {
|
|
3317
|
+
const sourcePath = path.join(targetDir, file);
|
|
3318
|
+
try {
|
|
3319
|
+
await fs.access(sourcePath);
|
|
3320
|
+
// File exists, back it up
|
|
3321
|
+
const destPath = path.join(backupDir, file);
|
|
3322
|
+
await fs.mkdir(path.dirname(destPath), { recursive: true });
|
|
3323
|
+
await fs.copyFile(sourcePath, destPath);
|
|
3324
|
+
manifest.existingFiles.push(file);
|
|
3325
|
+
}
|
|
3326
|
+
catch {
|
|
3327
|
+
// File doesn't exist, track it for rollback (may be created)
|
|
3328
|
+
manifest.createdFiles.push(file);
|
|
3329
|
+
}
|
|
3330
|
+
}
|
|
3331
|
+
// Save manifest
|
|
3332
|
+
await fs.writeFile(path.join(backupDir, '.manifest.json'), JSON.stringify(manifest, null, 2));
|
|
3333
|
+
return backupDir;
|
|
3334
|
+
}
|
|
3335
|
+
/**
|
|
3336
|
+
* Rollback to the most recent backup
|
|
3337
|
+
*/
|
|
3338
|
+
async rollback(targetDir) {
|
|
3339
|
+
const backupBaseDir = path.join(targetDir, '.hackmyagent-backup');
|
|
3340
|
+
// Check if backup directory exists
|
|
3341
|
+
try {
|
|
3342
|
+
await fs.access(backupBaseDir);
|
|
3343
|
+
}
|
|
3344
|
+
catch {
|
|
3345
|
+
throw new Error('No backup found. Cannot rollback.');
|
|
3346
|
+
}
|
|
3347
|
+
// Find the most recent backup
|
|
3348
|
+
const backups = await fs.readdir(backupBaseDir);
|
|
3349
|
+
const sortedBackups = backups
|
|
3350
|
+
.filter((b) => !b.startsWith('.'))
|
|
3351
|
+
.sort()
|
|
3352
|
+
.reverse();
|
|
3353
|
+
if (sortedBackups.length === 0) {
|
|
3354
|
+
throw new Error('No backup found. Cannot rollback.');
|
|
3355
|
+
}
|
|
3356
|
+
const latestBackup = sortedBackups[0];
|
|
3357
|
+
const backupDir = path.join(backupBaseDir, latestBackup);
|
|
3358
|
+
// Read manifest
|
|
3359
|
+
let manifest;
|
|
3360
|
+
try {
|
|
3361
|
+
const manifestContent = await fs.readFile(path.join(backupDir, '.manifest.json'), 'utf-8');
|
|
3362
|
+
manifest = JSON.parse(manifestContent);
|
|
3363
|
+
}
|
|
3364
|
+
catch {
|
|
3365
|
+
throw new Error('Backup manifest is corrupted. Cannot rollback.');
|
|
3366
|
+
}
|
|
3367
|
+
// Restore existing files from backup
|
|
3368
|
+
for (const file of manifest.existingFiles) {
|
|
3369
|
+
const sourcePath = path.join(backupDir, file);
|
|
3370
|
+
const destPath = path.join(targetDir, file);
|
|
3371
|
+
try {
|
|
3372
|
+
await fs.copyFile(sourcePath, destPath);
|
|
3373
|
+
}
|
|
3374
|
+
catch (err) {
|
|
3375
|
+
// Continue with other files
|
|
3376
|
+
}
|
|
3377
|
+
}
|
|
3378
|
+
// Remove files that were created during auto-fix
|
|
3379
|
+
for (const file of manifest.createdFiles) {
|
|
3380
|
+
const filePath = path.join(targetDir, file);
|
|
3381
|
+
try {
|
|
3382
|
+
await fs.unlink(filePath);
|
|
3383
|
+
}
|
|
3384
|
+
catch {
|
|
3385
|
+
// File may not exist, that's OK
|
|
3386
|
+
}
|
|
3387
|
+
}
|
|
3388
|
+
// Remove the used backup
|
|
3389
|
+
await fs.rm(backupDir, { recursive: true, force: true });
|
|
3390
|
+
}
|
|
3391
|
+
}
|
|
3392
|
+
// CommonJS export of the scanner class.
exports.HardeningScanner = HardeningScanner;
// Files that may be created or modified during auto-fix.
// Backed up by createBackup() and restored/removed by rollback();
// nested paths (e.g. .cursor/mcp.json) are handled via recursive mkdir.
HardeningScanner.BACKUP_FILES = [
    'config.json',
    'config.yaml',
    'config.yml',
    'mcp.json',
    'settings.json',
    '.env',
    '.env.local',
    '.gitignore',
    '.env.example',
    'CLAUDE.md',
    '.cursor/mcp.json',
    '.vscode/mcp.json',
    '.claude/settings.json',
    'package.json',
];
|
|
3410
|
+
//# sourceMappingURL=scanner.js.map
|