getdoorman 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +181 -0
- package/bin/doorman.js +444 -0
- package/package.json +74 -0
- package/src/ai-fixer.js +559 -0
- package/src/ast-scanner.js +434 -0
- package/src/auth.js +149 -0
- package/src/baseline.js +48 -0
- package/src/compliance.js +539 -0
- package/src/config.js +466 -0
- package/src/custom-rules.js +32 -0
- package/src/dashboard.js +202 -0
- package/src/detector.js +142 -0
- package/src/fix-engine.js +48 -0
- package/src/fix-registry-extra.js +95 -0
- package/src/fix-registry-go-rust.js +77 -0
- package/src/fix-registry-java-csharp.js +77 -0
- package/src/fix-registry-js.js +99 -0
- package/src/fix-registry-mcp-ai.js +57 -0
- package/src/fix-registry-python.js +87 -0
- package/src/fixer-ruby-php.js +608 -0
- package/src/fixer.js +2113 -0
- package/src/hooks.js +115 -0
- package/src/ignore.js +176 -0
- package/src/index.js +384 -0
- package/src/metrics.js +126 -0
- package/src/monorepo.js +65 -0
- package/src/presets.js +54 -0
- package/src/reporter.js +975 -0
- package/src/rule-worker.js +36 -0
- package/src/rules/ast-rules.js +756 -0
- package/src/rules/bugs/accessibility.js +235 -0
- package/src/rules/bugs/ai-codegen-fixable.js +172 -0
- package/src/rules/bugs/ai-codegen.js +365 -0
- package/src/rules/bugs/code-smell-bugs.js +247 -0
- package/src/rules/bugs/crypto-bugs.js +195 -0
- package/src/rules/bugs/docker-bugs.js +158 -0
- package/src/rules/bugs/general.js +361 -0
- package/src/rules/bugs/go-bugs.js +279 -0
- package/src/rules/bugs/index.js +73 -0
- package/src/rules/bugs/js-api.js +257 -0
- package/src/rules/bugs/js-array-object.js +210 -0
- package/src/rules/bugs/js-async-fixable.js +223 -0
- package/src/rules/bugs/js-async.js +211 -0
- package/src/rules/bugs/js-closure-scope.js +182 -0
- package/src/rules/bugs/js-database.js +203 -0
- package/src/rules/bugs/js-error-handling.js +148 -0
- package/src/rules/bugs/js-logic.js +261 -0
- package/src/rules/bugs/js-memory.js +214 -0
- package/src/rules/bugs/js-node.js +361 -0
- package/src/rules/bugs/js-react.js +373 -0
- package/src/rules/bugs/js-regex.js +200 -0
- package/src/rules/bugs/js-state.js +272 -0
- package/src/rules/bugs/js-type-coercion.js +318 -0
- package/src/rules/bugs/nextjs-bugs.js +242 -0
- package/src/rules/bugs/nextjs-fixable.js +120 -0
- package/src/rules/bugs/node-fixable.js +178 -0
- package/src/rules/bugs/python-advanced.js +245 -0
- package/src/rules/bugs/python-fixable.js +98 -0
- package/src/rules/bugs/python.js +284 -0
- package/src/rules/bugs/react-fixable.js +207 -0
- package/src/rules/bugs/ruby-bugs.js +182 -0
- package/src/rules/bugs/shell-bugs.js +181 -0
- package/src/rules/bugs/silent-failures.js +261 -0
- package/src/rules/bugs/ts-bugs.js +235 -0
- package/src/rules/bugs/unused-vars.js +65 -0
- package/src/rules/compliance/accessibility-ext.js +468 -0
- package/src/rules/compliance/education.js +322 -0
- package/src/rules/compliance/financial.js +421 -0
- package/src/rules/compliance/frameworks.js +507 -0
- package/src/rules/compliance/healthcare.js +520 -0
- package/src/rules/compliance/index.js +2714 -0
- package/src/rules/compliance/regional-eu.js +480 -0
- package/src/rules/compliance/regional-international.js +903 -0
- package/src/rules/cost/index.js +1993 -0
- package/src/rules/data/index.js +2503 -0
- package/src/rules/dependencies/index.js +1684 -0
- package/src/rules/deployment/index.js +2050 -0
- package/src/rules/index.js +71 -0
- package/src/rules/infrastructure/index.js +3048 -0
- package/src/rules/performance/index.js +3455 -0
- package/src/rules/quality/index.js +3175 -0
- package/src/rules/reliability/index.js +3040 -0
- package/src/rules/scope-rules.js +815 -0
- package/src/rules/security/ai-api.js +1177 -0
- package/src/rules/security/auth.js +1328 -0
- package/src/rules/security/cors.js +127 -0
- package/src/rules/security/crypto.js +527 -0
- package/src/rules/security/csharp.js +862 -0
- package/src/rules/security/csrf.js +193 -0
- package/src/rules/security/dart.js +835 -0
- package/src/rules/security/deserialization.js +291 -0
- package/src/rules/security/file-upload.js +187 -0
- package/src/rules/security/go.js +850 -0
- package/src/rules/security/headers.js +235 -0
- package/src/rules/security/index.js +65 -0
- package/src/rules/security/injection.js +1639 -0
- package/src/rules/security/mcp-server.js +71 -0
- package/src/rules/security/misconfiguration.js +660 -0
- package/src/rules/security/oauth-jwt.js +329 -0
- package/src/rules/security/path-traversal.js +295 -0
- package/src/rules/security/php.js +1054 -0
- package/src/rules/security/prototype-pollution.js +283 -0
- package/src/rules/security/rate-limiting.js +208 -0
- package/src/rules/security/ruby.js +1061 -0
- package/src/rules/security/rust.js +693 -0
- package/src/rules/security/secrets.js +747 -0
- package/src/rules/security/shell.js +647 -0
- package/src/rules/security/ssrf.js +298 -0
- package/src/rules/security/supply-chain-advanced.js +393 -0
- package/src/rules/security/supply-chain.js +734 -0
- package/src/rules/security/swift.js +835 -0
- package/src/rules/security/taint.js +27 -0
- package/src/rules/security/xss.js +520 -0
- package/src/scan-cache.js +71 -0
- package/src/scanner.js +710 -0
- package/src/scope-analyzer.js +685 -0
- package/src/share.js +88 -0
- package/src/taint.js +300 -0
- package/src/telemetry.js +183 -0
- package/src/tracer.js +190 -0
- package/src/upload.js +35 -0
- package/src/worker.js +31 -0
|
@@ -0,0 +1,3040 @@
|
|
|
1
|
+
// File extensions treated as JavaScript/TypeScript source for rule scanning.
const JS_EXTENSIONS = ['.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs'];

/**
 * Returns true when the given path ends with a known JS/TS extension.
 * @param {string} f - file path to classify
 * @returns {boolean}
 */
function isSourceFile(f) {
  for (const ext of JS_EXTENSIONS) {
    if (f.endsWith(ext)) {
      return true;
    }
  }
  return false;
}
|
|
3
|
+
|
|
4
|
+
const rules = [
|
|
5
|
+
// REL-001: No error handling on API routes
|
|
6
|
+
{
|
|
7
|
+
id: 'REL-001',
|
|
8
|
+
category: 'reliability',
|
|
9
|
+
severity: 'high',
|
|
10
|
+
confidence: 'likely',
|
|
11
|
+
title: 'API Route Without Error Handling',
|
|
12
|
+
check({ files }) {
|
|
13
|
+
const findings = [];
|
|
14
|
+
for (const [filepath, content] of files) {
|
|
15
|
+
if (!isSourceFile(filepath)) continue;
|
|
16
|
+
if (!filepath.includes('api/') && !filepath.includes('route')) continue;
|
|
17
|
+
|
|
18
|
+
// Check if file has try/catch
|
|
19
|
+
if ((content.includes('export') || content.includes('handler')) &&
|
|
20
|
+
!content.includes('try') && !content.includes('catch')) {
|
|
21
|
+
findings.push({
|
|
22
|
+
ruleId: 'REL-001', category: 'reliability', severity: 'high',
|
|
23
|
+
title: 'API route has no error handling (try/catch)',
|
|
24
|
+
description: 'Unhandled errors will crash your server or return 500 errors with stack traces.',
|
|
25
|
+
file: filepath, fix: null,
|
|
26
|
+
});
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
return findings;
|
|
30
|
+
},
|
|
31
|
+
},
|
|
32
|
+
|
|
33
|
+
// REL-002: No health check endpoint
|
|
34
|
+
{
|
|
35
|
+
id: 'REL-002',
|
|
36
|
+
category: 'reliability',
|
|
37
|
+
severity: 'medium',
|
|
38
|
+
confidence: 'likely',
|
|
39
|
+
title: 'No Health Check Endpoint',
|
|
40
|
+
check({ files }) {
|
|
41
|
+
const findings = [];
|
|
42
|
+
const hasHealth = [...files.entries()].some(([filepath, content]) =>
|
|
43
|
+
filepath.includes('health') ||
|
|
44
|
+
content.includes('/health') ||
|
|
45
|
+
content.includes('/healthz') ||
|
|
46
|
+
content.includes('/readyz') ||
|
|
47
|
+
content.includes('/api/health')
|
|
48
|
+
);
|
|
49
|
+
|
|
50
|
+
if (!hasHealth) {
|
|
51
|
+
const hasApiRoutes = [...files.keys()].some(f => f.includes('/api/') || f.includes('routes'));
|
|
52
|
+
if (hasApiRoutes) {
|
|
53
|
+
findings.push({
|
|
54
|
+
ruleId: 'REL-002', category: 'reliability', severity: 'medium',
|
|
55
|
+
title: 'No /health or /healthz endpoint detected',
|
|
56
|
+
description: 'Health check endpoints let load balancers and monitoring tools know if your app is running.',
|
|
57
|
+
fix: null,
|
|
58
|
+
});
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
return findings;
|
|
62
|
+
},
|
|
63
|
+
},
|
|
64
|
+
|
|
65
|
+
// REL-003: Unhandled promise rejections
|
|
66
|
+
{
|
|
67
|
+
id: 'REL-003',
|
|
68
|
+
category: 'reliability',
|
|
69
|
+
severity: 'high',
|
|
70
|
+
confidence: 'likely',
|
|
71
|
+
title: 'Unhandled Promise Rejection',
|
|
72
|
+
check({ files }) {
|
|
73
|
+
const findings = [];
|
|
74
|
+
for (const [filepath, content] of files) {
|
|
75
|
+
if (!isSourceFile(filepath)) continue;
|
|
76
|
+
const lines = content.split('\n');
|
|
77
|
+
for (let i = 0; i < lines.length; i++) {
|
|
78
|
+
// Async function or promise without catch
|
|
79
|
+
if (lines[i].match(/\.then\s*\(/) && !content.substring(content.indexOf(lines[i])).match(/\.catch\s*\(/)) {
|
|
80
|
+
// Simple heuristic: .then() without .catch() nearby
|
|
81
|
+
const nextLines = lines.slice(i, i + 5).join('\n');
|
|
82
|
+
if (!nextLines.includes('.catch')) {
|
|
83
|
+
findings.push({
|
|
84
|
+
ruleId: 'REL-003', category: 'reliability', severity: 'high',
|
|
85
|
+
title: 'Promise chain without .catch() — unhandled rejection risk',
|
|
86
|
+
file: filepath, line: i + 1, fix: null,
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
return findings;
|
|
93
|
+
},
|
|
94
|
+
},
|
|
95
|
+
|
|
96
|
+
// REL-004: No graceful shutdown
|
|
97
|
+
{
|
|
98
|
+
id: 'REL-004',
|
|
99
|
+
category: 'reliability',
|
|
100
|
+
severity: 'medium',
|
|
101
|
+
confidence: 'likely',
|
|
102
|
+
title: 'No Graceful Shutdown Handler',
|
|
103
|
+
check({ files, stack }) {
|
|
104
|
+
const findings = [];
|
|
105
|
+
if (stack.runtime !== 'node') return findings;
|
|
106
|
+
|
|
107
|
+
const hasGraceful = [...files.values()].some(content =>
|
|
108
|
+
content.includes('SIGTERM') || content.includes('SIGINT') || content.includes('graceful')
|
|
109
|
+
);
|
|
110
|
+
|
|
111
|
+
if (!hasGraceful) {
|
|
112
|
+
const hasServer = [...files.values()].some(content =>
|
|
113
|
+
content.includes('.listen(') || content.includes('createServer')
|
|
114
|
+
);
|
|
115
|
+
if (hasServer) {
|
|
116
|
+
findings.push({
|
|
117
|
+
ruleId: 'REL-004', category: 'reliability', severity: 'medium',
|
|
118
|
+
title: 'No SIGTERM/SIGINT handler — server won\'t shut down gracefully',
|
|
119
|
+
description: 'Handle SIGTERM to drain connections before shutdown. Important for containers and deployments.',
|
|
120
|
+
fix: null,
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
return findings;
|
|
125
|
+
},
|
|
126
|
+
},
|
|
127
|
+
|
|
128
|
+
// REL-005: Empty catch blocks
|
|
129
|
+
{
|
|
130
|
+
id: 'REL-005',
|
|
131
|
+
category: 'reliability',
|
|
132
|
+
severity: 'medium',
|
|
133
|
+
confidence: 'likely',
|
|
134
|
+
title: 'Empty Catch Block — Silent Error',
|
|
135
|
+
check({ files }) {
|
|
136
|
+
const findings = [];
|
|
137
|
+
for (const [filepath, content] of files) {
|
|
138
|
+
if (!isSourceFile(filepath)) continue;
|
|
139
|
+
const lines = content.split('\n');
|
|
140
|
+
for (let i = 0; i < lines.length; i++) {
|
|
141
|
+
if (lines[i].match(/catch\s*\([^)]*\)\s*\{\s*\}/) ||
|
|
142
|
+
(lines[i].match(/catch\s*\([^)]*\)\s*\{/) && lines[i + 1]?.trim() === '}')) {
|
|
143
|
+
findings.push({
|
|
144
|
+
ruleId: 'REL-005', category: 'reliability', severity: 'medium',
|
|
145
|
+
title: 'Empty catch block — errors are silently swallowed',
|
|
146
|
+
description: 'At minimum, log the error. Silent failures make debugging impossible.',
|
|
147
|
+
file: filepath, line: i + 1, fix: null,
|
|
148
|
+
});
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
return findings;
|
|
153
|
+
},
|
|
154
|
+
},
|
|
155
|
+
|
|
156
|
+
// REL-006: No request timeouts
|
|
157
|
+
{
|
|
158
|
+
id: 'REL-006',
|
|
159
|
+
category: 'reliability',
|
|
160
|
+
severity: 'medium',
|
|
161
|
+
confidence: 'likely',
|
|
162
|
+
title: 'No Request Timeouts',
|
|
163
|
+
check({ files, stack }) {
|
|
164
|
+
const findings = [];
|
|
165
|
+
if (stack.runtime !== 'node') return findings;
|
|
166
|
+
|
|
167
|
+
const hasTimeout = [...files.values()].some(content =>
|
|
168
|
+
content.includes('timeout') || content.includes('AbortController') || content.includes('signal')
|
|
169
|
+
);
|
|
170
|
+
|
|
171
|
+
const hasFetch = [...files.values()].some(content =>
|
|
172
|
+
content.includes('fetch(') || content.includes('axios') || content.includes('got(')
|
|
173
|
+
);
|
|
174
|
+
|
|
175
|
+
if (hasFetch && !hasTimeout) {
|
|
176
|
+
findings.push({
|
|
177
|
+
ruleId: 'REL-006', category: 'reliability', severity: 'medium',
|
|
178
|
+
title: 'External API calls without timeout configuration',
|
|
179
|
+
description: 'Requests without timeouts can hang forever, blocking your server. Set timeouts on all external calls.',
|
|
180
|
+
fix: null,
|
|
181
|
+
});
|
|
182
|
+
}
|
|
183
|
+
return findings;
|
|
184
|
+
},
|
|
185
|
+
},
|
|
186
|
+
// REL-007: No process-level uncaught exception handler
|
|
187
|
+
{
|
|
188
|
+
id: 'REL-007',
|
|
189
|
+
category: 'reliability',
|
|
190
|
+
severity: 'high',
|
|
191
|
+
confidence: 'likely',
|
|
192
|
+
title: 'No Uncaught Exception Handler',
|
|
193
|
+
check({ files, stack }) {
|
|
194
|
+
const findings = [];
|
|
195
|
+
if (stack.runtime !== 'node') return findings;
|
|
196
|
+
|
|
197
|
+
const hasHandler = [...files.values()].some(content =>
|
|
198
|
+
content.includes('uncaughtException') || content.includes('unhandledRejection')
|
|
199
|
+
);
|
|
200
|
+
|
|
201
|
+
if (!hasHandler) {
|
|
202
|
+
const hasServer = [...files.values()].some(content =>
|
|
203
|
+
content.includes('.listen(') || content.includes('createServer')
|
|
204
|
+
);
|
|
205
|
+
if (hasServer) {
|
|
206
|
+
findings.push({
|
|
207
|
+
ruleId: 'REL-007', category: 'reliability', severity: 'high',
|
|
208
|
+
title: 'No process.on("uncaughtException") or process.on("unhandledRejection") handler',
|
|
209
|
+
description: 'Uncaught exceptions crash the process. Add handlers to log the error and shut down gracefully.',
|
|
210
|
+
fix: null,
|
|
211
|
+
});
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
return findings;
|
|
215
|
+
},
|
|
216
|
+
},
|
|
217
|
+
|
|
218
|
+
// REL-008: No retry logic on external API calls
|
|
219
|
+
{
|
|
220
|
+
id: 'REL-008',
|
|
221
|
+
category: 'reliability',
|
|
222
|
+
severity: 'medium',
|
|
223
|
+
confidence: 'suggestion',
|
|
224
|
+
title: 'No Retry Logic on External Calls',
|
|
225
|
+
check({ files, stack }) {
|
|
226
|
+
const findings = [];
|
|
227
|
+
if (stack.runtime !== 'node') return findings;
|
|
228
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
229
|
+
const retryLibs = ['p-retry', 'async-retry', 'retry', 'axios-retry', 'got', 'ky'];
|
|
230
|
+
const hasRetryLib = retryLibs.some(lib => lib in allDeps);
|
|
231
|
+
|
|
232
|
+
if (!hasRetryLib) {
|
|
233
|
+
const hasRetryCode = [...files.values()].some(content =>
|
|
234
|
+
content.includes('retry') || content.includes('retries') || content.includes('backoff')
|
|
235
|
+
);
|
|
236
|
+
const hasFetch = [...files.values()].some(content =>
|
|
237
|
+
content.includes('fetch(') || content.includes('axios')
|
|
238
|
+
);
|
|
239
|
+
if (hasFetch && !hasRetryCode) {
|
|
240
|
+
findings.push({
|
|
241
|
+
ruleId: 'REL-008', category: 'reliability', severity: 'medium',
|
|
242
|
+
title: 'External API calls without retry logic',
|
|
243
|
+
description: 'Network calls can fail transiently. Add retry with exponential backoff (p-retry, axios-retry, etc.).',
|
|
244
|
+
fix: null,
|
|
245
|
+
});
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
return findings;
|
|
249
|
+
},
|
|
250
|
+
},
|
|
251
|
+
|
|
252
|
+
// REL-009: No circuit breaker pattern
|
|
253
|
+
{
|
|
254
|
+
id: 'REL-009',
|
|
255
|
+
category: 'reliability',
|
|
256
|
+
severity: 'low',
|
|
257
|
+
confidence: 'suggestion',
|
|
258
|
+
title: 'No Circuit Breaker Pattern',
|
|
259
|
+
check({ files, stack }) {
|
|
260
|
+
const findings = [];
|
|
261
|
+
if (stack.runtime !== 'node') return findings;
|
|
262
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
263
|
+
const cbLibs = ['opossum', 'cockatiel', 'brakes', 'circuit-breaker-js', 'mollitia'];
|
|
264
|
+
const hasCB = cbLibs.some(lib => lib in allDeps);
|
|
265
|
+
|
|
266
|
+
if (!hasCB) {
|
|
267
|
+
const hasCBCode = [...files.values()].some(content =>
|
|
268
|
+
content.includes('circuitBreaker') || content.includes('CircuitBreaker')
|
|
269
|
+
);
|
|
270
|
+
const hasMultipleExternalCalls = [...files.values()].filter(content =>
|
|
271
|
+
content.includes('fetch(') || content.includes('axios') || content.includes('got(')
|
|
272
|
+
).length >= 3;
|
|
273
|
+
if (hasMultipleExternalCalls && !hasCBCode) {
|
|
274
|
+
findings.push({
|
|
275
|
+
ruleId: 'REL-009', category: 'reliability', severity: 'low',
|
|
276
|
+
title: 'Multiple external service calls without circuit breaker pattern',
|
|
277
|
+
description: 'Use a circuit breaker (opossum, cockatiel) to prevent cascading failures when external services go down.',
|
|
278
|
+
fix: null,
|
|
279
|
+
});
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
return findings;
|
|
283
|
+
},
|
|
284
|
+
},
|
|
285
|
+
|
|
286
|
+
// REL-010: No structured logging
|
|
287
|
+
{
|
|
288
|
+
id: 'REL-010',
|
|
289
|
+
category: 'reliability',
|
|
290
|
+
severity: 'low',
|
|
291
|
+
confidence: 'suggestion',
|
|
292
|
+
title: 'No Structured Logging',
|
|
293
|
+
check({ files, stack }) {
|
|
294
|
+
const findings = [];
|
|
295
|
+
if (stack.runtime !== 'node') return findings;
|
|
296
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
297
|
+
const logLibs = ['winston', 'pino', 'bunyan', 'log4js', 'morgan', 'loglevel', 'roarr'];
|
|
298
|
+
const hasLogLib = logLibs.some(lib => lib in allDeps);
|
|
299
|
+
|
|
300
|
+
if (!hasLogLib) {
|
|
301
|
+
const usesConsoleLog = [...files.values()].some(content =>
|
|
302
|
+
content.includes('console.log') || content.includes('console.error')
|
|
303
|
+
);
|
|
304
|
+
const hasServer = [...files.values()].some(content =>
|
|
305
|
+
content.includes('.listen(') || content.includes('createServer')
|
|
306
|
+
);
|
|
307
|
+
if (usesConsoleLog && hasServer) {
|
|
308
|
+
findings.push({
|
|
309
|
+
ruleId: 'REL-010', category: 'reliability', severity: 'low',
|
|
310
|
+
title: 'Using console.log instead of a structured logging library',
|
|
311
|
+
description: 'Use pino, winston, or similar for JSON-formatted logs with levels, timestamps, and request context.',
|
|
312
|
+
fix: null,
|
|
313
|
+
});
|
|
314
|
+
}
|
|
315
|
+
}
|
|
316
|
+
return findings;
|
|
317
|
+
},
|
|
318
|
+
},
|
|
319
|
+
|
|
320
|
+
// REL-011: No database migration system
|
|
321
|
+
{
|
|
322
|
+
id: 'REL-011',
|
|
323
|
+
category: 'reliability',
|
|
324
|
+
severity: 'medium',
|
|
325
|
+
confidence: 'likely',
|
|
326
|
+
title: 'No Database Migration System',
|
|
327
|
+
check({ files, stack }) {
|
|
328
|
+
const findings = [];
|
|
329
|
+
if (stack.runtime !== 'node') return findings;
|
|
330
|
+
if (stack.orm === 'prisma') return findings; // Prisma has built-in migrations
|
|
331
|
+
|
|
332
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
333
|
+
const migrationLibs = ['knex', 'sequelize-cli', 'typeorm', 'db-migrate', 'umzug', 'node-pg-migrate', 'drizzle-kit'];
|
|
334
|
+
const hasMigrationLib = migrationLibs.some(lib => lib in allDeps);
|
|
335
|
+
|
|
336
|
+
if (!hasMigrationLib) {
|
|
337
|
+
const hasDbAccess = [...files.values()].some(content =>
|
|
338
|
+
content.includes('CREATE TABLE') || content.includes('db.query') ||
|
|
339
|
+
content.includes('pool.query') || content.includes('.execute(')
|
|
340
|
+
);
|
|
341
|
+
const hasMigrationFiles = [...files.keys()].some(f =>
|
|
342
|
+
f.includes('migration') || f.includes('migrate')
|
|
343
|
+
);
|
|
344
|
+
if (hasDbAccess && !hasMigrationFiles) {
|
|
345
|
+
findings.push({
|
|
346
|
+
ruleId: 'REL-011', category: 'reliability', severity: 'medium',
|
|
347
|
+
title: 'Database access without a migration system',
|
|
348
|
+
description: 'Use a migration tool (knex, Prisma, db-migrate) to version-control schema changes and enable safe rollbacks.',
|
|
349
|
+
fix: null,
|
|
350
|
+
});
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
return findings;
|
|
354
|
+
},
|
|
355
|
+
},
|
|
356
|
+
|
|
357
|
+
// REL-012: No monitoring / APM
|
|
358
|
+
{
|
|
359
|
+
id: 'REL-012',
|
|
360
|
+
category: 'reliability',
|
|
361
|
+
severity: 'medium',
|
|
362
|
+
confidence: 'likely',
|
|
363
|
+
title: 'No Application Monitoring',
|
|
364
|
+
check({ files, stack }) {
|
|
365
|
+
const findings = [];
|
|
366
|
+
if (stack.runtime !== 'node') return findings;
|
|
367
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
368
|
+
const apmLibs = [
|
|
369
|
+
'newrelic', '@sentry/node', '@sentry/nextjs', 'dd-trace', 'elastic-apm-node',
|
|
370
|
+
'@opentelemetry/sdk-node', '@opentelemetry/api', 'prom-client', 'appmetrics',
|
|
371
|
+
'applicationinsights', '@google-cloud/trace-agent', 'honeycomb-beeline',
|
|
372
|
+
];
|
|
373
|
+
const hasAPM = apmLibs.some(lib => lib in allDeps);
|
|
374
|
+
|
|
375
|
+
if (!hasAPM) {
|
|
376
|
+
const hasServer = [...files.values()].some(content =>
|
|
377
|
+
content.includes('.listen(') || content.includes('createServer')
|
|
378
|
+
);
|
|
379
|
+
if (hasServer) {
|
|
380
|
+
findings.push({
|
|
381
|
+
ruleId: 'REL-012', category: 'reliability', severity: 'medium',
|
|
382
|
+
title: 'No application monitoring or APM detected',
|
|
383
|
+
description: 'Add Sentry, Datadog, New Relic, or OpenTelemetry to track errors and performance in production.',
|
|
384
|
+
fix: null,
|
|
385
|
+
});
|
|
386
|
+
}
|
|
387
|
+
}
|
|
388
|
+
return findings;
|
|
389
|
+
},
|
|
390
|
+
},
|
|
391
|
+
|
|
392
|
+
// REL-EH-001: No React Error Boundary
|
|
393
|
+
{
|
|
394
|
+
id: 'REL-EH-001',
|
|
395
|
+
category: 'reliability',
|
|
396
|
+
severity: 'high',
|
|
397
|
+
confidence: 'likely',
|
|
398
|
+
title: 'No React Error Boundary',
|
|
399
|
+
check({ files, stack }) {
|
|
400
|
+
const findings = [];
|
|
401
|
+
if (!['react', 'nextjs'].includes(stack.framework)) return findings;
|
|
402
|
+
const hasErrorBoundary = [...files.values()].some(c =>
|
|
403
|
+
c.includes('ErrorBoundary') || c.includes('componentDidCatch') || c.includes('getDerivedStateFromError')
|
|
404
|
+
);
|
|
405
|
+
if (!hasErrorBoundary && [...files.keys()].some(f => f.match(/\.(jsx|tsx)$/))) {
|
|
406
|
+
findings.push({ ruleId: 'REL-EH-001', category: 'reliability', severity: 'high',
|
|
407
|
+
title: 'No React Error Boundary — render errors crash the entire app',
|
|
408
|
+
description: 'Add an ErrorBoundary component to show a fallback UI instead of a blank screen on render errors.',
|
|
409
|
+
fix: null });
|
|
410
|
+
}
|
|
411
|
+
return findings;
|
|
412
|
+
},
|
|
413
|
+
},
|
|
414
|
+
|
|
415
|
+
// REL-EH-002: JSON.parse without try/catch
|
|
416
|
+
{
|
|
417
|
+
id: 'REL-EH-002',
|
|
418
|
+
category: 'reliability',
|
|
419
|
+
severity: 'medium',
|
|
420
|
+
confidence: 'likely',
|
|
421
|
+
title: 'JSON.parse Without Error Handling',
|
|
422
|
+
check({ files }) {
|
|
423
|
+
const findings = [];
|
|
424
|
+
for (const [filepath, content] of files) {
|
|
425
|
+
if (!isSourceFile(filepath)) continue;
|
|
426
|
+
const lines = content.split('\n');
|
|
427
|
+
for (let i = 0; i < lines.length; i++) {
|
|
428
|
+
if (lines[i].match(/JSON\.parse\s*\(/) && !lines[i].trim().startsWith('//')) {
|
|
429
|
+
const ctx = lines.slice(Math.max(0, i - 4), i + 4).join('\n');
|
|
430
|
+
if (!ctx.includes('try') && !ctx.includes('catch')) {
|
|
431
|
+
findings.push({ ruleId: 'REL-EH-002', category: 'reliability', severity: 'medium',
|
|
432
|
+
title: 'JSON.parse without try/catch — throws SyntaxError on invalid input',
|
|
433
|
+
description: 'Wrap JSON.parse in try/catch or use a safe-parse utility.',
|
|
434
|
+
file: filepath, line: i + 1, fix: null });
|
|
435
|
+
break;
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
}
|
|
440
|
+
return findings;
|
|
441
|
+
},
|
|
442
|
+
},
|
|
443
|
+
|
|
444
|
+
// REL-EH-003: Express missing error handler
|
|
445
|
+
{
|
|
446
|
+
id: 'REL-EH-003',
|
|
447
|
+
category: 'reliability',
|
|
448
|
+
severity: 'high',
|
|
449
|
+
confidence: 'likely',
|
|
450
|
+
title: 'No Express Error Handler Middleware',
|
|
451
|
+
check({ files, stack }) {
|
|
452
|
+
const findings = [];
|
|
453
|
+
if (stack.framework !== 'express') return findings;
|
|
454
|
+
if (![...files.values()].some(c => c.includes('.listen('))) return findings;
|
|
455
|
+
const has4Arg = [...files.values()].some(c =>
|
|
456
|
+
c.match(/app\.use\s*\(\s*(?:function\s*)?\(\s*err\s*,\s*req\s*,\s*res\s*,\s*next\s*\)/) ||
|
|
457
|
+
c.match(/app\.use\s*\(\s*\(\s*err\s*,\s*req\s*,\s*res\s*,\s*next\s*\)\s*=>/)
|
|
458
|
+
);
|
|
459
|
+
if (!has4Arg) {
|
|
460
|
+
findings.push({ ruleId: 'REL-EH-003', category: 'reliability', severity: 'high',
|
|
461
|
+
title: 'No Express error handler (4-argument middleware) registered',
|
|
462
|
+
description: 'Add app.use((err, req, res, next) => { ... }) to centrally handle errors and return proper responses.',
|
|
463
|
+
fix: null });
|
|
464
|
+
}
|
|
465
|
+
return findings;
|
|
466
|
+
},
|
|
467
|
+
},
|
|
468
|
+
|
|
469
|
+
// REL-EH-004: Stream without error handler
|
|
470
|
+
{
|
|
471
|
+
id: 'REL-EH-004',
|
|
472
|
+
category: 'reliability',
|
|
473
|
+
severity: 'high',
|
|
474
|
+
confidence: 'likely',
|
|
475
|
+
title: 'Stream Without Error Handler',
|
|
476
|
+
check({ files }) {
|
|
477
|
+
const findings = [];
|
|
478
|
+
for (const [filepath, content] of files) {
|
|
479
|
+
if (!isSourceFile(filepath)) continue;
|
|
480
|
+
const lines = content.split('\n');
|
|
481
|
+
for (let i = 0; i < lines.length; i++) {
|
|
482
|
+
if (lines[i].match(/createReadStream|createWriteStream/) && !lines[i].includes('pipeline')) {
|
|
483
|
+
const block = lines.slice(i, Math.min(i + 8, lines.length)).join('\n');
|
|
484
|
+
if (!block.includes(".on('error'") && !block.includes('.on("error"')) {
|
|
485
|
+
findings.push({ ruleId: 'REL-EH-004', category: 'reliability', severity: 'high',
|
|
486
|
+
title: "Stream without .on('error') handler — crashes process on error",
|
|
487
|
+
description: "Add .on('error', handler) or use the pipeline() utility which handles errors automatically.",
|
|
488
|
+
file: filepath, line: i + 1, fix: null });
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
return findings;
|
|
494
|
+
},
|
|
495
|
+
},
|
|
496
|
+
|
|
497
|
+
// REL-TEST-001: No test files
|
|
498
|
+
{
|
|
499
|
+
id: 'REL-TEST-001',
|
|
500
|
+
category: 'reliability',
|
|
501
|
+
severity: 'high',
|
|
502
|
+
confidence: 'likely',
|
|
503
|
+
title: 'No Test Files Found',
|
|
504
|
+
check({ files, stack }) {
|
|
505
|
+
const findings = [];
|
|
506
|
+
if (stack.runtime !== 'node') return findings;
|
|
507
|
+
const hasTests = [...files.keys()].some(f =>
|
|
508
|
+
f.match(/\.(test|spec)\.(js|ts|jsx|tsx)$/) || f.includes('__tests__') || f.includes('/test/')
|
|
509
|
+
);
|
|
510
|
+
if (!hasTests) {
|
|
511
|
+
findings.push({ ruleId: 'REL-TEST-001', category: 'reliability', severity: 'medium',
|
|
512
|
+
title: 'No test files found', description: 'Add unit tests. Untested code leads to undetected regressions in production.', fix: null });
|
|
513
|
+
}
|
|
514
|
+
return findings;
|
|
515
|
+
},
|
|
516
|
+
},
|
|
517
|
+
|
|
518
|
+
// REL-TEST-002: No e2e tests
|
|
519
|
+
{
|
|
520
|
+
id: 'REL-TEST-002',
|
|
521
|
+
category: 'reliability',
|
|
522
|
+
severity: 'medium',
|
|
523
|
+
confidence: 'likely',
|
|
524
|
+
title: 'No End-to-End Tests',
|
|
525
|
+
check({ files, stack }) {
|
|
526
|
+
const findings = [];
|
|
527
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
528
|
+
const hasE2E = ['playwright', '@playwright/test', 'cypress', 'puppeteer'].some(d => d in allDeps);
|
|
529
|
+
if (!hasE2E && [...files.keys()].some(f => f.match(/\.(jsx|tsx)$/))) {
|
|
530
|
+
findings.push({ ruleId: 'REL-TEST-002', category: 'reliability', severity: 'medium',
|
|
531
|
+
title: 'No end-to-end test framework detected',
|
|
532
|
+
description: 'Add Playwright or Cypress to test critical user flows (login, signup, checkout).', fix: null });
|
|
533
|
+
}
|
|
534
|
+
return findings;
|
|
535
|
+
},
|
|
536
|
+
},
|
|
537
|
+
|
|
538
|
+
// REL-MON-001: No error tracking
|
|
539
|
+
{
|
|
540
|
+
id: 'REL-MON-001',
|
|
541
|
+
category: 'reliability',
|
|
542
|
+
severity: 'high',
|
|
543
|
+
confidence: 'likely',
|
|
544
|
+
title: 'No Error Tracking Service',
|
|
545
|
+
check({ files, stack }) {
|
|
546
|
+
const findings = [];
|
|
547
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
548
|
+
const trackers = ['@sentry/node', '@sentry/nextjs', '@sentry/react', 'bugsnag', '@bugsnag/js', 'rollbar'];
|
|
549
|
+
if (!trackers.some(d => d in allDeps)) {
|
|
550
|
+
const hasServer = [...files.values()].some(c => c.includes('.listen(') || c.includes('createServer'));
|
|
551
|
+
if (hasServer) {
|
|
552
|
+
findings.push({ ruleId: 'REL-MON-001', category: 'reliability', severity: 'high',
|
|
553
|
+
title: 'No error tracking service (Sentry, Bugsnag, Rollbar) detected',
|
|
554
|
+
description: 'Production bugs will go unnoticed. Add Sentry to capture errors, alert your team, and track resolutions.',
|
|
555
|
+
fix: null });
|
|
556
|
+
}
|
|
557
|
+
}
|
|
558
|
+
return findings;
|
|
559
|
+
},
|
|
560
|
+
},
|
|
561
|
+
|
|
562
|
+
// REL-MON-002: No uptime monitoring
|
|
563
|
+
{
|
|
564
|
+
id: 'REL-MON-002',
|
|
565
|
+
category: 'reliability',
|
|
566
|
+
severity: 'low',
|
|
567
|
+
confidence: 'suggestion',
|
|
568
|
+
confidence: 'likely',
|
|
569
|
+
title: 'No Uptime Monitoring',
|
|
570
|
+
check({ files }) {
|
|
571
|
+
const has = [...files.values()].some(c =>
|
|
572
|
+
c.includes('pingdom') || c.includes('uptimerobot') || c.includes('pagerduty') ||
|
|
573
|
+
c.includes('betteruptime') || c.includes('statuspage')
|
|
574
|
+
);
|
|
575
|
+
if (!has) {
|
|
576
|
+
return [{ ruleId: 'REL-MON-002', category: 'reliability', severity: 'high',
|
|
577
|
+
title: 'No uptime monitoring detected',
|
|
578
|
+
description: 'Add UptimeRobot or Better Uptime to alert you when the service goes down.', fix: null }];
|
|
579
|
+
}
|
|
580
|
+
return [];
|
|
581
|
+
},
|
|
582
|
+
},
|
|
583
|
+
|
|
584
|
+
// REL-MON-003: No correlation IDs in logs
|
|
585
|
+
{
|
|
586
|
+
id: 'REL-MON-003',
|
|
587
|
+
category: 'reliability',
|
|
588
|
+
severity: 'medium',
|
|
589
|
+
confidence: 'likely',
|
|
590
|
+
title: 'No Request Correlation IDs',
|
|
591
|
+
check({ files, stack }) {
|
|
592
|
+
const findings = [];
|
|
593
|
+
if (stack.runtime !== 'node') return findings;
|
|
594
|
+
const has = [...files.values()].some(c =>
|
|
595
|
+
c.includes('requestId') || c.includes('x-request-id') || c.includes('correlationId') || c.includes('traceId')
|
|
596
|
+
);
|
|
597
|
+
if (!has && [...files.values()].some(c => c.includes('.listen('))) {
|
|
598
|
+
findings.push({ ruleId: 'REL-MON-003', category: 'reliability', severity: 'medium',
|
|
599
|
+
title: 'No request correlation IDs in logs',
|
|
600
|
+
description: 'Attach a unique requestId to every log entry to trace requests across distributed systems.',
|
|
601
|
+
fix: null });
|
|
602
|
+
}
|
|
603
|
+
return findings;
|
|
604
|
+
},
|
|
605
|
+
},
|
|
606
|
+
|
|
607
|
+
// REL-DB-001: Related mutations without transaction
|
|
608
|
+
{
|
|
609
|
+
id: 'REL-DB-001',
|
|
610
|
+
category: 'reliability',
|
|
611
|
+
severity: 'high',
|
|
612
|
+
confidence: 'likely',
|
|
613
|
+
title: 'Related DB Mutations Without Transaction',
|
|
614
|
+
check({ files }) {
|
|
615
|
+
const findings = [];
|
|
616
|
+
for (const [filepath, content] of files) {
|
|
617
|
+
if (!isSourceFile(filepath)) continue;
|
|
618
|
+
if (!(filepath.includes('api/') || filepath.includes('service') || filepath.includes('route'))) continue;
|
|
619
|
+
const mutations = (content.match(/\.(create|insert|save|update|delete|remove)\s*\(/g) || []).length;
|
|
620
|
+
const hasTx = content.includes('transaction') || content.includes('$transaction') || content.includes('BEGIN');
|
|
621
|
+
if (mutations >= 3 && !hasTx) {
|
|
622
|
+
findings.push({ ruleId: 'REL-DB-001', category: 'reliability', severity: 'high',
|
|
623
|
+
title: 'Multiple DB mutations without a transaction — inconsistent state risk',
|
|
624
|
+
description: 'Wrap related mutations in a transaction so either all succeed or all roll back.',
|
|
625
|
+
file: filepath, fix: null });
|
|
626
|
+
}
|
|
627
|
+
}
|
|
628
|
+
return findings;
|
|
629
|
+
},
|
|
630
|
+
},
|
|
631
|
+
|
|
632
|
+
// REL-OPS-001: Hardcoded port
|
|
633
|
+
{
|
|
634
|
+
id: 'REL-OPS-001',
|
|
635
|
+
category: 'reliability',
|
|
636
|
+
severity: 'low',
|
|
637
|
+
confidence: 'suggestion',
|
|
638
|
+
title: 'Hardcoded Port Number',
|
|
639
|
+
check({ files }) {
|
|
640
|
+
const findings = [];
|
|
641
|
+
for (const [filepath, content] of files) {
|
|
642
|
+
if (!isSourceFile(filepath)) continue;
|
|
643
|
+
const lines = content.split('\n');
|
|
644
|
+
for (let i = 0; i < lines.length; i++) {
|
|
645
|
+
if (lines[i].match(/\.listen\s*\(\s*\d{4,5}/) && !lines[i].match(/process\.env/)) {
|
|
646
|
+
findings.push({ ruleId: 'REL-OPS-001', category: 'reliability', severity: 'low',
|
|
647
|
+
title: 'Hardcoded port in server.listen()',
|
|
648
|
+
description: 'Use process.env.PORT || 3000 to allow the port to be configured via environment variable.',
|
|
649
|
+
file: filepath, line: i + 1, fix: null });
|
|
650
|
+
}
|
|
651
|
+
}
|
|
652
|
+
}
|
|
653
|
+
return findings;
|
|
654
|
+
},
|
|
655
|
+
},
|
|
656
|
+
|
|
657
|
+
// REL-OPS-002: No env var validation at startup
|
|
658
|
+
{
|
|
659
|
+
id: 'REL-OPS-002',
|
|
660
|
+
category: 'reliability',
|
|
661
|
+
severity: 'high',
|
|
662
|
+
confidence: 'likely',
|
|
663
|
+
title: 'No Environment Variable Validation at Startup',
|
|
664
|
+
check({ files, stack }) {
|
|
665
|
+
const findings = [];
|
|
666
|
+
if (stack.runtime !== 'node') return findings;
|
|
667
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
668
|
+
const hasValidation = 'envalid' in allDeps || [...files.values()].some(c =>
|
|
669
|
+
c.match(/if\s*\(\s*!process\.env\.\w+/) || c.match(/process\.env\.\w+\s*\|\|\s*(?:throw|process\.exit)/)
|
|
670
|
+
);
|
|
671
|
+
const envVarCount = [...files.values()].reduce((n, c) => n + (c.match(/process\.env\.\w+/g) || []).length, 0);
|
|
672
|
+
if (envVarCount > 5 && !hasValidation) {
|
|
673
|
+
findings.push({ ruleId: 'REL-OPS-002', category: 'reliability', severity: 'high',
|
|
674
|
+
title: 'No startup validation for required environment variables',
|
|
675
|
+
description: 'Use envalid or validate env vars at startup to fail fast with a clear error instead of cryptic runtime failures.',
|
|
676
|
+
fix: null });
|
|
677
|
+
}
|
|
678
|
+
return findings;
|
|
679
|
+
},
|
|
680
|
+
},
|
|
681
|
+
|
|
682
|
+
// REL-RES-001: Payment without idempotency key
|
|
683
|
+
{
|
|
684
|
+
id: 'REL-RES-001',
|
|
685
|
+
category: 'reliability',
|
|
686
|
+
severity: 'critical',
|
|
687
|
+
confidence: 'definite',
|
|
688
|
+
title: 'Payment API Call Without Idempotency Key',
|
|
689
|
+
check({ files }) {
|
|
690
|
+
const findings = [];
|
|
691
|
+
for (const [filepath, content] of files) {
|
|
692
|
+
if (!isSourceFile(filepath)) continue;
|
|
693
|
+
if (content.includes('stripe') || content.includes('paymentIntent') || content.includes('braintree')) {
|
|
694
|
+
if (content.match(/\.create\s*\(\s*\{/) && !content.includes('idempotencyKey') && !content.includes('idempotency_key')) {
|
|
695
|
+
findings.push({ ruleId: 'REL-RES-001', category: 'reliability', severity: 'critical',
|
|
696
|
+
title: 'Payment API call without idempotency key — double-charge risk on retry',
|
|
697
|
+
description: 'Pass idempotencyKey to Stripe API calls so retries due to timeouts do not create duplicate charges.',
|
|
698
|
+
file: filepath, fix: null });
|
|
699
|
+
}
|
|
700
|
+
}
|
|
701
|
+
}
|
|
702
|
+
return findings;
|
|
703
|
+
},
|
|
704
|
+
},
|
|
705
|
+
|
|
706
|
+
// REL-TEST-003: Test file with no assertions
|
|
707
|
+
{ id: 'REL-TEST-003', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Test File With No Assertions',
|
|
708
|
+
check({ files }) {
|
|
709
|
+
const findings = [];
|
|
710
|
+
for (const [fp, c] of files) {
|
|
711
|
+
if (!fp.match(/\.(test|spec)\.(js|ts|jsx|tsx)$/)) continue;
|
|
712
|
+
if (!c.match(/expect\s*\(|assert\.|should\.|\.toBe|\.toEqual|\.toHaveBeenCalled/)) {
|
|
713
|
+
findings.push({ ruleId: 'REL-TEST-003', category: 'reliability', severity: 'medium',
|
|
714
|
+
title: 'Test file contains no assertions — tests always pass', description: 'Add expect()/assert() calls. A test with no assertions gives false confidence.', file: fp, fix: null });
|
|
715
|
+
}
|
|
716
|
+
}
|
|
717
|
+
return findings;
|
|
718
|
+
},
|
|
719
|
+
},
|
|
720
|
+
|
|
721
|
+
// REL-TEST-004: No test coverage configuration
|
|
722
|
+
{ id: 'REL-TEST-004', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Test Coverage Threshold',
|
|
723
|
+
check({ files, stack }) {
|
|
724
|
+
const findings = [];
|
|
725
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
726
|
+
if (!Object.keys(allDeps).some(d => d.match(/jest|vitest|mocha|jasmine/))) return findings;
|
|
727
|
+
const hasCoverage = [...files.values()].some(c =>
|
|
728
|
+
c.match(/coverage.*threshold|statements.*\d+|branches.*\d+|coverageThreshold|c8/));
|
|
729
|
+
if (!hasCoverage) {
|
|
730
|
+
findings.push({ ruleId: 'REL-TEST-004', category: 'reliability', severity: 'medium',
|
|
731
|
+
title: 'No test coverage threshold configured', description: 'Set coverageThreshold in Jest/Vitest config to fail CI when coverage drops below e.g. 70%.', fix: null });
|
|
732
|
+
}
|
|
733
|
+
return findings;
|
|
734
|
+
},
|
|
735
|
+
},
|
|
736
|
+
|
|
737
|
+
// REL-TEST-005: No test for error paths
|
|
738
|
+
{ id: 'REL-TEST-005', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Tests Only Cover Happy Path',
|
|
739
|
+
check({ files }) {
|
|
740
|
+
const findings = [];
|
|
741
|
+
for (const [fp, c] of files) {
|
|
742
|
+
if (!fp.match(/\.(test|spec)\.(js|ts|jsx|tsx)$/)) continue;
|
|
743
|
+
const hasErrorTest = c.match(/toThrow|rejects|error.*test|test.*error|should.*fail|expect.*reject/i);
|
|
744
|
+
if (!hasErrorTest && (c.match(/it\(|test\(|describe\(/) || []).length > 3) {
|
|
745
|
+
findings.push({ ruleId: 'REL-TEST-005', category: 'reliability', severity: 'medium',
|
|
746
|
+
title: 'Test file has no error case tests — only happy path covered', description: 'Add tests for error cases: invalid input, network failures, missing data. Error paths are where most production bugs hide.', file: fp, fix: null });
|
|
747
|
+
}
|
|
748
|
+
}
|
|
749
|
+
return findings;
|
|
750
|
+
},
|
|
751
|
+
},
|
|
752
|
+
|
|
753
|
+
// REL-DB-003: Long-running transactions
|
|
754
|
+
{ id: 'REL-DB-003', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Long-Running Transactions Without Timeout',
|
|
755
|
+
check({ files }) {
|
|
756
|
+
const findings = [];
|
|
757
|
+
for (const [fp, c] of files) {
|
|
758
|
+
if (!isSourceFile(fp)) continue;
|
|
759
|
+
if (c.match(/await.*transaction|BEGIN.*transaction|\$transaction/) && c.match(/await.*sleep|setTimeout.*await|delay/)) {
|
|
760
|
+
findings.push({ ruleId: 'REL-DB-003', category: 'reliability', severity: 'high',
|
|
761
|
+
title: 'Database transaction may be held open with a delay/sleep inside',
|
|
762
|
+
description: 'Never sleep inside a transaction. Held locks block other queries and can cause deadlocks and timeouts.', file: fp, fix: null });
|
|
763
|
+
}
|
|
764
|
+
}
|
|
765
|
+
return findings;
|
|
766
|
+
},
|
|
767
|
+
},
|
|
768
|
+
|
|
769
|
+
// REL-DB-004: No soft deletes for important records
|
|
770
|
+
{ id: 'REL-DB-004', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Hard Deletes Without Soft Delete Option',
|
|
771
|
+
check({ files }) {
|
|
772
|
+
const findings = [];
|
|
773
|
+
for (const [fp, c] of files) {
|
|
774
|
+
if (!isSourceFile(fp)) continue;
|
|
775
|
+
if (c.match(/\.deleteOne\s*\(|\.deleteMany\s*\(|\.destroy\s*\(/) &&
|
|
776
|
+
!c.match(/deletedAt|deleted_at|isDeleted|soft.delete|paranoid/)) {
|
|
777
|
+
if (c.match(/user|order|payment|subscription|account/i)) {
|
|
778
|
+
findings.push({ ruleId: 'REL-DB-004', category: 'reliability', severity: 'medium',
|
|
779
|
+
title: 'Hard deleting user/order/payment records without soft-delete option',
|
|
780
|
+
description: 'Add a deletedAt column for soft deletes. Hard deletes make debugging, auditing, and compliance reporting impossible.', file: fp, fix: null });
|
|
781
|
+
}
|
|
782
|
+
}
|
|
783
|
+
}
|
|
784
|
+
return findings;
|
|
785
|
+
},
|
|
786
|
+
},
|
|
787
|
+
|
|
788
|
+
// REL-DB-005: Missing foreign key validation before insert
|
|
789
|
+
{ id: 'REL-DB-005', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No FK Validation Before Insert',
|
|
790
|
+
check({ files }) {
|
|
791
|
+
const findings = [];
|
|
792
|
+
for (const [fp, c] of files) {
|
|
793
|
+
if (!isSourceFile(fp)) continue;
|
|
794
|
+
if ((fp.includes('api/') || fp.includes('route')) && c.match(/userId|user_id|authorId|author_id/)) {
|
|
795
|
+
if (c.match(/\.create\s*\(\s*\{/) && !c.match(/findById|findOne|exists|findUnique.*userId|findFirst/)) {
|
|
796
|
+
findings.push({ ruleId: 'REL-DB-005', category: 'reliability', severity: 'medium',
|
|
797
|
+
title: 'Creating records with foreign key IDs without validating they exist',
|
|
798
|
+
description: 'Verify referenced entities exist before inserting. Rely on DB foreign key constraints AND validate at the application layer.', file: fp, fix: null });
|
|
799
|
+
}
|
|
800
|
+
}
|
|
801
|
+
}
|
|
802
|
+
return findings;
|
|
803
|
+
},
|
|
804
|
+
},
|
|
805
|
+
|
|
806
|
+
// REL-RES-003: No dead letter queue for failed jobs
|
|
807
|
+
{ id: 'REL-RES-003', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No Dead Letter Queue for Failed Jobs',
|
|
808
|
+
check({ files, stack }) {
|
|
809
|
+
const findings = [];
|
|
810
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
811
|
+
const hasQueue = ['bullmq', 'bull', 'bee-queue', 'agenda'].some(d => d in allDeps);
|
|
812
|
+
if (hasQueue) {
|
|
813
|
+
const hasDLQ = [...files.values()].some(c => c.match(/deadLetter|dead.letter|failed.*queue|onFailed|failedQueue/i));
|
|
814
|
+
if (!hasDLQ) {
|
|
815
|
+
findings.push({ ruleId: 'REL-RES-003', category: 'reliability', severity: 'high',
|
|
816
|
+
title: 'Job queue without dead letter queue for failed jobs',
|
|
817
|
+
description: 'Configure a dead letter queue to capture jobs that fail after retries. Without it, failed jobs are silently dropped.', fix: null });
|
|
818
|
+
}
|
|
819
|
+
}
|
|
820
|
+
return findings;
|
|
821
|
+
},
|
|
822
|
+
},
|
|
823
|
+
|
|
824
|
+
// REL-RES-004: Missing mutex for concurrent resource access
|
|
825
|
+
{ id: 'REL-RES-004', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No Mutex for Concurrent Resource Access',
|
|
826
|
+
check({ files, stack }) {
|
|
827
|
+
const findings = [];
|
|
828
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
829
|
+
const hasMutex = 'async-mutex' in allDeps || 'redis-mutex' in allDeps || 'redlock' in allDeps || 'node-redlock' in allDeps;
|
|
830
|
+
const hasConcurrentAccess = [...files.values()].some(c =>
|
|
831
|
+
c.match(/read.*then.*write|check.*then.*update|findOne.*then.*save/i) &&
|
|
832
|
+
(c.includes('await ') && c.includes('await '))
|
|
833
|
+
);
|
|
834
|
+
if (hasConcurrentAccess && !hasMutex) {
|
|
835
|
+
findings.push({ ruleId: 'REL-RES-004', category: 'reliability', severity: 'high',
|
|
836
|
+
title: 'Read-then-write pattern without distributed lock — race condition risk',
|
|
837
|
+
description: 'Use Redlock or optimistic locking to prevent TOCTOU (time-of-check to time-of-use) race conditions in concurrent environments.', fix: null });
|
|
838
|
+
}
|
|
839
|
+
return findings;
|
|
840
|
+
},
|
|
841
|
+
},
|
|
842
|
+
|
|
843
|
+
// REL-MON-004: No audit log for sensitive operations
|
|
844
|
+
{ id: 'REL-MON-004', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Audit Log for Sensitive Operations',
|
|
845
|
+
check({ files }) {
|
|
846
|
+
const findings = [];
|
|
847
|
+
const hasSensitiveOps = [...files.values()].some(c =>
|
|
848
|
+
c.match(/deleteUser|banUser|changeRole|updatePermissions|adminAction/i) ||
|
|
849
|
+
c.match(/role.*admin|isAdmin.*true|permission.*update/i)
|
|
850
|
+
);
|
|
851
|
+
const hasAuditLog = [...files.values()].some(c =>
|
|
852
|
+
c.match(/auditLog|audit_log|AuditEvent|activityLog|logAction/i)
|
|
853
|
+
);
|
|
854
|
+
if (hasSensitiveOps && !hasAuditLog) {
|
|
855
|
+
findings.push({ ruleId: 'REL-MON-004', category: 'reliability', severity: 'medium',
|
|
856
|
+
title: 'Sensitive admin operations without audit logging',
|
|
857
|
+
description: 'Log who did what and when for all privilege changes, deletions, and admin actions. Required for SOC2 and incident investigations.', fix: null });
|
|
858
|
+
}
|
|
859
|
+
return findings;
|
|
860
|
+
},
|
|
861
|
+
},
|
|
862
|
+
|
|
863
|
+
// REL-MON-005: Console.error not structured
|
|
864
|
+
{ id: 'REL-MON-005', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'console.error Instead of Structured Logger',
|
|
865
|
+
check({ files, stack }) {
|
|
866
|
+
const findings = [];
|
|
867
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
868
|
+
const hasLogger = ['winston', 'pino', 'bunyan', 'log4js'].some(d => d in allDeps);
|
|
869
|
+
for (const [fp, c] of files) {
|
|
870
|
+
if (!isSourceFile(fp) || fp.includes('test') || fp.includes('spec')) continue;
|
|
871
|
+
const count = (c.match(/console\.error\s*\(/g) || []).length;
|
|
872
|
+
if (count > 3 && !hasLogger) {
|
|
873
|
+
findings.push({ ruleId: 'REL-MON-005', category: 'reliability', severity: 'low',
|
|
874
|
+
title: `${count} console.error() calls — not structured or searchable in production logs`,
|
|
875
|
+
description: 'Use pino or winston for structured JSON logging with levels, timestamps, and context fields.', file: fp, fix: null });
|
|
876
|
+
}
|
|
877
|
+
}
|
|
878
|
+
return findings;
|
|
879
|
+
},
|
|
880
|
+
},
|
|
881
|
+
|
|
882
|
+
// REL-OPS-004: Missing NODE_ENV in production config
|
|
883
|
+
{ id: 'REL-OPS-004', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'NODE_ENV Not Set in Production Config',
|
|
884
|
+
check({ files }) {
|
|
885
|
+
const findings = [];
|
|
886
|
+
for (const [fp, c] of files) {
|
|
887
|
+
if (!fp.includes('.github/workflows') && !fp.includes('docker-compose')) continue;
|
|
888
|
+
if (c.match(/production|prod.*deploy/) && !c.match(/NODE_ENV.*production|NODE_ENV:\s*production/)) {
|
|
889
|
+
findings.push({ ruleId: 'REL-OPS-004', category: 'reliability', severity: 'medium',
|
|
890
|
+
title: 'Production deployment without explicit NODE_ENV=production',
|
|
891
|
+
description: 'Set NODE_ENV=production in production environments. Many libraries (Express, React) have important optimizations gated on this.', file: fp, fix: null });
|
|
892
|
+
}
|
|
893
|
+
}
|
|
894
|
+
return findings;
|
|
895
|
+
},
|
|
896
|
+
},
|
|
897
|
+
|
|
898
|
+
// REL-OPS-005: Server started without cluster/worker
|
|
899
|
+
{ id: 'REL-OPS-005', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'Single-Process Node.js Server',
|
|
900
|
+
check({ files, stack }) {
|
|
901
|
+
const findings = [];
|
|
902
|
+
if (stack.runtime !== 'node') return findings;
|
|
903
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
904
|
+
const hasCluster = 'pm2' in allDeps || [...files.values()].some(c =>
|
|
905
|
+
c.includes('cluster.fork') || c.includes('worker_threads') || c.match(/throng|@fastify\/cluster/)
|
|
906
|
+
);
|
|
907
|
+
if (!hasCluster && ![...files.keys()].some(f => f.includes('Dockerfile') || f.includes('kubernetes'))) {
|
|
908
|
+
const hasServer = [...files.values()].some(c => c.includes('.listen('));
|
|
909
|
+
if (hasServer) {
|
|
910
|
+
findings.push({ ruleId: 'REL-OPS-005', category: 'reliability', severity: 'low',
|
|
911
|
+
title: 'Single-process Node.js server — not utilizing all CPU cores',
|
|
912
|
+
description: 'Use PM2 cluster mode or the cluster module to spawn one worker per CPU core, improving throughput and fault isolation.', fix: null });
|
|
913
|
+
}
|
|
914
|
+
}
|
|
915
|
+
return findings;
|
|
916
|
+
},
|
|
917
|
+
},
|
|
918
|
+
|
|
919
|
+
// REL-OPS-006: No dependency health checks on startup
|
|
920
|
+
{ id: 'REL-OPS-006', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Dependency Health Checks on Startup',
|
|
921
|
+
check({ files, stack }) {
|
|
922
|
+
const findings = [];
|
|
923
|
+
if (stack.runtime !== 'node') return findings;
|
|
924
|
+
const hasDeps = stack.database || [...files.values()].some(c => c.match(/redis|mongodb|postgresql|mysql/i));
|
|
925
|
+
const hasHealthCheck = [...files.values()].some(c =>
|
|
926
|
+
c.match(/waitForDb|checkConnection|db\.connect.*catch|ping.*redis|mongoose\.connect.*catch/i)
|
|
927
|
+
);
|
|
928
|
+
if (hasDeps && !hasHealthCheck) {
|
|
929
|
+
findings.push({ ruleId: 'REL-OPS-006', category: 'reliability', severity: 'medium',
|
|
930
|
+
title: 'Server starts without verifying database/cache connectivity',
|
|
931
|
+
description: 'Verify all dependencies are reachable before accepting traffic. Fail fast on startup rather than crashing on first request.', fix: null });
|
|
932
|
+
}
|
|
933
|
+
return findings;
|
|
934
|
+
},
|
|
935
|
+
},
|
|
936
|
+
|
|
937
|
+
// REL-EH-005: Async event listener without error handling
|
|
938
|
+
{ id: 'REL-EH-005', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Async Event Listener Without Error Handling',
|
|
939
|
+
check({ files }) {
|
|
940
|
+
const findings = [];
|
|
941
|
+
for (const [fp, c] of files) {
|
|
942
|
+
if (!isSourceFile(fp)) continue;
|
|
943
|
+
const lines = c.split('\n');
|
|
944
|
+
for (let i = 0; i < lines.length; i++) {
|
|
945
|
+
if (lines[i].match(/\.on\(['"].*['"],\s*async/) || lines[i].match(/\.on\(["`].*["`],\s*async/)) {
|
|
946
|
+
const handler = lines.slice(i, i + 20).join('\n');
|
|
947
|
+
if (!handler.match(/try\s*\{|\.catch\(|Promise\.reject/)) {
|
|
948
|
+
findings.push({ ruleId: 'REL-EH-005', category: 'reliability', severity: 'high', title: 'Async event listener without try/catch — unhandled promise rejection crashes Node', description: 'Wrap async event handlers in try/catch. Unhandled promise rejections in event listeners cause process crashes in Node.js.', file: fp, line: i + 1, fix: null });
|
|
949
|
+
}
|
|
950
|
+
}
|
|
951
|
+
}
|
|
952
|
+
}
|
|
953
|
+
return findings;
|
|
954
|
+
},
|
|
955
|
+
},
|
|
956
|
+
|
|
957
|
+
// REL-EH-006: Missing finally in resource acquisition
|
|
958
|
+
{ id: 'REL-EH-006', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Resource Acquired Without finally Block',
|
|
959
|
+
check({ files }) {
|
|
960
|
+
const findings = [];
|
|
961
|
+
for (const [fp, c] of files) {
|
|
962
|
+
if (!isSourceFile(fp)) continue;
|
|
963
|
+
const lines = c.split('\n');
|
|
964
|
+
for (let i = 0; i < lines.length; i++) {
|
|
965
|
+
if (lines[i].match(/await\s+\w+\.(connect|acquire|lock|begin|open)\(/i)) {
|
|
966
|
+
const block = lines.slice(i, i + 30).join('\n');
|
|
967
|
+
if (!block.match(/finally\s*\{/) && !block.match(/using\s+/)) {
|
|
968
|
+
findings.push({ ruleId: 'REL-EH-006', category: 'reliability', severity: 'medium', title: 'Resource acquired without finally block — resource leak on error', description: 'Use try/finally to ensure resources (connections, locks, file handles) are released even when exceptions occur.', file: fp, line: i + 1, fix: null });
|
|
969
|
+
}
|
|
970
|
+
}
|
|
971
|
+
}
|
|
972
|
+
}
|
|
973
|
+
return findings;
|
|
974
|
+
},
|
|
975
|
+
},
|
|
976
|
+
|
|
977
|
+
// REL-EH-007: Empty catch block
|
|
978
|
+
{ id: 'REL-EH-007', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Empty Catch Block (Swallowing Errors)',
|
|
979
|
+
check({ files }) {
|
|
980
|
+
const findings = [];
|
|
981
|
+
for (const [fp, c] of files) {
|
|
982
|
+
if (!isSourceFile(fp)) continue;
|
|
983
|
+
const lines = c.split('\n');
|
|
984
|
+
for (let i = 0; i < lines.length; i++) {
|
|
985
|
+
if (lines[i].match(/}\s*catch\s*\(\w+\)\s*\{?\s*$/) || lines[i].match(/\}\s*catch\s*\{\s*$/)) {
|
|
986
|
+
const nextLine = lines[i + 1] || '';
|
|
987
|
+
if (nextLine.match(/^\s*\}/) || nextLine.trim() === '') {
|
|
988
|
+
findings.push({ ruleId: 'REL-EH-007', category: 'reliability', severity: 'high', title: 'Empty catch block — errors silently swallowed', description: 'Never use empty catch blocks. At minimum log the error. Silent failures make debugging nearly impossible.', file: fp, line: i + 1, fix: null });
|
|
989
|
+
}
|
|
990
|
+
}
|
|
991
|
+
}
|
|
992
|
+
}
|
|
993
|
+
return findings;
|
|
994
|
+
},
|
|
995
|
+
},
|
|
996
|
+
|
|
997
|
+
// REL-TEST-006: No integration tests
|
|
998
|
+
{ id: 'REL-TEST-006', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Integration Tests',
|
|
999
|
+
check({ files, stack }) {
|
|
1000
|
+
const findings = [];
|
|
1001
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1002
|
+
const hasIntegration = [...files.keys()].some(f => f.match(/integration|e2e|api.*test|test.*api/i));
|
|
1003
|
+
const hasSupertest = 'supertest' in allDeps;
|
|
1004
|
+
const hasDB = stack.database || Object.keys(allDeps).some(d => d.match(/mongoose|sequelize|prisma|pg|mysql/i));
|
|
1005
|
+
if (hasDB && !hasIntegration && !hasSupertest) {
|
|
1006
|
+
findings.push({ ruleId: 'REL-TEST-006', category: 'reliability', severity: 'medium', title: 'No integration tests detected — API endpoints untested end-to-end', description: 'Add supertest integration tests that exercise the full request/response cycle including database. Unit tests alone miss integration bugs.', fix: null });
|
|
1007
|
+
}
|
|
1008
|
+
return findings;
|
|
1009
|
+
},
|
|
1010
|
+
},
|
|
1011
|
+
|
|
1012
|
+
// REL-TEST-007: Tests with hardcoded database URLs
|
|
1013
|
+
{ id: 'REL-TEST-007', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Hardcoded Database URL in Tests',
|
|
1014
|
+
check({ files }) {
|
|
1015
|
+
const findings = [];
|
|
1016
|
+
for (const [fp, c] of files) {
|
|
1017
|
+
if (!fp.match(/\.(test|spec)\.(js|ts|jsx|tsx)$/)) continue;
|
|
1018
|
+
const lines = c.split('\n');
|
|
1019
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1020
|
+
if (lines[i].match(/mongodb:\/\/localhost|postgresql:\/\/localhost|mysql:\/\/localhost/)) {
|
|
1021
|
+
findings.push({ ruleId: 'REL-TEST-007', category: 'reliability', severity: 'medium', title: 'Hardcoded database URL in test file — tests fail in CI without local DB', description: 'Use environment variables for DB URLs in tests. Use testcontainers or in-memory databases for portable test infrastructure.', file: fp, line: i + 1, fix: null });
|
|
1022
|
+
}
|
|
1023
|
+
}
|
|
1024
|
+
}
|
|
1025
|
+
return findings;
|
|
1026
|
+
},
|
|
1027
|
+
},
|
|
1028
|
+
|
|
1029
|
+
// REL-TEST-008: No load/stress test configuration
|
|
1030
|
+
{ id: 'REL-TEST-008', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'No Load Testing Configuration',
|
|
1031
|
+
check({ files, stack }) {
|
|
1032
|
+
const findings = [];
|
|
1033
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1034
|
+
const hasLoadTest = ['k6', 'artillery', 'loadtest', 'autocannon', 'vegeta'].some(d => d in allDeps) || [...files.keys()].some(f => f.match(/k6|artillery|loadtest|stress/i));
|
|
1035
|
+
if (!hasLoadTest && Object.keys(allDeps).length > 20) {
|
|
1036
|
+
findings.push({ ruleId: 'REL-TEST-008', category: 'reliability', severity: 'low', title: 'No load testing tool detected — capacity limits unknown', description: 'Add k6 or Artillery load tests. Run against staging to find breaking points before production traffic hits them.', fix: null });
|
|
1037
|
+
}
|
|
1038
|
+
return findings;
|
|
1039
|
+
},
|
|
1040
|
+
},
|
|
1041
|
+
|
|
1042
|
+
// REL-MON-006: No structured logging format
|
|
1043
|
+
{ id: 'REL-MON-006', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Structured Logging Library',
|
|
1044
|
+
check({ files, stack }) {
|
|
1045
|
+
const findings = [];
|
|
1046
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1047
|
+
const hasStructured = ['pino', 'winston', 'bunyan', 'log4js', 'signale', '@aws-lambda-powertools/logger'].some(d => d in allDeps);
|
|
1048
|
+
const hasConsoleOnly = [...files.values()].some(c => c.match(/console\.(log|error|warn|info)/));
|
|
1049
|
+
if (!hasStructured && hasConsoleOnly) {
|
|
1050
|
+
findings.push({ ruleId: 'REL-MON-006', category: 'reliability', severity: 'medium', title: 'Using console.log for logging — no structured logging library', description: 'Use pino or winston for structured JSON logging. console.log outputs unstructured text that cannot be queried in log aggregation tools.', fix: null });
|
|
1051
|
+
}
|
|
1052
|
+
return findings;
|
|
1053
|
+
},
|
|
1054
|
+
},
|
|
1055
|
+
|
|
1056
|
+
// REL-MON-007: No request logging middleware
|
|
1057
|
+
{ id: 'REL-MON-007', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No HTTP Request Logging',
|
|
1058
|
+
check({ files, stack }) {
|
|
1059
|
+
const findings = [];
|
|
1060
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1061
|
+
const hasLogger = ['morgan', 'pino-http', 'express-pino-logger', 'koa-logger', 'fastify-log'].some(d => d in allDeps) || [...files.values()].some(c => c.match(/morgan|pino-http|req\.log/i));
|
|
1062
|
+
if (stack.framework && !hasLogger) {
|
|
1063
|
+
findings.push({ ruleId: 'REL-MON-007', category: 'reliability', severity: 'medium', title: 'No HTTP request logging middleware — requests not logged', description: 'Add morgan or pino-http. Request logs are essential for debugging production issues and calculating error rates.', fix: null });
|
|
1064
|
+
}
|
|
1065
|
+
return findings;
|
|
1066
|
+
},
|
|
1067
|
+
},
|
|
1068
|
+
|
|
1069
|
+
// REL-MON-008: No health check endpoint
|
|
1070
|
+
{ id: 'REL-MON-008', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Health Check Endpoint',
|
|
1071
|
+
check({ files }) {
|
|
1072
|
+
const findings = [];
|
|
1073
|
+
const hasHealth = [...files.values()].some(c => c.match(/\/health|\/healthz|\/ping|\/status|healthcheck/i));
|
|
1074
|
+
const hasApp = [...files.keys()].some(f => f.endsWith('package.json'));
|
|
1075
|
+
if (hasApp && !hasHealth) {
|
|
1076
|
+
findings.push({ ruleId: 'REL-MON-008', category: 'reliability', severity: 'medium', title: 'No health check endpoint — load balancers cannot detect unhealthy instances', description: 'Add GET /health endpoint returning 200 with status. Used by Kubernetes liveness probes, load balancers, and uptime monitors.', fix: null });
|
|
1077
|
+
}
|
|
1078
|
+
return findings;
|
|
1079
|
+
},
|
|
1080
|
+
},
|
|
1081
|
+
|
|
1082
|
+
// REL-MON-009: No readiness check separate from liveness
|
|
1083
|
+
{ id: 'REL-MON-009', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'No Readiness Check Separate From Liveness',
|
|
1084
|
+
check({ files }) {
|
|
1085
|
+
const findings = [];
|
|
1086
|
+
const hasHealth = [...files.values()].some(c => c.match(/\/healthz?|healthcheck/i));
|
|
1087
|
+
const hasReady = [...files.values()].some(c => c.match(/\/ready|readiness|\/readyz/i));
|
|
1088
|
+
if (hasHealth && !hasReady) {
|
|
1089
|
+
findings.push({ ruleId: 'REL-MON-009', category: 'reliability', severity: 'low', title: 'No /ready endpoint — Kubernetes cannot distinguish initializing from failed pods', description: "Add /ready endpoint that checks database connectivity. Kubernetes uses /ready to decide whether to send traffic, /health to decide whether to restart.", fix: null });
|
|
1090
|
+
}
|
|
1091
|
+
return findings;
|
|
1092
|
+
},
|
|
1093
|
+
},
|
|
1094
|
+
|
|
1095
|
+
// REL-DB-006: N+1 query in loop
|
|
1096
|
+
{ id: 'REL-DB-006', category: 'reliability', severity: 'high', confidence: 'likely', title: 'N+1 Query Pattern — Database Query in Loop',
|
|
1097
|
+
check({ files }) {
|
|
1098
|
+
const findings = [];
|
|
1099
|
+
for (const [fp, c] of files) {
|
|
1100
|
+
if (!isSourceFile(fp)) continue;
|
|
1101
|
+
const lines = c.split('\n');
|
|
1102
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1103
|
+
if (lines[i].match(/for\s*\(|forEach\s*\(|\.map\s*\(/) && !lines[i].match(/Promise\.all/)) {
|
|
1104
|
+
const block = lines.slice(i, i + 10).join('\n');
|
|
1105
|
+
if (block.match(/await.*\.find|await.*\.get|await.*\.query|await.*findById|await.*findOne/i)) {
|
|
1106
|
+
findings.push({ ruleId: 'REL-DB-006', category: 'reliability', severity: 'high', title: 'Database query inside loop — N+1 query pattern', description: 'Load all related records with a single query using $in, JOIN, or include/populate. N+1 queries cause exponential performance degradation.', file: fp, line: i + 1, fix: null });
|
|
1107
|
+
}
|
|
1108
|
+
}
|
|
1109
|
+
}
|
|
1110
|
+
}
|
|
1111
|
+
return findings;
|
|
1112
|
+
},
|
|
1113
|
+
},
|
|
1114
|
+
|
|
1115
|
+
// REL-DB-007: No connection timeout
|
|
1116
|
+
{ id: 'REL-DB-007', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Database Connection Timeout',
|
|
1117
|
+
check({ files }) {
|
|
1118
|
+
const findings = [];
|
|
1119
|
+
for (const [fp, c] of files) {
|
|
1120
|
+
if (!isSourceFile(fp)) continue;
|
|
1121
|
+
if (c.match(/mongoose\.connect|new Pool|createPool|createConnection/i)) {
|
|
1122
|
+
if (!c.match(/connectTimeoutMS|connectionTimeoutMillis|timeout|socketTimeout|idleTimeoutMillis/i)) {
|
|
1123
|
+
findings.push({ ruleId: 'REL-DB-007', category: 'reliability', severity: 'medium', title: 'Database connection without timeout configuration', description: 'Set connectTimeoutMS and socketTimeoutMS. Without timeouts, hung connections block requests indefinitely.', file: fp, fix: null });
|
|
1124
|
+
}
|
|
1125
|
+
}
|
|
1126
|
+
}
|
|
1127
|
+
return findings;
|
|
1128
|
+
},
|
|
1129
|
+
},
|
|
1130
|
+
|
|
1131
|
+
// REL-RES-005: Missing circuit breaker for external calls
|
|
1132
|
+
{ id: 'REL-RES-005', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Circuit Breaker for External Service Calls',
|
|
1133
|
+
check({ files, stack }) {
|
|
1134
|
+
const findings = [];
|
|
1135
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1136
|
+
const hasCircuitBreaker = ['opossum', 'cockatiel', 'retry', 'p-retry', 'axios-retry', 'resilience4j'].some(d => d in allDeps);
|
|
1137
|
+
const hasExternalCalls = [...files.values()].some(c => c.match(/axios\.|fetch\(|got\.|superagent\.|https?\.request/i));
|
|
1138
|
+
if (hasExternalCalls && !hasCircuitBreaker) {
|
|
1139
|
+
findings.push({ ruleId: 'REL-RES-005', category: 'reliability', severity: 'medium', title: 'External HTTP calls without circuit breaker or retry logic', description: 'Add opossum or cockatiel for circuit breaking. When external services fail, circuit breakers prevent cascade failures across your entire system.', fix: null });
|
|
1140
|
+
}
|
|
1141
|
+
return findings;
|
|
1142
|
+
},
|
|
1143
|
+
},
|
|
1144
|
+
|
|
1145
|
+
// REL-RES-006: No rate limiting on API client
|
|
1146
|
+
{ id: 'REL-RES-006', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Rate Limit Handling for External API Calls',
|
|
1147
|
+
check({ files }) {
|
|
1148
|
+
const findings = [];
|
|
1149
|
+
for (const [fp, c] of files) {
|
|
1150
|
+
if (!isSourceFile(fp)) continue;
|
|
1151
|
+
if (c.match(/axios\.|fetch\(|got\./i) && c.match(/openai|stripe|twilio|sendgrid|anthropic/i)) {
|
|
1152
|
+
if (!c.match(/rate.*limit|429|retry.*after|backoff|throttle/i)) {
|
|
1153
|
+
findings.push({ ruleId: 'REL-RES-006', category: 'reliability', severity: 'medium', title: 'External API calls without 429/rate-limit handling', description: 'Handle 429 responses with exponential backoff. External APIs impose rate limits — unhandled 429s cause service degradation.', file: fp, fix: null });
|
|
1154
|
+
}
|
|
1155
|
+
}
|
|
1156
|
+
}
|
|
1157
|
+
return findings;
|
|
1158
|
+
},
|
|
1159
|
+
},
|
|
1160
|
+
|
|
1161
|
+
// REL-RES-007: Synchronous file operations blocking event loop
|
|
1162
|
+
{ id: 'REL-RES-007', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Synchronous File I/O Blocking Event Loop',
|
|
1163
|
+
check({ files }) {
|
|
1164
|
+
const findings = [];
|
|
1165
|
+
for (const [fp, c] of files) {
|
|
1166
|
+
if (!isSourceFile(fp)) continue;
|
|
1167
|
+
const lines = c.split('\n');
|
|
1168
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1169
|
+
if (lines[i].match(/fs\.readFileSync|fs\.writeFileSync|fs\.appendFileSync|fs\.readdirSync/) && !lines[i].match(/startup|init|config|bootstrap/i)) {
|
|
1170
|
+
findings.push({ ruleId: 'REL-RES-007', category: 'reliability', severity: 'high', title: 'Synchronous file operation in request handler — blocks event loop for all requests', description: 'Use async fs.readFile with await. Sync FS calls block the entire Node.js event loop, halting all concurrent requests.', file: fp, line: i + 1, fix: null });
|
|
1171
|
+
}
|
|
1172
|
+
}
|
|
1173
|
+
}
|
|
1174
|
+
return findings;
|
|
1175
|
+
},
|
|
1176
|
+
},
|
|
1177
|
+
|
|
1178
|
+
// REL-OPS-007: Hard restart instead of graceful shutdown
|
|
1179
|
+
{ id: 'REL-OPS-007', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Graceful Shutdown Handler',
|
|
1180
|
+
check({ files }) {
|
|
1181
|
+
const findings = [];
|
|
1182
|
+
const hasShutdown = [...files.values()].some(c => c.match(/process\.on\(['"]SIGTERM|process\.on\(['"]SIGINT|graceful.*shutdown|server\.close/i));
|
|
1183
|
+
const hasApp = [...files.values()].some(c => c.match(/express\(\)|fastify\(\)|http\.createServer|app\.listen/i));
|
|
1184
|
+
if (hasApp && !hasShutdown) {
|
|
1185
|
+
findings.push({ ruleId: 'REL-OPS-007', category: 'reliability', severity: 'medium', title: 'No SIGTERM/SIGINT handler — container stops abruptly, in-flight requests dropped', description: "Add process.on('SIGTERM', () => server.close()) to gracefully stop accepting new requests and finish in-flight ones before exit.", fix: null });
|
|
1186
|
+
}
|
|
1187
|
+
return findings;
|
|
1188
|
+
},
|
|
1189
|
+
},
|
|
1190
|
+
|
|
1191
|
+
// REL-OPS-008: No request timeout
|
|
1192
|
+
{ id: 'REL-OPS-008', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No HTTP Request Timeout',
|
|
1193
|
+
check({ files }) {
|
|
1194
|
+
const findings = [];
|
|
1195
|
+
for (const [fp, c] of files) {
|
|
1196
|
+
if (!isSourceFile(fp)) continue;
|
|
1197
|
+
if (c.match(/express\(\)|app\.use\(|router\./i)) {
|
|
1198
|
+
if (!c.match(/timeout|requestTimeout|server\.setTimeout|helmet.*hsts/i)) {
|
|
1199
|
+
findings.push({ ruleId: 'REL-OPS-008', category: 'reliability', severity: 'high', title: 'Express app without request timeout — slow requests hold connections indefinitely', description: 'Add connect-timeout middleware or server.setTimeout(). Without timeouts, slow DB queries hold connections and exhaust the connection pool.', file: fp, fix: null });
|
|
1200
|
+
}
|
|
1201
|
+
}
|
|
1202
|
+
}
|
|
1203
|
+
return findings;
|
|
1204
|
+
},
|
|
1205
|
+
},
|
|
1206
|
+
|
|
1207
|
+
// REL-OPS-009: No readiness delay for K8s
|
|
1208
|
+
{ id: 'REL-OPS-009', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Startup Readiness Delay for Kubernetes',
|
|
1209
|
+
check({ files }) {
|
|
1210
|
+
const findings = [];
|
|
1211
|
+
for (const [fp, c] of files) {
|
|
1212
|
+
if (!fp.match(/\.(yaml|yml)$/)) continue;
|
|
1213
|
+
if (c.match(/kind:\s*Deployment/) && c.match(/readinessProbe:/) && !c.match(/initialDelaySeconds:/)) {
|
|
1214
|
+
findings.push({ ruleId: 'REL-OPS-009', category: 'reliability', severity: 'medium', title: 'Kubernetes readiness probe without initialDelaySeconds — may probe before app starts', description: 'Add initialDelaySeconds: 10 to readinessProbe. Without startup delay, probes fail before the app is ready, causing unnecessary pod restarts.', file: fp, fix: null });
|
|
1215
|
+
}
|
|
1216
|
+
}
|
|
1217
|
+
return findings;
|
|
1218
|
+
},
|
|
1219
|
+
},
|
|
1220
|
+
|
|
1221
|
+
// REL-OPS-010: Missing try/catch in async middleware
|
|
1222
|
+
{ id: 'REL-OPS-010', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Async Middleware Without Error Handling',
|
|
1223
|
+
check({ files }) {
|
|
1224
|
+
const findings = [];
|
|
1225
|
+
for (const [fp, c] of files) {
|
|
1226
|
+
if (!isSourceFile(fp)) continue;
|
|
1227
|
+
const lines = c.split('\n');
|
|
1228
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1229
|
+
if (lines[i].match(/app\.(use|get|post|put|delete|patch)\s*\(.*async\s*(req|request)/)) {
|
|
1230
|
+
const handler = lines.slice(i, i + 25).join('\n');
|
|
1231
|
+
if (!handler.match(/try\s*\{|\.catch\(|asyncHandler|expressAsync|wrapAsync/)) {
|
|
1232
|
+
findings.push({ ruleId: 'REL-OPS-010', category: 'reliability', severity: 'high', title: 'Async route handler without try/catch — unhandled rejection crashes Express', description: 'Wrap async handlers: app.use(asyncHandler(async (req, res) => { ... })). Or use express-async-errors package.', file: fp, line: i + 1, fix: null });
|
|
1233
|
+
}
|
|
1234
|
+
}
|
|
1235
|
+
}
|
|
1236
|
+
}
|
|
1237
|
+
return findings;
|
|
1238
|
+
},
|
|
1239
|
+
},
|
|
1240
|
+
|
|
1241
|
+
// REL-EH-008: Unhandled promise rejection in top-level code
|
|
1242
|
+
{ id: 'REL-EH-008', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Top-Level Async Code Without Error Handling',
|
|
1243
|
+
check({ files }) {
|
|
1244
|
+
const findings = [];
|
|
1245
|
+
for (const [fp, c] of files) {
|
|
1246
|
+
if (!isSourceFile(fp)) continue;
|
|
1247
|
+
const lines = c.split('\n');
|
|
1248
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1249
|
+
if (lines[i].match(/^(?:const|let|var)?\s*\w+\s*\(\)\.then\(/)) {
|
|
1250
|
+
if (!lines.slice(i, i + 10).join('\n').match(/\.catch\(|process\.on.*uncaughtRejection/)) {
|
|
1251
|
+
findings.push({ ruleId: 'REL-EH-008', category: 'reliability', severity: 'high', title: 'Top-level promise chain without .catch() — unhandled rejection crashes Node', description: 'Add .catch(console.error) or process.on("unhandledRejection") handler. Unhandled promise rejections terminate Node.js processes.', file: fp, line: i + 1, fix: null });
|
|
1252
|
+
}
|
|
1253
|
+
}
|
|
1254
|
+
}
|
|
1255
|
+
}
|
|
1256
|
+
return findings;
|
|
1257
|
+
},
|
|
1258
|
+
},
|
|
1259
|
+
|
|
1260
|
+
// REL-EH-009: Rethrowing wrong error type
|
|
1261
|
+
{ id: 'REL-EH-009', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Catch Block Overwrites Original Error',
|
|
1262
|
+
check({ files }) {
|
|
1263
|
+
const findings = [];
|
|
1264
|
+
for (const [fp, c] of files) {
|
|
1265
|
+
if (!isSourceFile(fp)) continue;
|
|
1266
|
+
const lines = c.split('\n');
|
|
1267
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1268
|
+
if (lines[i].match(/catch\s*\(\s*(\w+)\s*\)/)) {
|
|
1269
|
+
const errVar = lines[i].match(/catch\s*\(\s*(\w+)\s*\)/)[1];
|
|
1270
|
+
const block = lines.slice(i, i + 10).join('\n');
|
|
1271
|
+
if (block.match(new RegExp(`throw new \\w+Error.*${errVar}|throw new Error\\(`)) && !block.match(/cause:|${errVar}\.message|${errVar}\.stack/)) {
|
|
1272
|
+
findings.push({ ruleId: 'REL-EH-009', category: 'reliability', severity: 'medium', title: 'Catch block creates new error without preserving original — stack trace lost', description: 'Preserve original error: throw new AppError("msg", { cause: err }). Loss of original stack trace makes debugging production issues very difficult.', file: fp, line: i + 1, fix: null });
|
|
1273
|
+
}
|
|
1274
|
+
}
|
|
1275
|
+
}
|
|
1276
|
+
}
|
|
1277
|
+
return findings;
|
|
1278
|
+
},
|
|
1279
|
+
},
|
|
1280
|
+
|
|
1281
|
+
// REL-TEST-009: No contract testing for microservices
|
|
1282
|
+
{ id: 'REL-TEST-009', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Contract Testing for Service Integrations',
|
|
1283
|
+
check({ files, stack }) {
|
|
1284
|
+
const findings = [];
|
|
1285
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1286
|
+
const hasMicroservices = [...files.values()].some(c => c.match(/axios\.|fetch\(|got\./i) && c.match(/internal|service|api\./i));
|
|
1287
|
+
const hasContract = ['@pact-foundation/pact', 'pact', 'dredd'].some(d => d in allDeps);
|
|
1288
|
+
if (hasMicroservices && !hasContract) {
|
|
1289
|
+
findings.push({ ruleId: 'REL-TEST-009', category: 'reliability', severity: 'medium', title: 'Microservice integrations without contract testing', description: 'Add Pact contract tests for service-to-service APIs. Contract tests catch breaking API changes before they reach production.', fix: null });
|
|
1290
|
+
}
|
|
1291
|
+
return findings;
|
|
1292
|
+
},
|
|
1293
|
+
},
|
|
1294
|
+
|
|
1295
|
+
// REL-MON-010: No P95/P99 latency tracking
|
|
1296
|
+
{ id: 'REL-MON-010', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Percentile Latency Tracking',
|
|
1297
|
+
check({ files, stack }) {
|
|
1298
|
+
const findings = [];
|
|
1299
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1300
|
+
const hasMetrics = ['prom-client', 'hot-shots', 'node-statsd', 'datadog-metrics', '@opentelemetry/sdk-metrics'].some(d => d in allDeps);
|
|
1301
|
+
if (!hasMetrics && Object.keys({ ...stack.dependencies }).length > 10) {
|
|
1302
|
+
findings.push({ ruleId: 'REL-MON-010', category: 'reliability', severity: 'medium', title: 'No metrics library for P95/P99 latency tracking', description: 'Add prom-client and expose /metrics endpoint. Alert on P99 latency, not just averages. Averages hide tail latency that affects 1% of users.', fix: null });
|
|
1303
|
+
}
|
|
1304
|
+
return findings;
|
|
1305
|
+
},
|
|
1306
|
+
},
|
|
1307
|
+
|
|
1308
|
+
// REL-MON-011: No alerting on error rate
|
|
1309
|
+
{ id: 'REL-MON-011', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Error Rate Alerting',
|
|
1310
|
+
check({ files }) {
|
|
1311
|
+
const findings = [];
|
|
1312
|
+
const allCode = [...files.values()].join('\n');
|
|
1313
|
+
const hasErrorAlert = allCode.match(/error.*rate|error.*threshold|5xx|error_count.*alert|sentry.*alert/i);
|
|
1314
|
+
if (!hasErrorAlert) {
|
|
1315
|
+
findings.push({ ruleId: 'REL-MON-011', category: 'reliability', severity: 'medium', title: 'No error rate alerting configured', description: 'Alert when error rate exceeds 1% of requests. Error rate spikes indicate incidents before users report them. Configure in Datadog, PagerDuty, or CloudWatch.', fix: null });
|
|
1316
|
+
}
|
|
1317
|
+
return findings;
|
|
1318
|
+
},
|
|
1319
|
+
},
|
|
1320
|
+
|
|
1321
|
+
// REL-DB-008: Optimistic locking not used for concurrent updates
|
|
1322
|
+
{ id: 'REL-DB-008', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Optimistic Locking for Concurrent Updates',
|
|
1323
|
+
check({ files }) {
|
|
1324
|
+
const findings = [];
|
|
1325
|
+
for (const [fp, c] of files) {
|
|
1326
|
+
if (!isSourceFile(fp)) continue;
|
|
1327
|
+
if (c.match(/update|UPDATE/i) && c.match(/balance|inventory|stock|quantity|count/i)) {
|
|
1328
|
+
if (!c.match(/version|lock|transaction|FOR UPDATE|optimistic|rowVersion/i)) {
|
|
1329
|
+
findings.push({ ruleId: 'REL-DB-008', category: 'reliability', severity: 'medium', title: 'Updating numeric counter/balance without optimistic locking', description: 'Use UPDATE ... WHERE version = $v or SELECT FOR UPDATE. Without locking, concurrent updates cause lost updates (race condition).', file: fp, fix: null });
|
|
1330
|
+
}
|
|
1331
|
+
}
|
|
1332
|
+
}
|
|
1333
|
+
return findings;
|
|
1334
|
+
},
|
|
1335
|
+
},
|
|
1336
|
+
|
|
1337
|
+
// REL-DB-009: Cascade delete without soft delete
|
|
1338
|
+
{ id: 'REL-DB-009', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Cascade Delete Without Soft Delete Fallback',
|
|
1339
|
+
check({ files }) {
|
|
1340
|
+
const findings = [];
|
|
1341
|
+
for (const [fp, c] of files) {
|
|
1342
|
+
if (!isSourceFile(fp) && !fp.match(/\.(sql|prisma)$/)) continue;
|
|
1343
|
+
if (c.match(/ON DELETE CASCADE|onDelete.*CASCADE|cascade.*true/i)) {
|
|
1344
|
+
if (!c.match(/deletedAt|soft_delete|paranoid/i)) {
|
|
1345
|
+
findings.push({ ruleId: 'REL-DB-009', category: 'reliability', severity: 'high', title: 'Cascade delete without soft delete — deleting parent permanently destroys children', description: 'Add soft delete (deletedAt column) before cascades. Accidental parent deletion cannot be undone without cascade if using hard deletes.', file: fp, fix: null });
|
|
1346
|
+
}
|
|
1347
|
+
}
|
|
1348
|
+
}
|
|
1349
|
+
return findings;
|
|
1350
|
+
},
|
|
1351
|
+
},
|
|
1352
|
+
|
|
1353
|
+
// REL-RES-008: No exponential backoff on retry
|
|
1354
|
+
{ id: 'REL-RES-008', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Retry Without Exponential Backoff',
|
|
1355
|
+
check({ files }) {
|
|
1356
|
+
const findings = [];
|
|
1357
|
+
for (const [fp, c] of files) {
|
|
1358
|
+
if (!isSourceFile(fp)) continue;
|
|
1359
|
+
const lines = c.split('\n');
|
|
1360
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1361
|
+
if (lines[i].match(/retry|retries|maxRetry/i) && !lines[i].match(/\/\//)) {
|
|
1362
|
+
const block = lines.slice(i, i + 15).join('\n');
|
|
1363
|
+
if (!block.match(/backoff|exponential|Math\.pow|2\s*\*\*|delay.*\*.*\d/i)) {
|
|
1364
|
+
findings.push({ ruleId: 'REL-RES-008', category: 'reliability', severity: 'medium', title: 'Retry logic without exponential backoff — retries amplify overload', description: 'Use exponential backoff: delay = Math.min(1000 * 2 ** attempt, 30000). Fixed-interval retries can amplify load on a struggling service.', file: fp, line: i + 1, fix: null });
|
|
1365
|
+
}
|
|
1366
|
+
}
|
|
1367
|
+
}
|
|
1368
|
+
}
|
|
1369
|
+
return findings;
|
|
1370
|
+
},
|
|
1371
|
+
},
|
|
1372
|
+
|
|
1373
|
+
// REL-RES-009: No fallback for failed CDN resources
|
|
1374
|
+
{ id: 'REL-RES-009', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Fallback for CDN Script Failure',
|
|
1375
|
+
check({ files }) {
|
|
1376
|
+
const findings = [];
|
|
1377
|
+
for (const [fp, c] of files) {
|
|
1378
|
+
if (!fp.match(/\.(html)$/)) continue;
|
|
1379
|
+
const lines = c.split('\n');
|
|
1380
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1381
|
+
if (lines[i].match(/<script[^>]+src=["']https?:\/\/cdn/i)) {
|
|
1382
|
+
if (!lines.slice(i + 1, i + 5).join('\n').match(/window\.\w+\s*\|\|/)) {
|
|
1383
|
+
findings.push({ ruleId: 'REL-RES-009', category: 'reliability', severity: 'medium', title: 'CDN script without local fallback — CDN outage breaks your app', description: 'Add local fallback: window.jQuery || document.write(). CDN outages happen; local fallbacks maintain availability.', file: fp, line: i + 1, fix: null });
|
|
1384
|
+
}
|
|
1385
|
+
}
|
|
1386
|
+
}
|
|
1387
|
+
}
|
|
1388
|
+
return findings;
|
|
1389
|
+
},
|
|
1390
|
+
},
|
|
1391
|
+
|
|
1392
|
+
// REL-OPS-011: No connection pool monitoring
|
|
1393
|
+
{ id: 'REL-OPS-011', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Database Pool Size Monitoring',
|
|
1394
|
+
check({ files }) {
|
|
1395
|
+
const findings = [];
|
|
1396
|
+
const allCode = [...files.values()].join('\n');
|
|
1397
|
+
const hasPool = allCode.match(/pool:|poolSize|connectionLimit|pg\.Pool/i);
|
|
1398
|
+
const hasPoolMonitoring = allCode.match(/pool\.totalCount|pool\.idleCount|pool\.waitingCount|pool.*size.*metric|pool.*exhausted/i);
|
|
1399
|
+
if (hasPool && !hasPoolMonitoring) {
|
|
1400
|
+
findings.push({ ruleId: 'REL-OPS-011', category: 'reliability', severity: 'medium', title: 'Connection pool configured without pool size monitoring', description: 'Monitor pool utilization: expose pool.totalCount, pool.idleCount, pool.waitingCount as metrics. Pool exhaustion causes request timeouts.', fix: null });
|
|
1401
|
+
}
|
|
1402
|
+
return findings;
|
|
1403
|
+
},
|
|
1404
|
+
},
|
|
1405
|
+
|
|
1406
|
+
// REL-OPS-012: Server port hardcoded
|
|
1407
|
+
{ id: 'REL-OPS-012', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'Server Port Hardcoded',
|
|
1408
|
+
check({ files }) {
|
|
1409
|
+
const findings = [];
|
|
1410
|
+
for (const [fp, c] of files) {
|
|
1411
|
+
if (!isSourceFile(fp)) continue;
|
|
1412
|
+
const lines = c.split('\n');
|
|
1413
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1414
|
+
if (lines[i].match(/\.listen\s*\(\s*\d{4,5}\s*[,)]/i) && !lines[i].match(/process\.env\.|PORT/)) {
|
|
1415
|
+
findings.push({ ruleId: 'REL-OPS-012', category: 'reliability', severity: 'low', title: 'Server listening on hardcoded port — not configurable per environment', description: 'Use process.env.PORT || 3000. Hardcoded ports conflict with other services in local dev and cannot be configured in container orchestration.', file: fp, line: i + 1, fix: null });
|
|
1416
|
+
}
|
|
1417
|
+
}
|
|
1418
|
+
}
|
|
1419
|
+
return findings;
|
|
1420
|
+
},
|
|
1421
|
+
},
|
|
1422
|
+
|
|
1423
|
+
// REL-EH-010: No process-level uncaught exception handler
|
|
1424
|
+
{ id: 'REL-EH-010', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No Process-Level Uncaught Exception Handler',
|
|
1425
|
+
check({ files }) {
|
|
1426
|
+
const findings = [];
|
|
1427
|
+
const hasHandler = [...files.values()].some(c => c.match(/process\.on\(['"]uncaughtException|process\.on\(['"]unhandledRejection/));
|
|
1428
|
+
const hasApp = [...files.values()].some(c => c.match(/express\(\)|http\.createServer|app\.listen/i));
|
|
1429
|
+
if (hasApp && !hasHandler) {
|
|
1430
|
+
findings.push({ ruleId: 'REL-EH-010', category: 'reliability', severity: 'high', title: 'No process.on("uncaughtException") handler — unhandled errors crash the process silently', description: "Add process.on('uncaughtException', (err) => { logger.error(err); process.exit(1); }). Log the error before exiting so it's captured in logs.", fix: null });
|
|
1431
|
+
}
|
|
1432
|
+
return findings;
|
|
1433
|
+
},
|
|
1434
|
+
},
|
|
1435
|
+
|
|
1436
|
+
// REL-TEST-010: No snapshot testing for complex outputs
|
|
1437
|
+
{ id: 'REL-TEST-010', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'No Tests for Edge Cases in Data Transformers',
|
|
1438
|
+
check({ files }) {
|
|
1439
|
+
const findings = [];
|
|
1440
|
+
for (const [fp, c] of files) {
|
|
1441
|
+
if (!isSourceFile(fp) || fp.includes('test') || fp.includes('spec')) continue;
|
|
1442
|
+
if (!fp.match(/transformer|serializer|mapper|formatter|converter/i)) continue;
|
|
1443
|
+
const hasTest = [...files.keys()].some(tf => tf.includes(fp.split('/').pop().replace(/\.(js|ts)$/, '')) && tf.match(/test|spec/));
|
|
1444
|
+
if (!hasTest) {
|
|
1445
|
+
findings.push({ ruleId: 'REL-TEST-010', category: 'reliability', severity: 'low', title: 'Data transformer/serializer without associated test file', description: 'Add tests for transformers with: null input, empty arrays, missing fields, boundary values. Transformers are subtle — tests catch regressions when data format changes.', file: fp, fix: null });
|
|
1446
|
+
}
|
|
1447
|
+
}
|
|
1448
|
+
return findings;
|
|
1449
|
+
},
|
|
1450
|
+
},
|
|
1451
|
+
|
|
1452
|
+
// REL-DB-010: Missing database health check in liveness probe
// Flags /health(z) endpoints whose response window shows no DB/cache client usage.
{ id: 'REL-DB-010', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Health Endpoint Does Not Check Database',
check({ files }) {
const findings = [];
for (const [fp, c] of files) {
if (!isSourceFile(fp)) continue;
// Quick reject: only scan files that mention a health route at all.
if (c.match(/\/health|\/healthz/i)) {
const lines = c.split('\n');
// inHealth tracks whether we are "inside" a health route, line-wise:
// armed when a line mentions /health(z), consumed at the next res.json/res.send.
let inHealth = false;
for (let i = 0; i < lines.length; i++) {
if (lines[i].match(/\/health|\/healthz/i)) inHealth = true;
if (inHealth && lines[i].match(/res\.(json|send)\s*\(\s*\{/)) {
// Inspect a 500-char window after the first literal '/health' occurrence for DB client usage.
// NOTE(review): indexOf is case-sensitive while the detection regex above is /i —
// a file containing only e.g. '/HEALTH' reaches here with indexOf() === -1, making
// the slice degenerate (c.slice(-1, 499)); confirm whether that edge case matters.
// Also note the window is anchored at the first '/health' in the file, not at line i.
if (!c.slice(c.indexOf('/health'), c.indexOf('/health') + 500).match(/db\.|pool\.|mongoose\.|prisma\.|redis\./i)) {
findings.push({ ruleId: 'REL-DB-010', category: 'reliability', severity: 'medium', title: 'Health endpoint does not verify database connectivity', description: 'Check DB: await db.query("SELECT 1") in health handler. Kubernetes will keep sending traffic to pods with dead DB connections if health only returns 200.', file: fp, line: i + 1, fix: null });
}
inHealth = false;
// Only the first response after a health route is examined; remaining lines are skipped.
break;
}
}
}
}
return findings;
},
},
|
|
1476
|
+
|
|
1477
|
+
// REL-RES-010: No timeout for database queries
|
|
1478
|
+
{ id: 'REL-RES-010', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No Query Timeout Configured',
|
|
1479
|
+
check({ files }) {
|
|
1480
|
+
const findings = [];
|
|
1481
|
+
for (const [fp, c] of files) {
|
|
1482
|
+
if (!isSourceFile(fp)) continue;
|
|
1483
|
+
if (c.match(/\.query\(|\.findAll\(|\.findMany\(/i)) {
|
|
1484
|
+
if (!c.match(/timeout|statement_timeout|query_timeout|commandTimeout|maxQueryExecutionTime/i)) {
|
|
1485
|
+
findings.push({ ruleId: 'REL-RES-010', category: 'reliability', severity: 'high', title: 'Database queries without timeout configuration', description: 'Set query timeout: SET statement_timeout TO 5000; or { commandTimeout: 5000 } in Prisma. Slow queries hold connections and eventually exhaust the pool.', file: fp, fix: null });
|
|
1486
|
+
}
|
|
1487
|
+
}
|
|
1488
|
+
}
|
|
1489
|
+
return findings;
|
|
1490
|
+
},
|
|
1491
|
+
},
|
|
1492
|
+
|
|
1493
|
+
// REL-OPS-013: No CPU/memory limits for Node
|
|
1494
|
+
{ id: 'REL-OPS-013', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Node.js Memory Limit Set',
|
|
1495
|
+
check({ files }) {
|
|
1496
|
+
const findings = [];
|
|
1497
|
+
for (const [fp, c] of files) {
|
|
1498
|
+
if (!fp.match(/Dockerfile|docker-compose|\.sh$|package\.json/) ) continue;
|
|
1499
|
+
if (c.match(/node\s+(?!--)/i) && !c.match(/--max-old-space-size|--max_old_space_size/i)) {
|
|
1500
|
+
findings.push({ ruleId: 'REL-OPS-013', category: 'reliability', severity: 'medium', title: 'Node.js started without --max-old-space-size — OOM kill without warning', description: 'Add --max-old-space-size=2048 (or appropriate value). Without it, Node uses default heap limit and gets OOM-killed without graceful shutdown.', file: fp, fix: null });
|
|
1501
|
+
}
|
|
1502
|
+
}
|
|
1503
|
+
return findings;
|
|
1504
|
+
},
|
|
1505
|
+
},
|
|
1506
|
+
|
|
1507
|
+
// REL-OPS-014: Single point of failure — no replication
|
|
1508
|
+
{ id: 'REL-OPS-014', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Single Database Instance (No Replication)',
|
|
1509
|
+
check({ files }) {
|
|
1510
|
+
const findings = [];
|
|
1511
|
+
for (const [fp, c] of files) {
|
|
1512
|
+
if (!fp.match(/\.tf$/)) continue;
|
|
1513
|
+
if (c.match(/aws_db_instance\b/i) && !c.match(/multi_az\s*=\s*true|read_replica|replica/i)) {
|
|
1514
|
+
findings.push({ ruleId: 'REL-OPS-014', category: 'reliability', severity: 'high', title: 'RDS without Multi-AZ or read replica — single point of failure', description: 'Enable multi_az = true for production RDS. Single-AZ databases have ~0.1% monthly downtime; Multi-AZ reduces to <0.01% with automatic failover.', file: fp, fix: null });
|
|
1515
|
+
}
|
|
1516
|
+
}
|
|
1517
|
+
return findings;
|
|
1518
|
+
},
|
|
1519
|
+
},
|
|
1520
|
+
|
|
1521
|
+
// REL-OPS-015: No auto-restart policy for containers
// Flags a docker-compose file whose trailing service block has no restart: policy.
{ id: 'REL-OPS-015', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Container Without Restart Policy',
check({ files }) {
const findings = [];
for (const [fp, c] of files) {
if (!fp.match(/docker-compose\.ya?ml/i)) continue;
const lines = c.split('\n');
// inService is armed at each two-space-indented key that doesn't look like a
// known service property, and disarmed by any restart: line that follows.
let inService = false;
for (let i = 0; i < lines.length; i++) {
if (lines[i].match(/^\s{2}\w+:/) && !lines[i].match(/image:|build:|ports:|volumes:|environment:/)) inService = true;
if (inService && lines[i].match(/restart:/)) inService = false;
}
// NOTE(review): because the flag is checked only after the scan, this reports at most
// one finding per file and only when a service-like key appears AFTER the last
// restart: line — earlier services without a restart policy are not individually
// flagged. Presumably an intentional cheap heuristic; confirm before tightening.
if (inService) {
findings.push({ ruleId: 'REL-OPS-015', category: 'reliability', severity: 'medium', title: 'Docker Compose service without restart policy — crashes not recovered', description: 'Add restart: unless-stopped to production services. Without restart policy, crashed containers stay down until manually restarted.', file: fp, fix: null });
}
}
return findings;
},
},
|
|
1540
|
+
|
|
1541
|
+
// REL-MON-012: No tracing for distributed requests
|
|
1542
|
+
{ id: 'REL-MON-012', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Distributed Tracing',
|
|
1543
|
+
check({ files, stack }) {
|
|
1544
|
+
const findings = [];
|
|
1545
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1546
|
+
const hasTracing = ['@opentelemetry/sdk-node', '@opentelemetry/auto-instrumentations-node', 'dd-trace', 'newrelic', 'jaeger-client', 'zipkin'].some(d => d in allDeps);
|
|
1547
|
+
const hasMultipleServices = [...files.values()].some(c => c.match(/axios\.|fetch\(|got\./i) && c.match(/microservice|service.*url|API_URL/i));
|
|
1548
|
+
if (hasMultipleServices && !hasTracing) {
|
|
1549
|
+
findings.push({ ruleId: 'REL-MON-012', category: 'reliability', severity: 'medium', title: 'Service makes external calls without distributed tracing', description: 'Add OpenTelemetry or Datadog APM. Without tracing, debugging latency across multiple services requires guessing which service is slow.', fix: null });
|
|
1550
|
+
}
|
|
1551
|
+
return findings;
|
|
1552
|
+
},
|
|
1553
|
+
},
|
|
1554
|
+
|
|
1555
|
+
// REL-MON-013: Log levels not environment-aware
|
|
1556
|
+
{ id: 'REL-MON-013', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'Log Level Not Configurable via Environment',
|
|
1557
|
+
check({ files }) {
|
|
1558
|
+
const findings = [];
|
|
1559
|
+
for (const [fp, c] of files) {
|
|
1560
|
+
if (!isSourceFile(fp)) continue;
|
|
1561
|
+
if (c.match(/winston|pino|bunyan|log4js/i)) {
|
|
1562
|
+
if (c.match(/level:\s*['"]debug['"]/) && !c.match(/process\.env\.\w*LOG.*LEVEL|process\.env\.\w*LOG_LEVEL/i)) {
|
|
1563
|
+
findings.push({ ruleId: 'REL-MON-013', category: 'reliability', severity: 'low', title: 'Logger configured with hardcoded level — not adjustable in production', description: 'Use process.env.LOG_LEVEL || "info". Hardcoded debug level in production floods logs and increases storage costs.', file: fp, fix: null });
|
|
1564
|
+
}
|
|
1565
|
+
}
|
|
1566
|
+
}
|
|
1567
|
+
return findings;
|
|
1568
|
+
},
|
|
1569
|
+
},
|
|
1570
|
+
|
|
1571
|
+
// REL-EH-011: Missing error boundary in React app
|
|
1572
|
+
{ id: 'REL-EH-011', category: 'reliability', severity: 'high', confidence: 'likely', title: 'React App Without Error Boundaries',
|
|
1573
|
+
check({ files, stack }) {
|
|
1574
|
+
const findings = [];
|
|
1575
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1576
|
+
const hasReact = 'react' in allDeps;
|
|
1577
|
+
const hasErrorBoundary = [...files.values()].some(c => c.match(/componentDidCatch|ErrorBoundary|react-error-boundary/i));
|
|
1578
|
+
if (hasReact && !hasErrorBoundary) {
|
|
1579
|
+
findings.push({ ruleId: 'REL-EH-011', category: 'reliability', severity: 'medium', title: 'React application without error boundary', description: 'Wrap major UI sections in ErrorBoundary components. Without error boundaries, a single component error unmounts the entire React tree.', fix: null });
|
|
1580
|
+
}
|
|
1581
|
+
return findings;
|
|
1582
|
+
},
|
|
1583
|
+
},
|
|
1584
|
+
// REL-TEST-011: No test for authentication middleware
|
|
1585
|
+
{ id: 'REL-TEST-011', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Authentication Middleware Without Tests',
|
|
1586
|
+
check({ files }) {
|
|
1587
|
+
const findings = [];
|
|
1588
|
+
for (const [fp, c] of files) {
|
|
1589
|
+
if (!isSourceFile(fp) || fp.includes('test') || fp.includes('spec')) continue;
|
|
1590
|
+
if (!fp.match(/auth|middleware/i)) continue;
|
|
1591
|
+
if (c.match(/authenticate|authorize|verifyToken|requireAuth/i)) {
|
|
1592
|
+
const testFile = [...files.keys()].find(tf => tf.includes(fp.split('/').pop().replace(/\.(js|ts)$/, '')) && tf.match(/test|spec/));
|
|
1593
|
+
if (!testFile) {
|
|
1594
|
+
findings.push({ ruleId: 'REL-TEST-011', category: 'reliability', severity: 'high', title: 'Authentication middleware without test file', description: 'Add tests for auth middleware: valid token, expired token, missing token, tampered token. Auth bugs in untested middleware allow security bypasses.', file: fp, fix: null });
|
|
1595
|
+
}
|
|
1596
|
+
}
|
|
1597
|
+
}
|
|
1598
|
+
return findings;
|
|
1599
|
+
},
|
|
1600
|
+
},
|
|
1601
|
+
// REL-DB-011: Prisma without error handling on migrations
|
|
1602
|
+
{ id: 'REL-DB-011', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Database Migrations Without Rollback Script',
|
|
1603
|
+
check({ files }) {
|
|
1604
|
+
const findings = [];
|
|
1605
|
+
for (const [fp, c] of files) {
|
|
1606
|
+
if (!fp.match(/migration/i)) continue;
|
|
1607
|
+
if (c.match(/ALTER TABLE|DROP COLUMN|RENAME COLUMN|DROP TABLE/i)) {
|
|
1608
|
+
if (!c.match(/DOWN|rollback|revert|UNDO/i)) {
|
|
1609
|
+
findings.push({ ruleId: 'REL-DB-011', category: 'reliability', severity: 'medium', title: 'Destructive migration without rollback/down script', description: 'Add a down() migration for every up() that makes destructive changes. Without rollback, failed deploys require manual database recovery.', file: fp, fix: null });
|
|
1610
|
+
}
|
|
1611
|
+
}
|
|
1612
|
+
}
|
|
1613
|
+
return findings;
|
|
1614
|
+
},
|
|
1615
|
+
},
|
|
1616
|
+
// REL-OPS-016: No container OOM kill monitoring
|
|
1617
|
+
{ id: 'REL-OPS-016', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No OOM Kill Detection',
|
|
1618
|
+
check({ files }) {
|
|
1619
|
+
const findings = [];
|
|
1620
|
+
for (const [fp, c] of files) {
|
|
1621
|
+
if (!fp.match(/\.(yaml|yml)$/)) continue;
|
|
1622
|
+
if (c.match(/kind:\s*Deployment/i) && c.match(/memory:/i)) {
|
|
1623
|
+
if (!c.match(/oomKillPolicy|OOMKill|memory.*alert|oom.*monitor/i)) {
|
|
1624
|
+
findings.push({ ruleId: 'REL-OPS-016', category: 'reliability', severity: 'medium', title: 'Memory limits set without OOM kill monitoring', description: 'Alert on container OOM kills: monitor kube_pod_container_status_last_terminated_reason = OOMKilled. OOM kills indicate memory leaks or undersized limits.', file: fp, fix: null });
|
|
1625
|
+
}
|
|
1626
|
+
}
|
|
1627
|
+
}
|
|
1628
|
+
return findings;
|
|
1629
|
+
},
|
|
1630
|
+
},
|
|
1631
|
+
// REL-MON-014: No uptime monitoring with alerting
|
|
1632
|
+
{ id: 'REL-MON-014', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'No External Uptime Monitoring',
|
|
1633
|
+
check({ files }) {
|
|
1634
|
+
const findings = [];
|
|
1635
|
+
const allCode = [...files.values()].join('\n');
|
|
1636
|
+
const hasUptimeMonitor = allCode.match(/pingdom|uptime.*robot|statuspage|better.*uptime|pagerduty.*monitor|freshping|checkly/i);
|
|
1637
|
+
if (!hasUptimeMonitor) {
|
|
1638
|
+
findings.push({ ruleId: 'REL-MON-014', category: 'reliability', severity: 'medium', title: 'No external uptime monitoring detected', description: 'Add Pingdom, UptimeRobot, or Checkly. Internal health checks cannot detect DNS failures, BGP routing issues, or CDN outages that affect real users.', fix: null });
|
|
1639
|
+
}
|
|
1640
|
+
return findings;
|
|
1641
|
+
},
|
|
1642
|
+
},
|
|
1643
|
+
// REL-RES-011: No dead letter queue for failed background jobs
|
|
1644
|
+
{ id: 'REL-RES-011', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No Dead Letter Queue for Failed Jobs',
|
|
1645
|
+
check({ files, stack }) {
|
|
1646
|
+
const findings = [];
|
|
1647
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1648
|
+
const hasQueue = ['bull', 'bullmq', 'bee-queue', 'kue', 'agenda', 'node-resque'].some(d => d in allDeps);
|
|
1649
|
+
const hasDLQ = [...files.values()].some(c => c.match(/failed.*queue|dlq|dead.*letter|on.*failed.*|maxAttempts|removeOnFail.*false/i));
|
|
1650
|
+
if (hasQueue && !hasDLQ) {
|
|
1651
|
+
findings.push({ ruleId: 'REL-RES-011', category: 'reliability', severity: 'high', title: 'Job queue without dead letter queue or failure handling', description: 'Configure a failed jobs queue: { removeOnFail: false, attempts: 3, backoff: { type: "exponential" } }. Failed jobs should be inspectable and retryable.', fix: null });
|
|
1652
|
+
}
|
|
1653
|
+
return findings;
|
|
1654
|
+
},
|
|
1655
|
+
},
|
|
1656
|
+
// REL-OPS-017: Log rotation not configured
|
|
1657
|
+
{ id: 'REL-OPS-017', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Log Rotation Configuration',
|
|
1658
|
+
check({ files, stack }) {
|
|
1659
|
+
const findings = [];
|
|
1660
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1661
|
+
const hasFileLogging = [...files.values()].some(c => c.match(/winston.*file|createWriteStream.*log|pino.*destination|log.*\/var\/log/i));
|
|
1662
|
+
const hasRotation = ['winston-daily-rotate-file', 'rotating-file-stream', 'logrotate'].some(d => d in allDeps) || [...files.values()].some(c => c.match(/rotate|maxFiles|maxSize|datePattern/i));
|
|
1663
|
+
if (hasFileLogging && !hasRotation) {
|
|
1664
|
+
findings.push({ ruleId: 'REL-OPS-017', category: 'reliability', severity: 'medium', title: 'File-based logging without rotation — log files grow until disk is full', description: 'Use winston-daily-rotate-file or configure logrotate. Unrotated log files exhaust disk space and crash the application.', fix: null });
|
|
1665
|
+
}
|
|
1666
|
+
return findings;
|
|
1667
|
+
},
|
|
1668
|
+
},
|
|
1669
|
+
// REL-EH-012: Unhandled promise in Express route
|
|
1670
|
+
{ id: 'REL-EH-012', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Express Route Without Async Error Wrapper',
|
|
1671
|
+
check({ files }) {
|
|
1672
|
+
const findings = [];
|
|
1673
|
+
for (const [fp, c] of files) {
|
|
1674
|
+
if (!isSourceFile(fp)) continue;
|
|
1675
|
+
const lines = c.split('\n');
|
|
1676
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1677
|
+
if (lines[i].match(/router\.(get|post|put|patch|delete)\s*\(.*async\s*(req|function)/)) {
|
|
1678
|
+
const context = lines.slice(i, Math.min(lines.length, i + 20)).join('\n');
|
|
1679
|
+
if (!context.match(/try\s*\{|asyncHandler|catchAsync|wrapAsync/)) {
|
|
1680
|
+
findings.push({ ruleId: 'REL-EH-012', category: 'reliability', severity: 'high', title: 'Async Express route handler without try/catch or error wrapper', description: 'Wrap async route handlers with express-async-errors or a catchAsync wrapper. Uncaught async errors in Express routes cause unhandled rejections and crash or hang requests.', file: fp, line: i + 1, fix: null });
|
|
1681
|
+
}
|
|
1682
|
+
}
|
|
1683
|
+
}
|
|
1684
|
+
}
|
|
1685
|
+
return findings;
|
|
1686
|
+
},
|
|
1687
|
+
},
|
|
1688
|
+
// REL-EH-013: No error classification (operational vs programmer errors)
|
|
1689
|
+
{ id: 'REL-EH-013', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Error Classification System',
|
|
1690
|
+
check({ files, stack }) {
|
|
1691
|
+
const findings = [];
|
|
1692
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1693
|
+
const hasErrorClass = [...files.values()].some(c => c.match(/class.*extends.*Error|AppError|OperationalError|HttpError/));
|
|
1694
|
+
const hasRoutes = [...files.values()].some(c => c.match(/express|fastify|koa/i));
|
|
1695
|
+
const hasErrorMiddleware = [...files.values()].some(c => c.match(/err.*req.*res.*next|error.*middleware/i));
|
|
1696
|
+
if (hasRoutes && hasErrorMiddleware && !hasErrorClass) {
|
|
1697
|
+
findings.push({ ruleId: 'REL-EH-013', category: 'reliability', severity: 'medium', title: 'No custom Error classes — cannot distinguish operational vs programmer errors', description: 'Create AppError class extending Error with isOperational flag. Operational errors (404, validation) should return user-friendly messages; programmer errors should crash and restart.', fix: null });
|
|
1698
|
+
}
|
|
1699
|
+
return findings;
|
|
1700
|
+
},
|
|
1701
|
+
},
|
|
1702
|
+
// REL-TEST-012: No smoke tests after deployment
|
|
1703
|
+
{ id: 'REL-TEST-012', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Post-Deployment Smoke Tests',
|
|
1704
|
+
check({ files }) {
|
|
1705
|
+
const findings = [];
|
|
1706
|
+
const hasCICD = [...files.keys()].some(f => f.match(/\.github\/workflows|\.gitlab-ci|Jenkinsfile|\.circleci/));
|
|
1707
|
+
const hasSmokeTest = [...files.values()].some(c => c.match(/smoke.*test|post.?deploy.*test|deployment.*verify|health.*check.*deploy/i));
|
|
1708
|
+
if (hasCICD && !hasSmokeTest) {
|
|
1709
|
+
findings.push({ ruleId: 'REL-TEST-012', category: 'reliability', severity: 'medium', title: 'No smoke tests after deployment — broken deployments not caught automatically', description: 'Add post-deployment smoke tests that verify critical paths (login, main API calls). Catch deployment failures within minutes instead of from user complaints.', fix: null });
|
|
1710
|
+
}
|
|
1711
|
+
return findings;
|
|
1712
|
+
},
|
|
1713
|
+
},
|
|
1714
|
+
// REL-MON-015: No business metrics tracking
|
|
1715
|
+
{ id: 'REL-MON-015', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Business Metrics Instrumentation',
|
|
1716
|
+
check({ files, stack }) {
|
|
1717
|
+
const findings = [];
|
|
1718
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1719
|
+
const hasBusinessMetrics = [...files.values()].some(c => c.match(/increment\s*\(|gauge\s*\(|histogram\s*\(|counter\s*\(|track\s*\(.*event/i)) || ['prom-client', 'datadog-metrics', 'statsd', '@segment/analytics-node'].some(d => d in allDeps);
|
|
1720
|
+
const hasRoutes = [...files.values()].some(c => c.match(/router\.(get|post|put|delete)\s*\(/));
|
|
1721
|
+
if (hasRoutes && !hasBusinessMetrics) {
|
|
1722
|
+
findings.push({ ruleId: 'REL-MON-015', category: 'reliability', severity: 'medium', title: 'No business metrics tracking — cannot measure feature health', description: 'Track key business events (signups, orders, failures) with prom-client or Datadog. Technical metrics alone do not tell you if the business is functioning correctly.', fix: null });
|
|
1723
|
+
}
|
|
1724
|
+
return findings;
|
|
1725
|
+
},
|
|
1726
|
+
},
|
|
1727
|
+
// REL-DB-012: No database migration version control
|
|
1728
|
+
{ id: 'REL-DB-012', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No Database Migration System',
|
|
1729
|
+
check({ files, stack }) {
|
|
1730
|
+
const findings = [];
|
|
1731
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1732
|
+
const hasMigrations = ['knex', 'sequelize', 'typeorm', 'prisma', 'db-migrate', 'flyway', 'liquibase', 'umzug'].some(d => d in allDeps) || [...files.keys()].some(f => f.match(/migrations?\/|migration\./i));
|
|
1733
|
+
const hasDB = ['pg', 'mysql', 'mysql2', 'mongoose', 'sqlite3', 'mssql'].some(d => d in allDeps);
|
|
1734
|
+
if (hasDB && !hasMigrations) {
|
|
1735
|
+
findings.push({ ruleId: 'REL-DB-012', category: 'reliability', severity: 'high', title: 'Database used without migration system — schema changes not tracked', description: 'Use Knex, Prisma, or TypeORM migrations. Without a migration system, schema changes are applied manually and inconsistently across environments, leading to drift and outages.', fix: null });
|
|
1736
|
+
}
|
|
1737
|
+
return findings;
|
|
1738
|
+
},
|
|
1739
|
+
},
|
|
1740
|
+
// REL-RES-012: HTTP requests without timeout
|
|
1741
|
+
{ id: 'REL-RES-012', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Outbound HTTP Request Without Timeout',
|
|
1742
|
+
check({ files }) {
|
|
1743
|
+
const findings = [];
|
|
1744
|
+
for (const [fp, c] of files) {
|
|
1745
|
+
if (!isSourceFile(fp)) continue;
|
|
1746
|
+
const lines = c.split('\n');
|
|
1747
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1748
|
+
if (lines[i].match(/axios\.(get|post|put|delete|patch)\s*\(|fetch\s*\(/) && !lines[i].match(/timeout:|AbortSignal|signal:/)) {
|
|
1749
|
+
const context = lines.slice(Math.max(0, i - 2), i + 5).join('\n');
|
|
1750
|
+
if (!context.match(/timeout|AbortController|signal/)) {
|
|
1751
|
+
findings.push({ ruleId: 'REL-RES-012', category: 'reliability', severity: 'high', title: 'HTTP request without timeout — hangs indefinitely on unresponsive service', description: 'Always set a timeout on outbound HTTP requests. Without timeouts, a slow or unresponsive downstream service causes request threads to hang indefinitely, exhausting resources.', file: fp, line: i + 1, fix: null });
|
|
1752
|
+
}
|
|
1753
|
+
}
|
|
1754
|
+
}
|
|
1755
|
+
}
|
|
1756
|
+
return findings;
|
|
1757
|
+
},
|
|
1758
|
+
},
|
|
1759
|
+
// REL-OPS-018: Server process without cluster mode
|
|
1760
|
+
{ id: 'REL-OPS-018', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Node.js Server Not Using Cluster Mode',
|
|
1761
|
+
check({ files, stack }) {
|
|
1762
|
+
const findings = [];
|
|
1763
|
+
const hasCluster = [...files.values()].some(c => c.match(/require\(['"]cluster['"]|import.*['"]cluster['"]|throng|pm2.*instances|cluster_mode/i));
|
|
1764
|
+
const isServer = [...files.values()].some(c => c.match(/app\.listen|server\.listen/));
|
|
1765
|
+
const isPM2 = [...files.keys()].some(f => f.match(/ecosystem\.config|pm2\./)) && [...files.values()].some(c => c.match(/instances.*\d|exec_mode.*cluster/i));
|
|
1766
|
+
if (isServer && !hasCluster && !isPM2) {
|
|
1767
|
+
findings.push({ ruleId: 'REL-OPS-018', category: 'reliability', severity: 'medium', title: 'Node.js server running as single process — not utilizing all CPU cores', description: 'Use PM2 cluster mode or Node.js cluster module. Single-process Node.js uses only one CPU core. Cluster mode improves throughput and resilience on multi-core servers.', fix: null });
|
|
1768
|
+
}
|
|
1769
|
+
return findings;
|
|
1770
|
+
},
|
|
1771
|
+
},
|
|
1772
|
+
// REL-OPS-019: No dependency health check endpoint
|
|
1773
|
+
{ id: 'REL-OPS-019', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Health Check Endpoint Does Not Check Dependencies',
|
|
1774
|
+
check({ files }) {
|
|
1775
|
+
const findings = [];
|
|
1776
|
+
const hasHealth = [...files.values()].some(c => c.match(/\/health|\/healthz|\/ping/i));
|
|
1777
|
+
const checksDeps = [...files.values()].some(c => c.match(/\/health.*db|\/health.*redis|\/health.*queue|mongoose.*isConnected|pg.*query.*1/i));
|
|
1778
|
+
if (hasHealth && !checksDeps) {
|
|
1779
|
+
findings.push({ ruleId: 'REL-OPS-019', category: 'reliability', severity: 'medium', title: 'Health check exists but does not verify database/cache connectivity', description: 'Include dependency checks (DB ping, Redis ping) in health endpoint. A shallow health check that only returns 200 gives false confidence during partial outages.', fix: null });
|
|
1780
|
+
}
|
|
1781
|
+
return findings;
|
|
1782
|
+
},
|
|
1783
|
+
},
|
|
1784
|
+
// REL-EH-014: Promise.all without individual error handling
|
|
1785
|
+
{ id: 'REL-EH-014', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Promise.all Fails Completely on Single Rejection',
|
|
1786
|
+
check({ files }) {
|
|
1787
|
+
const findings = [];
|
|
1788
|
+
for (const [fp, c] of files) {
|
|
1789
|
+
if (!isSourceFile(fp)) continue;
|
|
1790
|
+
const lines = c.split('\n');
|
|
1791
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1792
|
+
if (lines[i].match(/Promise\.all\s*\(\[/) && !lines[i].match(/Promise\.allSettled/)) {
|
|
1793
|
+
const context = lines.slice(Math.max(0, i - 2), i + 5).join('\n');
|
|
1794
|
+
if (!context.match(/\.catch\s*\(|try\s*\{/)) {
|
|
1795
|
+
findings.push({ ruleId: 'REL-EH-014', category: 'reliability', severity: 'medium', title: 'Promise.all without error handling — single failure cancels all operations', description: 'Use Promise.allSettled() if partial results are acceptable, or add .catch() to individual promises. Promise.all rejects immediately on the first rejection, discarding other results.', file: fp, line: i + 1, fix: null });
|
|
1796
|
+
}
|
|
1797
|
+
}
|
|
1798
|
+
}
|
|
1799
|
+
}
|
|
1800
|
+
return findings;
|
|
1801
|
+
},
|
|
1802
|
+
},
|
|
1803
|
+
// REL-RES-013: Missing fallback for external service unavailability
|
|
1804
|
+
{ id: 'REL-RES-013', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No Fallback for Third-Party Service Calls',
|
|
1805
|
+
check({ files }) {
|
|
1806
|
+
const findings = [];
|
|
1807
|
+
for (const [fp, c] of files) {
|
|
1808
|
+
if (!isSourceFile(fp)) continue;
|
|
1809
|
+
const lines = c.split('\n');
|
|
1810
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1811
|
+
if (lines[i].match(/stripe\.|sendgrid\.|twilio\.|braintree\.|paypal\./i)) {
|
|
1812
|
+
const context = lines.slice(Math.max(0, i - 5), i + 15).join('\n');
|
|
1813
|
+
if (!context.match(/catch|fallback|retry|circuit|queue/i)) {
|
|
1814
|
+
findings.push({ ruleId: 'REL-RES-013', category: 'reliability', severity: 'medium', title: 'Third-party payment/communication call without retry or fallback', description: 'Add retry logic with exponential backoff for external service calls. Transient failures in Stripe, SendGrid, or Twilio should not cause unrecoverable errors.', file: fp, line: i + 1, fix: null });
|
|
1815
|
+
}
|
|
1816
|
+
}
|
|
1817
|
+
}
|
|
1818
|
+
}
|
|
1819
|
+
return findings;
|
|
1820
|
+
},
|
|
1821
|
+
},
|
|
1822
|
+
// REL-TEST-013: No API contract testing
|
|
1823
|
+
{ id: 'REL-TEST-013', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No API Contract Tests Between Services',
|
|
1824
|
+
check({ files, stack }) {
|
|
1825
|
+
const findings = [];
|
|
1826
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1827
|
+
const hasPact = ['@pact-foundation/pact', 'pactum', 'dredd'].some(d => d in allDeps) || [...files.keys()].some(f => f.match(/pact|contract.*test/i));
|
|
1828
|
+
const hasMultipleServices = [...files.values()].some(c => c.match(/microservice|service.*discovery|consul|kubernetes.*service/i));
|
|
1829
|
+
if (hasMultipleServices && !hasPact) {
|
|
1830
|
+
findings.push({ ruleId: 'REL-TEST-013', category: 'reliability', severity: 'medium', title: 'Microservices without contract tests — API breaking changes not caught before deployment', description: 'Add Pact.js for consumer-driven contract testing. Without contract tests, API changes between services cause production failures that integration tests miss.', fix: null });
|
|
1831
|
+
}
|
|
1832
|
+
return findings;
|
|
1833
|
+
},
|
|
1834
|
+
},
|
|
1835
|
+
// REL-OPS-020: Application startup errors not fatal
|
|
1836
|
+
{ id: 'REL-OPS-020', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Startup Configuration Errors Not Causing Fatal Exit',
|
|
1837
|
+
check({ files }) {
|
|
1838
|
+
const findings = [];
|
|
1839
|
+
for (const [fp, c] of files) {
|
|
1840
|
+
if (!isSourceFile(fp)) continue;
|
|
1841
|
+
if (!c.match(/app\.listen|server\.listen/)) continue;
|
|
1842
|
+
const hasFatalOnMissingEnv = c.match(/process\.exit\s*\(|throw.*required|missing.*env|env.*required/i);
|
|
1843
|
+
const hasMissingEnvCheck = c.match(/process\.env\.\w+\s*\|\|\s*['"`]|!process\.env\.\w+/);
|
|
1844
|
+
if (hasMissingEnvCheck && !hasFatalOnMissingEnv) {
|
|
1845
|
+
findings.push({ ruleId: 'REL-OPS-020', category: 'reliability', severity: 'high', title: 'Missing required env vars fall back to empty string instead of crashing', description: 'Validate all required environment variables at startup and call process.exit(1) if any are missing. Silent fallbacks cause hard-to-debug runtime errors.', file: fp, fix: null });
|
|
1846
|
+
}
|
|
1847
|
+
}
|
|
1848
|
+
return findings;
|
|
1849
|
+
},
|
|
1850
|
+
},
|
|
1851
|
+
// REL-DB-013: Missing index on foreign key columns
|
|
1852
|
+
{ id: 'REL-DB-013', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Foreign Key Without Database Index',
|
|
1853
|
+
check({ files }) {
|
|
1854
|
+
const findings = [];
|
|
1855
|
+
for (const [fp, c] of files) {
|
|
1856
|
+
if (!isSourceFile(fp)) continue;
|
|
1857
|
+
const lines = c.split('\n');
|
|
1858
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1859
|
+
if (lines[i].match(/references\s*\(|REFERENCES\s+\w+|belongsTo|hasMany|hasOne/i) && !lines[i].match(/\/\//)) {
|
|
1860
|
+
const ctx = lines.slice(Math.max(0, i - 5), i + 10).join('\n');
|
|
1861
|
+
if (!ctx.match(/index:\s*true|addIndex|INDEX\s+\w+\s+\(|\.index\s*\(/i)) {
|
|
1862
|
+
findings.push({ ruleId: 'REL-DB-013', category: 'reliability', severity: 'medium', title: 'Foreign key relationship without explicit index', description: 'Add an index on foreign key columns. Without an index, queries joining on or filtering by a foreign key perform full table scans that degrade as data grows.', file: fp, line: i + 1, fix: null });
|
|
1863
|
+
}
|
|
1864
|
+
}
|
|
1865
|
+
}
|
|
1866
|
+
}
|
|
1867
|
+
return findings;
|
|
1868
|
+
},
|
|
1869
|
+
},
|
|
1870
|
+
// REL-MON-016: No SLO/SLA definition
|
|
1871
|
+
{ id: 'REL-MON-016', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'No SLO/SLA Defined for Service',
|
|
1872
|
+
check({ files }) {
|
|
1873
|
+
const findings = [];
|
|
1874
|
+
const hasSLO = [...files.keys()].some(f => f.match(/slo|sla|service.?level/i)) || [...files.values()].some(c => c.match(/availabilityTarget|errorBudget|slo:|sla:|uptime.*\d{2,3}%/i));
|
|
1875
|
+
const hasMonitoring = [...files.values()].some(c => c.match(/cloudwatch|datadog|prometheus|grafana|pagerduty|opsgenie/i));
|
|
1876
|
+
if (hasMonitoring && !hasSLO) {
|
|
1877
|
+
findings.push({ ruleId: 'REL-MON-016', category: 'reliability', severity: 'low', title: 'Monitoring configured without documented SLOs', description: 'Define Service Level Objectives (e.g., 99.9% uptime, p99 < 500ms). SLOs make reliability goals measurable and drive alert threshold decisions. Document them in a README or runbook.', fix: null });
|
|
1878
|
+
}
|
|
1879
|
+
return findings;
|
|
1880
|
+
},
|
|
1881
|
+
},
|
|
1882
|
+
// REL-RES-014: Job queue without dead-letter queue
|
|
1883
|
+
{ id: 'REL-RES-014', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Job Queue Without Dead-Letter Queue Configuration',
|
|
1884
|
+
check({ files, stack }) {
|
|
1885
|
+
const findings = [];
|
|
1886
|
+
const allDeps = { ...stack.dependencies, ...stack.devDependencies };
|
|
1887
|
+
const hasQueue = ['bull', 'bullmq', 'bee-queue', 'agenda', 'kue'].some(d => d in allDeps);
|
|
1888
|
+
const hasDLQ = [...files.values()].some(c => c.match(/deadLetter|dead_letter|failedQueue|onFailed|failed.*queue|dlq/i));
|
|
1889
|
+
if (hasQueue && !hasDLQ) {
|
|
1890
|
+
findings.push({ ruleId: 'REL-RES-014', category: 'reliability', severity: 'high', title: 'Job queue without dead-letter queue — failed jobs silently discarded', description: 'Configure a dead-letter queue and onFailed handler. Without DLQ, failed jobs are lost after max retries with no ability to inspect failures or replay messages.', fix: null });
|
|
1891
|
+
}
|
|
1892
|
+
return findings;
|
|
1893
|
+
},
|
|
1894
|
+
},
|
|
1895
|
+
// REL-EH-015: Unchecked null from .find() / .get()
|
|
1896
|
+
{ id: 'REL-EH-015', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Result of .find() Used Without Null Check',
|
|
1897
|
+
check({ files }) {
|
|
1898
|
+
const findings = [];
|
|
1899
|
+
for (const [fp, c] of files) {
|
|
1900
|
+
if (!isSourceFile(fp)) continue;
|
|
1901
|
+
const lines = c.split('\n');
|
|
1902
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1903
|
+
if (lines[i].match(/=\s*\w+\.find\s*\(/) && !lines[i].match(/\/\//)) {
|
|
1904
|
+
const varMatch = lines[i].match(/(?:const|let|var)\s+(\w+)\s*=/);
|
|
1905
|
+
if (varMatch) {
|
|
1906
|
+
const varName = varMatch[1];
|
|
1907
|
+
const nextLines = lines.slice(i + 1, i + 5).join('\n');
|
|
1908
|
+
if (nextLines.match(new RegExp(`${varName}\\.\\w+`)) && !nextLines.match(new RegExp(`if\\s*\\(!?${varName}|${varName}\\?\\.|${varName}\\s*&&`))) {
|
|
1909
|
+
findings.push({ ruleId: 'REL-EH-015', category: 'reliability', severity: 'medium', title: `.find() result used without null check — TypeError when item not found`, description: 'Check that .find() returned a value before accessing its properties. Array.find() returns undefined if no match is found, causing "Cannot read property of undefined" errors.', file: fp, line: i + 1, fix: null });
|
|
1910
|
+
}
|
|
1911
|
+
}
|
|
1912
|
+
}
|
|
1913
|
+
}
|
|
1914
|
+
}
|
|
1915
|
+
return findings;
|
|
1916
|
+
},
|
|
1917
|
+
},
|
|
1918
|
+
// REL-TEST-014: Missing database seed for tests
|
|
1919
|
+
{ id: 'REL-TEST-014', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Integration Tests Without Database Seeding',
|
|
1920
|
+
check({ files }) {
|
|
1921
|
+
const findings = [];
|
|
1922
|
+
const hasIntegrationTests = [...files.keys()].some(f => f.match(/integration|e2e|\.test\.(js|ts)$/));
|
|
1923
|
+
const hasSeeding = [...files.values()].some(c => c.match(/seed|fixture|factory|beforeEach.*insert|beforeAll.*create/i));
|
|
1924
|
+
if (hasIntegrationTests && !hasSeeding) {
|
|
1925
|
+
findings.push({ ruleId: 'REL-TEST-014', category: 'reliability', severity: 'medium', title: 'Integration tests without database seeding — tests depend on pre-existing data', description: 'Use database factories or seed functions in beforeEach/beforeAll. Tests that depend on pre-existing data are fragile and non-deterministic.', fix: null });
|
|
1926
|
+
}
|
|
1927
|
+
return findings;
|
|
1928
|
+
},
|
|
1929
|
+
},
|
|
1930
|
+
// REL-OPS-021: No dependency on stable external services
|
|
1931
|
+
{ id: 'REL-OPS-021', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Hard Dependency on Single DNS/Service Without Fallback',
|
|
1932
|
+
check({ files }) {
|
|
1933
|
+
const findings = [];
|
|
1934
|
+
for (const [fp, c] of files) {
|
|
1935
|
+
if (!isSourceFile(fp)) continue;
|
|
1936
|
+
const hardcodedEndpoints = (c.match(/https?:\/\/[a-z0-9-]+\.[a-z]{2,}(?::\d+)?\/[^\s'"`]+/gi) || []);
|
|
1937
|
+
if (hardcodedEndpoints.length > 3) {
|
|
1938
|
+
findings.push({ ruleId: 'REL-OPS-021', category: 'reliability', severity: 'medium', title: 'Multiple hardcoded external endpoints — consider service discovery', description: 'Move external service URLs to environment variables. Hardcoded URLs prevent environment-specific configuration and make switching or load-balancing endpoints impossible without code changes.', file: fp, fix: null });
|
|
1939
|
+
}
|
|
1940
|
+
}
|
|
1941
|
+
return findings;
|
|
1942
|
+
},
|
|
1943
|
+
},
|
|
1944
|
+
// REL-DB-014: Using TRUNCATE in application code
|
|
1945
|
+
{ id: 'REL-DB-014', category: 'reliability', severity: 'high', confidence: 'likely', title: 'TRUNCATE Statement in Application Code',
|
|
1946
|
+
check({ files }) {
|
|
1947
|
+
const findings = [];
|
|
1948
|
+
for (const [fp, c] of files) {
|
|
1949
|
+
if (!isSourceFile(fp)) continue;
|
|
1950
|
+
const lines = c.split('\n');
|
|
1951
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1952
|
+
if (lines[i].match(/TRUNCATE\s+TABLE|truncate\s*\(/i) && !lines[i].match(/\/\/|test|spec|seed|migration/i)) {
|
|
1953
|
+
if (!fp.match(/test|spec|seed|migration/i)) {
|
|
1954
|
+
findings.push({ ruleId: 'REL-DB-014', category: 'reliability', severity: 'high', title: 'TRUNCATE in application code — irrecoverable data loss if called accidentally', description: 'Avoid TRUNCATE in production application code. If data deletion is required, use DELETE with a WHERE clause. TRUNCATE removes all rows instantly with no rollback path.', file: fp, line: i + 1, fix: null });
|
|
1955
|
+
}
|
|
1956
|
+
}
|
|
1957
|
+
}
|
|
1958
|
+
}
|
|
1959
|
+
return findings;
|
|
1960
|
+
},
|
|
1961
|
+
},
|
|
1962
|
+
];
|
|
1963
|
+
|
|
1964
|
+
export default rules;
|
|
1965
|
+
|
|
1966
|
+
// REL-015: Missing process.on('uncaughtException')
rules.push({
  id: 'REL-015', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No uncaughtException handler — process will crash on unhandled errors',
  // Fires when a main entry-point file exists but no file registers an
  // uncaughtException handler.
  check({ files }) {
    const findings = [];
    const entryRe = /(?:^|\/)(?:index|server|app|main)\.[jt]s$/;
    const hasMainEntry = [...files.keys()].some(path => path.match(entryRe));
    const hasUncaught = [...files.values()].some(src => /process\.on\s*\(\s*['"]uncaughtException['"]/.test(src));
    if (hasMainEntry && !hasUncaught) {
      findings.push({ ruleId: 'REL-015', category: 'reliability', severity: 'high', title: 'No process.on("uncaughtException") handler — unhandled errors crash the process', description: 'Without an uncaughtException handler, any thrown synchronous error not caught will terminate Node.js. Add a handler to log and gracefully shut down.', fix: null });
    }
    return findings;
  },
});
|
|
1979
|
+
|
|
1980
|
+
// REL-016: Missing unhandledRejection handler
rules.push({
  id: 'REL-016', category: 'reliability', severity: 'high', confidence: 'likely', title: 'No unhandledRejection handler — silent promise failures',
  // Fires when a main entry-point file exists but no file registers an
  // unhandledRejection handler.
  check({ files }) {
    const findings = [];
    const entryRe = /(?:^|\/)(?:index|server|app|main)\.[jt]s$/;
    const hasMainEntry = [...files.keys()].some(path => path.match(entryRe));
    const hasHandler = [...files.values()].some(src => /process\.on\s*\(\s*['"]unhandledRejection['"]/.test(src));
    if (hasMainEntry && !hasHandler) {
      findings.push({ ruleId: 'REL-016', category: 'reliability', severity: 'high', title: 'No process.on("unhandledRejection") — failed promises silently ignored', description: 'Unhandled promise rejections cause silent data corruption or incomplete operations. Add process.on("unhandledRejection") to log and handle failures.', fix: null });
    }
    return findings;
  },
});
|
|
1993
|
+
|
|
1994
|
+
// REL-017: while(true) without break condition
rules.push({
  id: 'REL-017', category: 'reliability', severity: 'high', confidence: 'likely', title: 'while(true) loop without visible break/return — infinite loop risk',
  // Flags while(true) loops whose next ~20 lines contain no break, return,
  // or throw. Comment lines are skipped.
  check({ files }) {
    const findings = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!/while\s*\(\s*true\s*\)/.test(line)) return;
        const body = lines.slice(idx, Math.min(lines.length, idx + 20)).join('\n');
        if (!/\bbreak\b|\breturn\b|\bthrow\b/.test(body)) {
          findings.push({ ruleId: 'REL-017', category: 'reliability', severity: 'high', title: 'while(true) with no break/return visible — infinite loop risk', description: 'Infinite loops without a visible exit condition may hang the process. Ensure a break, return, or throw is reachable within the loop.', file: path, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
|
|
2015
|
+
|
|
2016
|
+
// REL-018: Missing finally block for resource cleanup
rules.push({
  id: 'REL-018', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Resource opened in try block without finally cleanup',
  // Flags try blocks that acquire a resource (open/connect/lock/streams)
  // with no finally clause in the following ~30 lines.
  check({ files }) {
    const findings = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!/\btry\s*\{/.test(line)) return;
        const block = lines.slice(idx, Math.min(lines.length, idx + 30)).join('\n');
        if (/(?:open|connect|acquire|lock|createWriteStream|createReadStream)/.test(block) && !/finally\s*\{/.test(block)) {
          findings.push({ ruleId: 'REL-018', category: 'reliability', severity: 'medium', title: 'Resource opened in try without finally — leak on exception', description: 'File handles, DB connections, and locks opened in try blocks must be closed in finally to prevent resource leaks on exceptions.', file: path, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
|
|
2037
|
+
|
|
2038
|
+
// REL-019: Missing graceful shutdown (SIGTERM handler)
rules.push({
  id: 'REL-019', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No SIGTERM handler — container/k8s restarts without graceful shutdown',
  // Fires when a server is started but no file registers a SIGTERM handler.
  check({ files }) {
    const findings = [];
    const contents = [...files.values()];
    const hasServer = contents.some(src => /(?:app|server)\.listen\s*\(/.test(src));
    const hasSigterm = contents.some(src => /process\.on\s*\(\s*['"]SIGTERM['"]/.test(src));
    if (hasServer && !hasSigterm) {
      findings.push({ ruleId: 'REL-019', category: 'reliability', severity: 'medium', title: 'Server without SIGTERM handler — Kubernetes rolling deploys will drop in-flight requests', description: 'Handle SIGTERM to finish in-flight requests before exiting. Kubernetes sends SIGTERM before SIGKILL during rolling updates.', fix: null });
    }
    return findings;
  },
});
|
|
2051
|
+
|
|
2052
|
+
// REL-020: setInterval used without clearInterval — perpetual timer leak.
// NOTE(review): the old header said "setTimeout without clearTimeout", but the
// rule below matches interval timers (setInterval/clearInterval) only.
rules.push({
  id: 'REL-020', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'setInterval used without clearInterval — perpetual timer leak',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      // Whole-file heuristic: count call sites; a file with intervals but zero
      // clearInterval calls likely never tears its timers down.
      const setCount = (c.match(/setInterval\s*\(/g) || []).length;
      const clearCount = (c.match(/clearInterval\s*\(/g) || []).length;
      if (setCount > 0 && clearCount === 0) {
        findings.push({ ruleId: 'REL-020', category: 'reliability', severity: 'medium', title: `setInterval() called ${setCount} time(s) without clearInterval`, description: 'Intervals not cleared prevent garbage collection and may cause duplicate operations after reinitialization. Store the interval ID and call clearInterval in cleanup.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
|
|
2068
|
+
|
|
2069
|
+
// REL-021: more than one settlement path inside a Promise executor is suspicious.
rules.push({
  id: 'REL-021', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Potential multiple resolve/reject calls in Promise constructor',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const promiseCtor = /new\s+Promise\s*\(/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      for (const [idx, line] of fileLines.entries()) {
        if (commentLine.test(line) || !promiseCtor.test(line)) continue;
        // Count settlement calls in a 30-line window after the constructor.
        const body = fileLines.slice(idx, Math.min(fileLines.length, idx + 30)).join('\n');
        const resolves = (body.match(/\bresolve\s*\(/g) || []).length;
        const rejects = (body.match(/\breject\s*\(/g) || []).length;
        // One resolve + one reject is normal; three or more settlements is flagged.
        if (resolves + rejects > 2) {
          findings.push({ ruleId: 'REL-021', category: 'reliability', severity: 'high', title: 'Multiple resolve/reject paths in Promise — only first call wins, rest silently ignored', description: 'Only the first resolve() or reject() call has effect. Multiple calls indicate logic errors. Use return after each settlement.', file: fp, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2092
|
+
|
|
2093
|
+
// REL-022: deep property access on request payloads without a null guard.
rules.push({
  id: 'REL-022', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Property accessed on potentially null/undefined value',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const deepAccess = /req\.(?:body|params|query)\.\w+\.\w+/;
    const optChained = /req\.(?:body|params|query)\?\.\w+/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      c.split('\n').forEach((line, idx) => {
        if (commentLine.test(line)) return;
        // e.g. req.body.user.id with no optional chaining on the payload.
        if (deepAccess.test(line) && !optChained.test(line)) {
          findings.push({ ruleId: 'REL-022', category: 'reliability', severity: 'medium', title: 'Deep property access on req.body/params without null check', description: 'Accessing nested properties on req.body without validation throws "Cannot read properties of undefined". Use optional chaining (?.) or validate with Joi/Zod first.', file: fp, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
|
|
2112
|
+
|
|
2113
|
+
// REL-023: flags long-deprecated Node.js APIs that may disappear in future releases.
rules.push({
  id: 'REL-023', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Deprecated Node.js API used',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const deprecated = /new\s+Buffer\s*\(|require\s*\(\s*['"]sys['"]\)|require\s*\(\s*['"]punycode['"]\)|crypto\.createCredentials|domain\.create/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      for (const [idx, line] of c.split('\n').entries()) {
        if (commentLine.test(line)) continue;
        if (!deprecated.test(line)) continue;
        findings.push({ ruleId: 'REL-023', category: 'reliability', severity: 'medium', title: 'Deprecated Node.js API — may be removed in future version', description: 'Replace deprecated APIs: new Buffer() → Buffer.from/alloc/allocUnsafe; require("sys") → require("util"); require("punycode") → use third-party module.', file: fp, line: idx + 1, fix: null });
      }
    }
    return findings;
  },
});
|
|
2132
|
+
|
|
2133
|
+
// REL-024: self-recursive functions with no visible depth guard.
rules.push({
  id: 'REL-024', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Recursive function without depth limit — stack overflow risk',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const guardWords = /depth|maxDepth|level|maxLevel|limit/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      for (let idx = 0; idx < fileLines.length; idx++) {
        const line = fileLines[idx];
        if (commentLine.test(line)) continue;
        const decl = line.match(/function\s+(\w+)\s*\([^)]*\)/);
        if (!decl) continue;
        const name = decl[1];
        // PascalCase names are React components; hook/handler prefixes rarely recurse.
        if (/^[A-Z]/.test(name)) continue;
        if (/^(use[A-Z]|get[A-Z]|set[A-Z]|handle[A-Z]|on[A-Z])/.test(name)) continue;
        const body = fileLines.slice(idx + 1, Math.min(fileLines.length, idx + 40)).join('\n');
        // The function must actually invoke itself, not merely be referenced.
        const selfCall = new RegExp(`\\b${name}\\s*\\(`);
        if (selfCall.test(body) && !guardWords.test(body)) {
          findings.push({ ruleId: 'REL-024', category: 'reliability', severity: 'high', title: `Recursive function '${name}' without depth guard — stack overflow on deep input`, description: 'Add a depth parameter with a maximum limit. Large or adversarial inputs can exhaust the call stack.', file: fp, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2161
|
+
|
|
2162
|
+
// REL-025: files registering many listeners without raising the listener ceiling.
rules.push({
  id: 'REL-025', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'EventEmitter without setMaxListeners — Node.js memory leak warning',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const usesEmitter = /EventEmitter|\.emit\s*\(|\.on\s*\(/.test(c);
      if (!usesEmitter || /setMaxListeners/.test(c)) continue;
      // Node warns at >10 listeners per emitter; use .on( call count as a proxy.
      const listenerCalls = (c.match(/\.on\s*\(/g) || []).length;
      if (listenerCalls > 10) {
        findings.push({ ruleId: 'REL-025', category: 'reliability', severity: 'low', title: 'Many event listeners without setMaxListeners — potential leak warning', description: 'Node.js warns about memory leaks when an EventEmitter has more than 10 listeners. Call emitter.setMaxListeners(n) to suppress false positives or fix actual leaks.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
|
|
2179
|
+
|
|
2180
|
+
// REL-026: awaited ORM/driver calls with no try/catch or .catch() in sight.
rules.push({
  id: 'REL-026', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Database query without try/catch or .catch()',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const dbAwait = /await\s+(?:db\.|prisma\.|sequelize\.|mongoose\.|knex\.)/;
    const handled = /try\s*\{|\.catch\s*\(/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      for (const [idx, line] of fileLines.entries()) {
        if (commentLine.test(line) || !dbAwait.test(line)) continue;
        // Look 10 lines back and 5 forward for any error handling construct.
        const ctx = fileLines.slice(Math.max(0, idx - 10), Math.min(fileLines.length, idx + 5)).join('\n');
        if (!handled.test(ctx)) {
          findings.push({ ruleId: 'REL-026', category: 'reliability', severity: 'high', title: 'DB query without error handling — unhandled rejection on connection failure', description: 'Database queries can fail due to connection issues, constraints, or timeouts. Always wrap in try/catch or chain .catch().', file: fp, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2201
|
+
|
|
2202
|
+
// REL-027: floating promise — a bare statement calling a likely-async function
// without await, .then, .catch, or return silently drops rejections.
rules.push({
  id: 'REL-027', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Floating promise — async function called without await or .catch()',
  check({ files }) {
    const findings = [];
    const bareCall = /^\s*\w+(?:\.\w+)*\s*\([^)]*\)\s*;/;
    const handled = /await|\.catch|\.then|return/;
    const asyncNamed = /(?:save|create|update|delete|fetch|load|init|connect|send|emit|publish|process)\s*\(/i;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        // Detect patterns like: someAsyncFn(); with no await/catch on the same line.
        if (!bareCall.test(lines[i]) || handled.test(lines[i])) continue;
        const ctx = lines.slice(Math.max(0, i - 3), i + 1).join('\n');
        if (/async\s+function|async\s*\(/.test(ctx.split('\n')[0]) || /await/.test(ctx)) continue;
        // FIX: the prefix was previously computed with c.indexOf(lines[i]), which finds the
        // FIRST occurrence of the line's text — wrong when the same line appears twice,
        // and O(file length) per line. Derive the prefix from the line index instead.
        const prefix = lines.slice(0, i).join('\n');
        // Only flag calls whose name suggests async work, in a file that uses async at all.
        if (asyncNamed.test(lines[i]) && /async/.test(prefix)) {
          findings.push({ ruleId: 'REL-027', category: 'reliability', severity: 'high', title: 'Potential floating promise — async call without await or error handling', description: 'Calling async functions without await or .catch() silently swallows errors. Always await or handle the returned promise.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2226
|
+
|
|
2227
|
+
// REL-028: network/DB client construction without any timeout option nearby.
rules.push({
  id: 'REL-028', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'HTTP/DB connection without timeout configured',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const clientCall = /(?:fetch|axios)\s*\(|new\s+(?:Pool|Client)\s*\(/;
    const timeoutOpt = /timeout\s*:|connectTimeout|requestTimeout/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      fileLines.forEach((line, idx) => {
        if (commentLine.test(line) || !clientCall.test(line)) return;
        // Scan the next 8 lines (options object) for a timeout setting.
        const ctx = fileLines.slice(idx, Math.min(fileLines.length, idx + 8)).join('\n');
        if (!timeoutOpt.test(ctx)) {
          findings.push({ ruleId: 'REL-028', category: 'reliability', severity: 'medium', title: 'HTTP/DB client without timeout — hangs indefinitely on unresponsive server', description: 'Network calls without timeouts hang forever if the server is slow or unresponsive. Set connectTimeout and requestTimeout options.', file: fp, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
|
|
2248
|
+
|
|
2249
|
+
// REL-029: project-wide advisory — HTTP calls exist but no retry machinery does.
rules.push({
  id: 'REL-029', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'External API call without retry logic',
  check({ files }) {
    const contents = [...files.values()];
    const hasFetch = contents.some((c) => /fetch\s*\(|axios\s*\(/.test(c));
    const hasRetry = contents.some((c) => /retry|p-retry|axios-retry|backoff|exponential/i.test(c));
    if (!hasFetch || hasRetry) return [];
    return [{ ruleId: 'REL-029', category: 'reliability', severity: 'low', title: 'External HTTP calls without retry logic — no resilience to transient failures', description: 'Network calls fail transiently. Implement retry with exponential backoff using p-retry or axios-retry for resilient external service calls.', fix: null }];
  },
});
|
|
2262
|
+
|
|
2263
|
+
// REL-030: catch blocks whose body is empty (or only a comment) swallow errors.
rules.push({
  id: 'REL-030', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Empty catch block swallows errors silently',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const catchOpen = /\}\s*catch\s*\([^)]*\)\s*\{/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      for (const [idx, line] of fileLines.entries()) {
        if (commentLine.test(line) || !catchOpen.test(line)) continue;
        const catchBody = fileLines.slice(idx + 1, Math.min(fileLines.length, idx + 5)).join('\n');
        // Empty: first non-blank line closes the block, or a lone comment precedes the close.
        const bodyIsEmpty = /^\s*\}/.test(catchBody.trim().split('\n')[0]) || /^\s*\/\/.*\n\s*\}/.test(catchBody);
        if (bodyIsEmpty) {
          findings.push({ ruleId: 'REL-030', category: 'reliability', severity: 'medium', title: 'Empty catch block — errors silently swallowed', description: 'An empty catch block hides errors and makes debugging impossible. At minimum, log the error: catch(e) { logger.error(e); throw e; }', file: fp, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2284
|
+
|
|
2285
|
+
// REL-031 through REL-055: Additional reliability rules
|
|
2286
|
+
|
|
2287
|
+
// REL-031: Express apps should expose a liveness endpoint for orchestrators.
rules.push({
  id: 'REL-031', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No /health endpoint in Express app',
  check({ files }) {
    const contents = [...files.values()];
    const usesExpress = contents.some((c) => /require.*express|from.*express/i.test(c));
    const hasHealthRoute = contents.some((c) => /\/health|\/healthz|\/ping/i.test(c));
    if (!usesExpress || hasHealthRoute) return [];
    return [{ ruleId: 'REL-031', category: 'reliability', severity: 'medium', title: 'No /health endpoint — load balancers and Kubernetes cannot check liveness', description: 'Add a /health endpoint that returns 200 OK when the service is ready. Required by most orchestration platforms for health checking.', fix: null }];
  },
});
|
|
2300
|
+
|
|
2301
|
+
// REL-032: two or more awaited DB writes in one file with no transaction anywhere.
rules.push({
  id: 'REL-032', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Multi-step DB operations without transaction',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      // PERF FIX: this whole-file regex was previously re-evaluated inside the
      // per-line loop; it is loop-invariant, so test it once and skip the file
      // entirely when a transaction construct is present.
      if (/transaction|BEGIN|COMMIT|prisma\.\$transaction/i.test(c)) continue;
      const lines = c.split('\n');
      let writeCount = 0;
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        if (/await.*(?:create|update|delete|save)\s*\(/i.test(lines[i])) writeCount++;
        // Report at the line where the second write appears, then move on to the next file.
        if (writeCount > 1) {
          findings.push({ ruleId: 'REL-032', category: 'reliability', severity: 'high', title: 'Multiple DB write operations without transaction — partial failure leaves inconsistent state', description: 'Wrap multiple related write operations in a transaction to ensure atomicity. If any step fails, the transaction rolls back.', file: fp, line: i + 1, fix: null });
          break;
        }
      }
    }
    return findings;
  },
});
|
|
2322
|
+
|
|
2323
|
+
// REL-033: Array.forEach ignores promises, so async callbacks are never awaited.
rules.push({
  id: 'REL-033', category: 'reliability', severity: 'high', confidence: 'likely', title: 'await used inside Array.forEach — async not properly awaited',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const asyncForEach = /\.forEach\s*\(\s*async/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      c.split('\n').forEach((line, idx) => {
        if (commentLine.test(line)) return;
        if (!asyncForEach.test(line)) return;
        findings.push({ ruleId: 'REL-033', category: 'reliability', severity: 'high', title: 'async function inside .forEach() — awaiting forEach() does not wait for async callbacks', description: 'Array.forEach() ignores promise return values. Use for...of with await, or Promise.all(arr.map(async item => ...)) instead.', file: fp, line: idx + 1, fix: null });
      });
    }
    return findings;
  },
});
|
|
2341
|
+
|
|
2342
|
+
// REL-034: React apps without any error boundary crash the whole tree on render errors.
rules.push({
  id: 'REL-034', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'React application without error boundary',
  check({ files }) {
    const contents = [...files.values()];
    const usesReact = contents.some((c) => /from.*['"]react['"]/i.test(c));
    const hasBoundary = contents.some((c) => /componentDidCatch|ErrorBoundary|error.*boundary/i.test(c));
    if (!usesReact || hasBoundary) return [];
    return [{ ruleId: 'REL-034', category: 'reliability', severity: 'medium', title: 'No React error boundary — component errors unmount entire app', description: 'Add ErrorBoundary components to catch React rendering errors and display fallback UI instead of crashing the whole application.', fix: null }];
  },
});
|
|
2355
|
+
|
|
2356
|
+
// REL-035: useEffect that subscribes/listens/schedules must return a cleanup function.
rules.push({
  id: 'REL-035', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'useEffect with subscription/listener but no cleanup return',
  check({ files }) {
    const findings = [];
    const subscribes = /(addEventListener|subscribe|setInterval|\.on\s*\()/;
    // FIX: the old pattern only recognized arrow-function cleanups (`return () =>`);
    // effects returning a classic function expression (`return function () {...}`)
    // were falsely flagged. Accept both cleanup shapes.
    const hasCleanup = /return\s*\(?\s*\(\s*\)\s*=>|return\s+function\b/;
    for (const [fp, c] of files) {
      if (!fp.match(/\.(jsx|tsx)$/)) continue; // only React component files
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        if (!/useEffect\s*\(/.test(lines[i])) continue;
        // Inspect a 20-line window covering the effect body.
        const block = lines.slice(i, Math.min(lines.length, i + 20)).join('\n');
        if (subscribes.test(block) && !hasCleanup.test(block)) {
          findings.push({ ruleId: 'REL-035', category: 'reliability', severity: 'medium', title: 'useEffect with subscription but no cleanup — memory leak on unmount', description: 'useEffect with event listeners, subscriptions, or intervals must return a cleanup function to prevent memory leaks when the component unmounts.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2377
|
+
|
|
2378
|
+
// REL-036: async Express route handlers need try/catch or an async wrapper.
rules.push({
  id: 'REL-036', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Async route handler without error catching',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const asyncRoute = /(?:router|app)\.\w+\s*\([^,]+,\s*async\s*\(/;
    const guarded = /try\s*\{|\.catch\s*\(|asyncHandler|catchAsync/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      for (const [idx, line] of fileLines.entries()) {
        if (commentLine.test(line) || !asyncRoute.test(line)) continue;
        const block = fileLines.slice(idx, Math.min(fileLines.length, idx + 20)).join('\n');
        if (!guarded.test(block)) {
          findings.push({ ruleId: 'REL-036', category: 'reliability', severity: 'high', title: 'Async route handler without try/catch — unhandled rejection crashes server', description: 'Wrap async route handlers in try/catch or use an asyncHandler() wrapper to ensure errors are passed to Express error middleware.', file: fp, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2399
|
+
|
|
2400
|
+
// REL-037: servers should also drain connections on SIGINT (Ctrl+C in dev).
rules.push({
  id: 'REL-037', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'No SIGINT handler — Ctrl+C does not gracefully shutdown',
  check({ files }) {
    const contents = [...files.values()];
    const hasServer = contents.some((c) => /(?:app|server)\.listen\s*\(/.test(c));
    const hasSigint = contents.some((c) => /process\.on\s*\(\s*['"]SIGINT['"]/.test(c));
    if (!hasServer || hasSigint) return [];
    return [{ ruleId: 'REL-037', category: 'reliability', severity: 'low', title: 'No SIGINT handler — development Ctrl+C does not drain connections', description: 'Handle SIGINT to close the server gracefully during development. Without it, in-flight requests are abruptly terminated.', fix: null }];
  },
});
|
|
2413
|
+
|
|
2414
|
+
// REL-038: constructors cannot be async — await inside one means a misdesign.
rules.push({
  id: 'REL-038', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Constructor with async initialization — use factory pattern instead',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const ctorDecl = /constructor\s*\(/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      fileLines.forEach((line, idx) => {
        if (commentLine.test(line) || !ctorDecl.test(line)) return;
        // Scan a 15-line window of the constructor body for await.
        const block = fileLines.slice(idx, Math.min(fileLines.length, idx + 15)).join('\n');
        if (/await\s+/.test(block)) {
          findings.push({ ruleId: 'REL-038', category: 'reliability', severity: 'medium', title: 'await inside constructor — constructors cannot be async', description: 'Constructors cannot be async. Move async initialization to a static async factory method: static async create() { const obj = new Class(); await obj.init(); return obj; }', file: fp, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
|
|
2435
|
+
|
|
2436
|
+
// REL-039: DB connection setup should register an 'error' handler or a .catch().
rules.push({
  id: 'REL-039', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Database connection without error event handler',
  check({ files }) {
    const findings = [];
    const connects = /mongoose\.connect|createPool|MongoClient\.connect/i;
    const handlesErrors = /\.on\s*\(\s*['"]error['"]|\.catch\s*\(/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      if (!connects.test(c)) continue;
      if (!handlesErrors.test(c)) {
        findings.push({ ruleId: 'REL-039', category: 'reliability', severity: 'high', title: 'DB connection without error handler — connection errors crash unhandled', description: 'Add an error event handler to database connections to handle connection drops and authentication failures gracefully.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
|
|
2452
|
+
|
|
2453
|
+
// REL-040: retry logic without randomized delays causes synchronized retry storms.
rules.push({
  id: 'REL-040', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'Retry logic without jitter — thundering herd on recovery',
  check({ files }) {
    const findings = [];
    const retries = /retry|retryCount|maxRetries/i;
    const jittered = /jitter|Math\.random|randomDelay/i;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      if (retries.test(c) && !jittered.test(c)) {
        findings.push({ ruleId: 'REL-040', category: 'reliability', severity: 'low', title: 'Retry without jitter — coordinated retries create thundering herd', description: 'Add random jitter to retry delays to prevent synchronized retries from overwhelming a recovering service: delay = baseDelay * 2^attempt + Math.random() * 1000.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
|
|
2467
|
+
|
|
2468
|
+
// REL-041: calls to well-known third-party APIs should handle 429 responses.
rules.push({
  id: 'REL-041', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'External API call without handling 429 rate limit response',
  check({ files }) {
    const findings = [];
    const httpCall = /(fetch|axios)\s*\(/;
    const handles429 = /status.*429|statusCode.*429|429|TooManyRequests|rate.?limit/i;
    const knownApis = /openai|stripe|twilio|sendgrid|github.*api/i;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      if (!httpCall.test(c) || handles429.test(c)) continue;
      if (knownApis.test(c)) {
        findings.push({ ruleId: 'REL-041', category: 'reliability', severity: 'medium', title: 'Third-party API call without 429 rate limit handling', description: 'External APIs may return 429 Too Many Requests. Handle this response by implementing exponential backoff and respecting Retry-After headers.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
|
|
2484
|
+
|
|
2485
|
+
// REL-042 through REL-062: More reliability rules
|
|
2486
|
+
|
|
2487
|
+
// REL-042: Promise.all is fail-fast; suggest allSettled for independent work.
rules.push({
  id: 'REL-042', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Promise.all without per-promise error isolation',
  check({ files }) {
    const findings = [];
    const commentLine = /^\s*(\/\/|\/\*|\*)/;
    const promiseAll = /Promise\.all\s*\(/;
    const isolated = /Promise\.allSettled|\.catch/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const fileLines = c.split('\n');
      for (const [idx, line] of fileLines.entries()) {
        if (commentLine.test(line) || !promiseAll.test(line)) continue;
        // Check 2 lines back and 5 forward for allSettled or a .catch.
        const ctx = fileLines.slice(Math.max(0, idx - 2), Math.min(fileLines.length, idx + 5)).join('\n');
        if (!isolated.test(ctx)) {
          findings.push({ ruleId: 'REL-042', category: 'reliability', severity: 'medium', title: 'Promise.all rejects entirely if any promise fails — use Promise.allSettled for independent operations', description: 'Promise.all fails fast if any promise rejects, cancelling all results. Use Promise.allSettled when each operation is independent and partial results are acceptable.', file: fp, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2508
|
+
|
|
2509
|
+
// REL-043: soft-delete columns that are queried should carry a database index.
rules.push({
  id: 'REL-043', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Soft delete pattern without index on deletedAt field',
  check({ files }) {
    const findings = [];
    const softDelete = /deletedAt|deleted_at|softDelete/i;
    const queries = /findMany|findAll|SELECT/i;
    const indexed = /@@index.*deletedAt|index.*deleted_at|deletedAt.*@index/i;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      if (!softDelete.test(c) || !queries.test(c)) continue;
      if (!indexed.test(c)) {
        findings.push({ ruleId: 'REL-043', category: 'reliability', severity: 'medium', title: 'Soft delete pattern without index on deletedAt — WHERE deletedAt IS NULL causes full scan', description: 'Add a database index on deletedAt to optimize queries that filter soft-deleted records. Without it, every query scans all rows.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
|
|
2525
|
+
|
|
2526
|
+
// REL-044: process.env usage without any startup validation of required vars.
rules.push({
  id: 'REL-044', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Environment variables used without validation',
  check({ files }) {
    const contents = [...files.values()];
    const usesEnv = contents.some((c) => /process\.env\.\w+/i.test(c));
    const validatesEnv = contents.some((c) => /envalid|env-schema|zod.*env|dotenv.*safe|joi.*env|validateEnv/i.test(c));
    if (!usesEnv || validatesEnv) return [];
    return [{ ruleId: 'REL-044', category: 'reliability', severity: 'medium', title: 'Environment variables used without validation — missing vars cause runtime crashes', description: 'Validate all required environment variables at startup using envalid, dotenv-safe, or a Zod schema. This catches missing configs before they cause production failures.', fix: null }];
  },
});
|
|
2539
|
+
|
|
2540
|
+
// REL-045: module-level mutable arrays/objects mutated inside request handlers
// are shared across every concurrent request — a classic state-bleed bug.
rules.push({
  id: 'REL-045', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Module-level mutable state shared across requests',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      // Only request-handling files are in scope for this heuristic.
      if (!fp.match(/(?:route|handler|controller|middleware)/i)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        // Module-level `let x = []` / `let x = {` that later gets pushed/indexed.
        if (!/^let\s+\w+\s*=\s*\[\s*\]|^let\s+\w+\s*=\s*\{/.test(lines[i])) continue;
        const varName = (lines[i].match(/^let\s+(\w+)/) || [])[1];
        if (!varName) continue;
        // FIX: the remainder of the file was previously obtained via
        // c.indexOf(lines[i]), which matches the FIRST occurrence of the line's
        // text — wrong when the same declaration text repeats. Slice by index.
        const rest = lines.slice(i + 1).join('\n');
        if (rest.match(new RegExp(`\\b${varName}\\s*\\.\\s*push|${varName}\\s*\\[`)) && rest.match(/req,\s*res|handler|router\./)) {
          findings.push({ ruleId: 'REL-045', category: 'reliability', severity: 'high', title: `Module-level mutable "${varName}" modified in request handler — shared across all requests`, description: 'Module-level state is shared across all concurrent requests. Use request-scoped storage or database for per-request state.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2566
|
+
|
|
2567
|
+
// REL-046: setTimeout used for time-sensitive operations
rules.push({
  id: 'REL-046', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'setTimeout used for time-sensitive operation — imprecise timing',
  // Looks at a 5-line window starting at each setTimeout( call for
  // payment/session/lock keywords that indicate a durability-critical timeout.
  check({ files }) {
    const COMMENT_LINE = /^\s*(\/\/|\/\*|\*)/;
    const TIMER_CALL = /setTimeout\s*\(/;
    const SENSITIVE = /payment|charge|expire|session.*timeout|lock.*timeout/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (COMMENT_LINE.test(rows[n])) continue;
        if (!TIMER_CALL.test(rows[n])) continue;
        const windowText = rows.slice(n, Math.min(rows.length, n + 5)).join('\n');
        if (SENSITIVE.test(windowText)) {
          results.push({ ruleId: 'REL-046', category: 'reliability', severity: 'medium', title: 'setTimeout for time-sensitive payment/session/lock operation — use DB-persisted expiry', description: 'setTimeout is not reliable for critical timeouts — it resets on restart and drifts. Use a database-persisted expiry timestamp instead.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2588
|
+
|
|
2589
|
+
// REL-047: Missing circuit breaker for external services
rules.push({
  id: 'REL-047', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'External service calls without circuit breaker pattern',
  // Project-wide heuristic: HTTP client usage with no circuit-breaker library in sight.
  check({ files }) {
    const sources = [...files.values()];
    const callsExternal = sources.some((src) => /fetch|axios|http\.request/i.test(src));
    const usesBreaker = sources.some((src) => /opossum|brakes|circuit.?breaker|CircuitBreaker|hystrix/i.test(src));
    if (!callsExternal || usesBreaker) return [];
    return [{ ruleId: 'REL-047', category: 'reliability', severity: 'medium', title: 'External HTTP calls without circuit breaker — cascading failures possible', description: 'Implement the circuit breaker pattern using opossum or similar library. When an external service fails repeatedly, open the circuit to fail fast rather than queue up timeouts.', fix: null }];
  },
});
|
|
2602
|
+
|
|
2603
|
+
// REL-048: Missing .finally() on critical promises
rules.push({
  id: 'REL-048', category: 'reliability', severity: 'low', confidence: 'suggestion', title: 'Promise chain without .finally() for cleanup',
  // Single-line heuristic: a .then(...).catch( chain mentioning a lock/connection
  // resource on the same line, with no .finally( on that line.
  check({ files }) {
    const COMMENT_LINE = /^\s*(\/\/|\/\*|\*)/;
    const THEN_CATCH = /\.then\s*\([^)]+\)\.catch\s*\(/;
    const RESOURCE = /lock|mutex|connection|transaction|acquire/i;
    const HAS_FINALLY = /\.finally\s*\(/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        const row = rows[n];
        if (COMMENT_LINE.test(row)) continue;
        if (THEN_CATCH.test(row) && RESOURCE.test(row) && !HAS_FINALLY.test(row)) {
          results.push({ ruleId: 'REL-048', category: 'reliability', severity: 'low', title: 'Promise with lock/connection but no .finally() cleanup', description: 'Add .finally() to release locks and connections even if the promise rejects. Avoids resource leaks on failure.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2623
|
+
|
|
2624
|
+
// REL-049: Unconditional throw in middleware
rules.push({
  id: 'REL-049', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Middleware that always throws — will break all routes',
  // In files whose path mentions "middleware": a statement-level `throw new X`
  // with no if/catch/ternary in the previous 5 lines is assumed unconditional.
  check({ files }) {
    const COMMENT_LINE = /^\s*(\/\/|\/\*|\*)/;
    const BARE_THROW = /^\s*throw\s+new\s+\w+/;
    const GUARDED = /if\s*\(|catch\s*\(|\?\s*/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (!path.match(/middleware/i)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (COMMENT_LINE.test(rows[n])) continue;
        if (!BARE_THROW.test(rows[n])) continue;
        const preceding = rows.slice(Math.max(0, n - 5), n).join('\n');
        if (!GUARDED.test(preceding)) {
          results.push({ ruleId: 'REL-049', category: 'reliability', severity: 'medium', title: 'Unconditional throw in middleware — always throws regardless of condition', description: 'An unconditional throw in middleware will break all requests handled by it. Wrap in a condition to only throw when the error condition is met.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2646
|
+
|
|
2647
|
+
// REL-050: Missing database connection check
rules.push({
  id: 'REL-050', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Application starts without verifying database connection',
  // Project-wide: a DB client plus an HTTP listen() with no visible
  // connectivity check (authenticate/ping/connect().then/db.ready).
  check({ files }) {
    const sources = [...files.values()];
    const usesDb = sources.some((src) => /mongoose\.connect|sequelize\.|pg\.Pool|mysql\.createPool/i.test(src));
    const checksConn = sources.some((src) => /\.authenticate\s*\(\)|\.ping\s*\(\)|\.connect\s*\(\).*\.then|db\.ready/i.test(src));
    const startsServer = sources.some((src) => /app\.listen|server\.listen/i.test(src));
    if (!usesDb || !startsServer || checksConn) return [];
    return [{ ruleId: 'REL-050', category: 'reliability', severity: 'medium', title: 'Server starts without verifying database connectivity', description: 'Verify the database connection before starting the HTTP server to fail fast on misconfiguration. Call db.authenticate() or a health query before app.listen().', fix: null }];
  },
});
|
|
2661
|
+
|
|
2662
|
+
// REL-051: Missing error handling in Express middleware
rules.push({
  id: 'REL-051', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Express middleware without try/catch — unhandled async errors',
  // Flags async middleware registered via app.use()/router.use() whose next
  // 20 lines contain neither try { nor .catch(.
  // BUG FIX: the old pattern only matched `async function` for router.use and
  // `async (` for app.use, so `router.use(async (req, res, next) => ...)` — the
  // most common form — was never flagged. The unified pattern also accepts
  // `async(` with no space (a valid async arrow).
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/(?:router|app)\.use\s*\(\s*async(?:\s+function\b|\s*\()/.test(lines[i])) {
          const body = lines.slice(i, Math.min(lines.length, i + 20)).join('\n');
          if (!/try\s*\{|\.catch\s*\(/.test(body)) findings.push({ ruleId: 'REL-051', category: 'reliability', severity: 'high', title: 'Async Express middleware without try/catch — errors not forwarded to next(err)', description: 'Wrap async middleware in try/catch and call next(err) on failure.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
|
|
2680
|
+
|
|
2681
|
+
// REL-052: Promise rejection in event listener not handled
rules.push({
  id: 'REL-052', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Async event listener without error handling',
  // Flags emitter.on('event', async ...) whose next 15 lines contain
  // neither try { nor .catch( — a rejection there is unobservable.
  check({ files }) {
    const ASYNC_LISTENER = /\.on\s*\(['"][^'"]+['"]\s*,\s*async\s+/;
    const HANDLES_ERROR = /try\s*\{|\.catch\s*\(/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!ASYNC_LISTENER.test(rows[n])) continue;
        const handlerText = rows.slice(n, Math.min(rows.length, n + 15)).join('\n');
        if (!HANDLES_ERROR.test(handlerText)) {
          results.push({ ruleId: 'REL-052', category: 'reliability', severity: 'high', title: 'Async event listener without try/catch — unhandled promise rejection', description: 'Wrap async event handlers in try/catch to prevent silent failures.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2699
|
+
|
|
2700
|
+
// REL-053: Missing graceful shutdown on database
rules.push({
  id: 'REL-053', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'No database disconnect on process SIGTERM',
  // Per-file: creates a DB/Redis client but never mentions SIGTERM.
  check({ files }) {
    const CREATES_CLIENT = /mongoose\.connect|pg\.Pool|redis\.createClient|sequelize\s*=\s*new/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (!CREATES_CLIENT.test(source)) continue;
      if (/SIGTERM/.test(source)) continue;
      results.push({ ruleId: 'REL-053', category: 'reliability', severity: 'medium', title: 'Database connection without SIGTERM cleanup', description: 'Add SIGTERM handler to gracefully close database connections before process exit.', file: path, fix: null });
    }
    return results;
  },
});
|
|
2714
|
+
|
|
2715
|
+
// REL-054: Missing validation on environment variables at startup
rules.push({
  id: 'REL-054', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Required environment variables not validated at startup',
  // Per-file: reads process.env AND starts a server, but shows no validation
  // library and no hand-rolled guard (|| ... throw / if (!process.env...).
  check({ files }) {
    const READS_ENV = /process\.env\.\w+/;
    const VALIDATION_LIB = /(?:joi|zod|yup|envalid|dotenv-safe)/;
    const STARTS_SERVER = /app\.listen|server\.listen|createServer/;
    const MANUAL_GUARD = /process\.env\.\w+\s*\|\|.*throw|if\s*\(\s*!process\.env/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (READS_ENV.test(source) && !VALIDATION_LIB.test(source) && STARTS_SERVER.test(source) && !MANUAL_GUARD.test(source)) {
        results.push({ ruleId: 'REL-054', category: 'reliability', severity: 'high', title: 'Environment variables used without validation — app may fail silently on missing config', description: 'Validate all required environment variables at startup using envalid, joi, or zod.', file: path, fix: null });
      }
    }
    return results;
  },
});
|
|
2729
|
+
|
|
2730
|
+
// REL-055: Unhandled stream error events
rules.push({
  id: 'REL-055', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Stream created without error event handler',
  // Flags createReadStream/createWriteStream/pipe calls whose next 10 lines
  // never attach an 'error' listener.
  check({ files }) {
    const STREAM_CALL = /(?:createReadStream|createWriteStream|pipe)\s*\(/;
    const ERROR_LISTENER = /\.on\s*\(\s*['"]error['"]/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!STREAM_CALL.test(rows[n])) continue;
        const windowText = rows.slice(n, Math.min(rows.length, n + 10)).join('\n');
        if (!ERROR_LISTENER.test(windowText)) {
          results.push({ ruleId: 'REL-055', category: 'reliability', severity: 'high', title: 'Stream without error handler — unhandled error may crash process', description: 'Always attach an error event handler to streams: stream.on("error", handler).', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2748
|
+
|
|
2749
|
+
// REL-056: Using deprecated crypto.createCipher
rules.push({
  id: 'REL-056', category: 'reliability', severity: 'high', confidence: 'likely', title: 'crypto.createCipher is deprecated — use createCipheriv',
  // Note: `crypto.createCipheriv(` cannot match `createCipher\s*\(` because the
  // "iv" sits between the name and the paren, so no extra exclusion is needed.
  // BUG FIX: the old pattern placed `(?!iv)` AFTER the "(", which wrongly
  // suppressed findings whose first argument merely started with the letters
  // "iv" (e.g. crypto.createCipher(ivAlgorithm, key)).
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/crypto\.createCipher\s*\(/.test(lines[i])) findings.push({ ruleId: 'REL-056', category: 'reliability', severity: 'high', title: 'crypto.createCipher() is deprecated — use createCipheriv() with explicit IV', description: 'Replace crypto.createCipher() with crypto.createCipheriv() which requires an explicit IV parameter.', file: fp, line: i + 1, fix: null });
      }
    }
    return findings;
  },
});
|
|
2764
|
+
|
|
2765
|
+
// REL-057: Callback called multiple times
rules.push({
  id: 'REL-057', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Callback may be invoked multiple times in function',
  // Heuristic: for a function taking a callback-style parameter, count
  // invocations of that name in the next 30 lines; 3+ suggests the function
  // can fall through and call it more than once.
  check({ files }) {
    const TAKES_CALLBACK = /function\s*\w*\s*\([^)]*(?:callback|cb|done|next)\b/;
    const CALLBACK_NAME = /\b(callback|cb|done|next)\b/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!TAKES_CALLBACK.test(rows[n])) continue;
        const cbName = (rows[n].match(CALLBACK_NAME) || [])[1];
        if (!cbName) continue;
        const bodyText = rows.slice(n, Math.min(rows.length, n + 30)).join('\n');
        const invocations = (bodyText.match(new RegExp(`\\b${cbName}\\s*\\(`, 'g')) || []).length;
        if (invocations >= 3) {
          results.push({ ruleId: 'REL-057', category: 'reliability', severity: 'high', title: `Callback '${cbName}' may be called multiple times — double invocation bug`, description: 'Use return after each callback invocation to prevent the function from continuing execution.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2788
|
+
|
|
2789
|
+
// REL-058: No timeout on external HTTP requests
rules.push({
  id: 'REL-058', category: 'reliability', severity: 'high', confidence: 'likely', title: 'HTTP request to external service without timeout',
  // Flags axios verb calls whose 5-line window never mentions "timeout".
  check({ files }) {
    const AXIOS_CALL = /axios\.(?:get|post|put|delete|request)\s*\(/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!AXIOS_CALL.test(rows[n])) continue;
        const windowText = rows.slice(n, Math.min(rows.length, n + 5)).join('\n');
        if (!/timeout/.test(windowText)) {
          results.push({ ruleId: 'REL-058', category: 'reliability', severity: 'high', title: 'axios request without timeout — may hang indefinitely', description: 'Add a timeout option to all axios requests: axios.get(url, { timeout: 5000 }).', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2807
|
+
|
|
2808
|
+
// REL-059: Global state mutation in request handler
rules.push({
  id: 'REL-059', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Module-level variable mutated in request handler — shared state',
  // Collects column-0 let/var names from the first 50 lines; once a route
  // registration is seen, any assignment to one of those names is flagged
  // (one finding per file, then stop scanning that file).
  check({ files }) {
    const findings = [];
    // Cache compiled per-variable mutation regexes — the old code rebuilt a
    // RegExp for every variable on every line.
    // BUG FIX: the old pattern `=(?!=)` also matched arrow functions
    // (`handler => ...`), flagging them as assignments; `=(?![=>])` excludes
    // both `==`/`===` and `=>`.
    const mutationRes = new Map();
    const mutationRe = (name) => {
      let re = mutationRes.get(name);
      if (!re) {
        re = new RegExp(`\\b${name}\\s*(?:[+\\-*]=|=(?![=>]))`);
        mutationRes.set(name, re);
      }
      return re;
    };
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const topVars = [];
      const lines = c.split('\n');
      let inHandler = false;
      for (let i = 0; i < lines.length; i++) {
        const varMatch = lines[i].match(/^(?:let|var)\s+(\w+)\s*=/);
        if (varMatch && i < 50) topVars.push(varMatch[1]);
        if (/(?:router|app)\.(?:get|post|put|delete)\s*\(/.test(lines[i])) inHandler = true;
        if (inHandler && topVars.some((v) => mutationRe(v).test(lines[i]))) {
          findings.push({ ruleId: 'REL-059', category: 'reliability', severity: 'high', title: 'Module-level variable mutated in request handler — race condition under concurrent requests', description: 'Avoid mutating module-level state in request handlers. Use request-scoped variables or a proper data store.', file: fp, line: i + 1, fix: null });
          break;
        }
      }
    }
    return findings;
  },
});
|
|
2831
|
+
|
|
2832
|
+
// REL-060: Missing connection error handler on Redis client
rules.push({
  id: 'REL-060', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Redis client without error event handler',
  // Only runs when a redis/ioredis dependency is declared; flags files that
  // construct a client but never attach an 'error' listener.
  check({ files, stack }) {
    const hasRedisDep = Boolean(stack.dependencies?.['redis'] || stack.dependencies?.['ioredis']);
    if (!hasRedisDep) return [];
    const CREATES_CLIENT = /createClient\s*\(|new\s+Redis\s*\(/;
    const ERROR_LISTENER = /\.on\s*\(\s*['"]error['"]/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (CREATES_CLIENT.test(source) && !ERROR_LISTENER.test(source)) {
        results.push({ ruleId: 'REL-060', category: 'reliability', severity: 'high', title: 'Redis client without error handler — connection errors will crash process', description: 'Add client.on("error", handler) to handle Redis connection errors gracefully.', file: path, fix: null });
      }
    }
    return results;
  },
});
|
|
2847
|
+
|
|
2848
|
+
// REL-061: Mongoose connection without error handler
rules.push({
  id: 'REL-061', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Mongoose connection without error event handler',
  // Only runs when mongoose is a declared dependency; flags connect() calls
  // with neither a connection 'error' listener nor a .catch().
  check({ files, stack }) {
    if (!stack.dependencies?.['mongoose']) return [];
    const CONNECTS = /mongoose\.connect\s*\(/;
    const HANDLES_ERROR = /mongoose\.connection\.on\s*\(\s*['"]error['"]|\.catch\s*\(/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (CONNECTS.test(source) && !HANDLES_ERROR.test(source)) {
        results.push({ ruleId: 'REL-061', category: 'reliability', severity: 'high', title: 'mongoose.connect() without error handling', description: 'Add .catch() or mongoose.connection.on("error") to handle connection failures.', file: path, fix: null });
      }
    }
    return results;
  },
});
|
|
2863
|
+
|
|
2864
|
+
// REL-062: Missing null check on configuration values
rules.push({
  id: 'REL-062', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Configuration value used without null/undefined guard',
  // Reports at most one line per file (break after the first match) to keep
  // noise down on files with many config accesses.
  check({ files }) {
    const NESTED_ACCESS = /config\.\w+\.\w+/;
    const OPTIONAL_CHAIN = /config\?\.|config\.\w+\?\./;
    const GUARDED = /if\s*\(config|config &&/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        const row = rows[n];
        if (NESTED_ACCESS.test(row) && !OPTIONAL_CHAIN.test(row) && !GUARDED.test(row)) {
          results.push({ ruleId: 'REL-062', category: 'reliability', severity: 'medium', title: 'Config property access without null guard — may throw on missing config', description: 'Use optional chaining (config?.section?.key) or validate config at startup.', file: path, line: n + 1, fix: null });
          break;
        }
      }
    }
    return results;
  },
});
|
|
2882
|
+
|
|
2883
|
+
// REL-063: Unclosed database cursor/transaction
rules.push({
  id: 'REL-063', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Database transaction started without try/finally',
  // Flags transaction starts whose next 30 lines contain no finally block.
  check({ files }) {
    const STARTS_TX = /(?:beginTransaction|startTransaction|transaction\.begin|sequelize\.transaction)\s*\(/;
    const HAS_FINALLY = /finally\s*\{/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!STARTS_TX.test(rows[n])) continue;
        const bodyText = rows.slice(n, Math.min(rows.length, n + 30)).join('\n');
        if (!HAS_FINALLY.test(bodyText)) {
          results.push({ ruleId: 'REL-063', category: 'reliability', severity: 'high', title: 'Database transaction without try/finally — may not rollback on error', description: 'Wrap transactions in try/catch/finally and call rollback() in the finally block.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2901
|
+
|
|
2902
|
+
// REL-064: Missing fallback value for external service response
rules.push({
  id: 'REL-064', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'External API response used without fallback/default',
  // Flags `const x = await axios...` (plain or destructured) whose 5-line
  // window shows no fallback, default, or error handling.
  check({ files }) {
    const DESTRUCTURED = /const\s+\{[^}]+\}\s*=\s*(?:await\s+)?axios\./;
    const PLAIN = /const\s+\w+\s*=\s*(?:await\s+)?axios\./;
    const HAS_FALLBACK = /\|\||fallback|default|catch|try/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!DESTRUCTURED.test(rows[n]) && !PLAIN.test(rows[n])) continue;
        const windowText = rows.slice(n, Math.min(rows.length, n + 5)).join('\n');
        if (!HAS_FALLBACK.test(windowText)) {
          results.push({ ruleId: 'REL-064', category: 'reliability', severity: 'medium', title: 'External service response without fallback — may fail without graceful degradation', description: 'Add default values or fallback behavior for external API responses.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2920
|
+
|
|
2921
|
+
// REL-065: Missing port conflict handling
rules.push({
  id: 'REL-065', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Server listen() without EADDRINUSE error handler',
  // Per-file: calls .listen() but never references EADDRINUSE anywhere.
  check({ files }) {
    const LISTENS = /\.listen\s*\(/;
    const HANDLES_EADDRINUSE = /EADDRINUSE|address already in use/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (LISTENS.test(source) && !HANDLES_EADDRINUSE.test(source)) {
        results.push({ ruleId: 'REL-065', category: 'reliability', severity: 'medium', title: 'server.listen() without EADDRINUSE handler — port conflicts cause unhandled crash', description: 'Add server.on("error") handler to detect EADDRINUSE and either retry on another port or exit cleanly.', file: path, fix: null });
      }
    }
    return results;
  },
});
|
|
2935
|
+
|
|
2936
|
+
// REL-066: Child process spawn without error handling
rules.push({
  id: 'REL-066', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Child process without error event handler',
  // Flags spawn/fork/execFile calls whose next 10 lines attach no 'error'
  // listener and no .catch().
  check({ files }) {
    const SPAWN_CALL = /(?:spawn|fork|execFile)\s*\(/;
    const HANDLES_ERROR = /\.on\s*\(\s*['"]error['"]|\.catch\s*\(/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!SPAWN_CALL.test(rows[n])) continue;
        const windowText = rows.slice(n, Math.min(rows.length, n + 10)).join('\n');
        if (!HANDLES_ERROR.test(windowText)) {
          results.push({ ruleId: 'REL-066', category: 'reliability', severity: 'high', title: 'Child process without error handler — spawn errors are silently ignored', description: 'Add child.on("error") handler to detect spawn failures.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2954
|
+
|
|
2955
|
+
// REL-067: Race condition with shared file access
rules.push({
  id: 'REL-067', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'File existence check followed by operation — TOCTOU race condition',
  // Flags fs.existsSync/fs.access followed within 5 lines by a file operation
  // — the classic time-of-check-to-time-of-use pattern.
  check({ files }) {
    const EXISTENCE_CHECK = /fs\.(?:existsSync|access(?:Sync)?)\s*\(/;
    const FILE_OP = /fs\.(?:readFile|writeFile|unlink|rename)/;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!EXISTENCE_CHECK.test(rows[n])) continue;
        const windowText = rows.slice(n, Math.min(rows.length, n + 5)).join('\n');
        if (FILE_OP.test(windowText)) {
          results.push({ ruleId: 'REL-067', category: 'reliability', severity: 'medium', title: 'Check-then-act file pattern — TOCTOU race condition', description: 'Avoid check-then-act patterns with files. Use try/catch on the operation directly instead of pre-checking existence.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
2973
|
+
|
|
2974
|
+
// REL-068: Lack of idempotency in critical operations
rules.push({
  id: 'REL-068', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Payment/billing operation without idempotency key',
  // Per-file: uses a payment SDK but never mentions idempotency.
  check({ files }) {
    const PAYMENT_SDK = /stripe\.|paypal\.|braintree\./i;
    const IDEMPOTENT = /idempotency|idempotencyKey|idempotent/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (PAYMENT_SDK.test(source) && !IDEMPOTENT.test(source)) {
        results.push({ ruleId: 'REL-068', category: 'reliability', severity: 'medium', title: 'Payment API calls without idempotency key — duplicate charges on retry', description: 'Pass idempotency keys to payment API calls to prevent duplicate transactions on network retries.', file: path, fix: null });
      }
    }
    return results;
  },
});
|
|
2988
|
+
|
|
2989
|
+
// REL-069: Missing dead letter queue configuration
rules.push({
  id: 'REL-069', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Message queue consumer without dead letter queue',
  // Per-file: mentions a queue system and consumer code but no DLQ config.
  check({ files }) {
    const QUEUE_SYSTEM = /(?:sqs|rabbitmq|amqp|kafka)\b/i;
    const CONSUMES = /consume|subscribe|consumer/i;
    const HAS_DLQ = /dead.?letter|dlq|DLQ|deadletter/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      if (QUEUE_SYSTEM.test(source) && CONSUMES.test(source) && !HAS_DLQ.test(source)) {
        results.push({ ruleId: 'REL-069', category: 'reliability', severity: 'medium', title: 'Message queue consumer without dead letter queue configuration', description: 'Configure a dead letter queue for failed messages to prevent infinite retry loops and data loss.', file: path, fix: null });
      }
    }
    return results;
  },
});
|
|
3003
|
+
|
|
3004
|
+
// REL-070: Missing input validation on webhook payloads
rules.push({
  id: 'REL-070', category: 'reliability', severity: 'high', confidence: 'likely', title: 'Webhook handler without payload signature verification',
  // Flags POST routes whose path mentions "webhook" when the next 20 lines
  // show no signature/HMAC verification keywords.
  check({ files }) {
    const WEBHOOK_ROUTE = /(?:router|app)\.post\s*\([^)]*webhook/i;
    const VERIFIES = /signature|hmac|secret|x-hub-signature|stripe-signature|verify/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!WEBHOOK_ROUTE.test(rows[n])) continue;
        const handlerText = rows.slice(n, Math.min(rows.length, n + 20)).join('\n');
        if (!VERIFIES.test(handlerText)) {
          results.push({ ruleId: 'REL-070', category: 'reliability', severity: 'high', title: 'Webhook endpoint without signature verification', description: 'Verify webhook payloads using HMAC signatures to prevent spoofed events.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|
|
3022
|
+
|
|
3023
|
+
// REL-071: Missing health check dependencies
rules.push({
  id: 'REL-071', category: 'reliability', severity: 'medium', confidence: 'likely', title: 'Health check endpoint does not check DB/cache connectivity',
  // Flags GET health/ping/status routes whose next 15 lines touch no
  // database, cache, or pool object.
  check({ files }) {
    const HEALTH_ROUTE = /(?:router|app)\.get\s*\([^)]*(?:health|ping|status)['"]/i;
    const CHECKS_DEPS = /db\.|mongoose\.|redis\.|sequelize\.|pool\.|ping/i;
    const results = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      for (let n = 0; n < rows.length; n++) {
        if (!HEALTH_ROUTE.test(rows[n])) continue;
        const handlerText = rows.slice(n, Math.min(rows.length, n + 15)).join('\n');
        if (!CHECKS_DEPS.test(handlerText)) {
          results.push({ ruleId: 'REL-071', category: 'reliability', severity: 'medium', title: 'Health check endpoint does not verify dependency connectivity', description: 'Include database and cache connectivity checks in health endpoints for accurate load balancer health status.', file: path, line: n + 1, fix: null });
        }
      }
    }
    return results;
  },
});
|