getdoorman 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. package/LICENSE +21 -0
  2. package/README.md +181 -0
  3. package/bin/doorman.js +444 -0
  4. package/package.json +74 -0
  5. package/src/ai-fixer.js +559 -0
  6. package/src/ast-scanner.js +434 -0
  7. package/src/auth.js +149 -0
  8. package/src/baseline.js +48 -0
  9. package/src/compliance.js +539 -0
  10. package/src/config.js +466 -0
  11. package/src/custom-rules.js +32 -0
  12. package/src/dashboard.js +202 -0
  13. package/src/detector.js +142 -0
  14. package/src/fix-engine.js +48 -0
  15. package/src/fix-registry-extra.js +95 -0
  16. package/src/fix-registry-go-rust.js +77 -0
  17. package/src/fix-registry-java-csharp.js +77 -0
  18. package/src/fix-registry-js.js +99 -0
  19. package/src/fix-registry-mcp-ai.js +57 -0
  20. package/src/fix-registry-python.js +87 -0
  21. package/src/fixer-ruby-php.js +608 -0
  22. package/src/fixer.js +2113 -0
  23. package/src/hooks.js +115 -0
  24. package/src/ignore.js +176 -0
  25. package/src/index.js +384 -0
  26. package/src/metrics.js +126 -0
  27. package/src/monorepo.js +65 -0
  28. package/src/presets.js +54 -0
  29. package/src/reporter.js +975 -0
  30. package/src/rule-worker.js +36 -0
  31. package/src/rules/ast-rules.js +756 -0
  32. package/src/rules/bugs/accessibility.js +235 -0
  33. package/src/rules/bugs/ai-codegen-fixable.js +172 -0
  34. package/src/rules/bugs/ai-codegen.js +365 -0
  35. package/src/rules/bugs/code-smell-bugs.js +247 -0
  36. package/src/rules/bugs/crypto-bugs.js +195 -0
  37. package/src/rules/bugs/docker-bugs.js +158 -0
  38. package/src/rules/bugs/general.js +361 -0
  39. package/src/rules/bugs/go-bugs.js +279 -0
  40. package/src/rules/bugs/index.js +73 -0
  41. package/src/rules/bugs/js-api.js +257 -0
  42. package/src/rules/bugs/js-array-object.js +210 -0
  43. package/src/rules/bugs/js-async-fixable.js +223 -0
  44. package/src/rules/bugs/js-async.js +211 -0
  45. package/src/rules/bugs/js-closure-scope.js +182 -0
  46. package/src/rules/bugs/js-database.js +203 -0
  47. package/src/rules/bugs/js-error-handling.js +148 -0
  48. package/src/rules/bugs/js-logic.js +261 -0
  49. package/src/rules/bugs/js-memory.js +214 -0
  50. package/src/rules/bugs/js-node.js +361 -0
  51. package/src/rules/bugs/js-react.js +373 -0
  52. package/src/rules/bugs/js-regex.js +200 -0
  53. package/src/rules/bugs/js-state.js +272 -0
  54. package/src/rules/bugs/js-type-coercion.js +318 -0
  55. package/src/rules/bugs/nextjs-bugs.js +242 -0
  56. package/src/rules/bugs/nextjs-fixable.js +120 -0
  57. package/src/rules/bugs/node-fixable.js +178 -0
  58. package/src/rules/bugs/python-advanced.js +245 -0
  59. package/src/rules/bugs/python-fixable.js +98 -0
  60. package/src/rules/bugs/python.js +284 -0
  61. package/src/rules/bugs/react-fixable.js +207 -0
  62. package/src/rules/bugs/ruby-bugs.js +182 -0
  63. package/src/rules/bugs/shell-bugs.js +181 -0
  64. package/src/rules/bugs/silent-failures.js +261 -0
  65. package/src/rules/bugs/ts-bugs.js +235 -0
  66. package/src/rules/bugs/unused-vars.js +65 -0
  67. package/src/rules/compliance/accessibility-ext.js +468 -0
  68. package/src/rules/compliance/education.js +322 -0
  69. package/src/rules/compliance/financial.js +421 -0
  70. package/src/rules/compliance/frameworks.js +507 -0
  71. package/src/rules/compliance/healthcare.js +520 -0
  72. package/src/rules/compliance/index.js +2714 -0
  73. package/src/rules/compliance/regional-eu.js +480 -0
  74. package/src/rules/compliance/regional-international.js +903 -0
  75. package/src/rules/cost/index.js +1993 -0
  76. package/src/rules/data/index.js +2503 -0
  77. package/src/rules/dependencies/index.js +1684 -0
  78. package/src/rules/deployment/index.js +2050 -0
  79. package/src/rules/index.js +71 -0
  80. package/src/rules/infrastructure/index.js +3048 -0
  81. package/src/rules/performance/index.js +3455 -0
  82. package/src/rules/quality/index.js +3175 -0
  83. package/src/rules/reliability/index.js +3040 -0
  84. package/src/rules/scope-rules.js +815 -0
  85. package/src/rules/security/ai-api.js +1177 -0
  86. package/src/rules/security/auth.js +1328 -0
  87. package/src/rules/security/cors.js +127 -0
  88. package/src/rules/security/crypto.js +527 -0
  89. package/src/rules/security/csharp.js +862 -0
  90. package/src/rules/security/csrf.js +193 -0
  91. package/src/rules/security/dart.js +835 -0
  92. package/src/rules/security/deserialization.js +291 -0
  93. package/src/rules/security/file-upload.js +187 -0
  94. package/src/rules/security/go.js +850 -0
  95. package/src/rules/security/headers.js +235 -0
  96. package/src/rules/security/index.js +65 -0
  97. package/src/rules/security/injection.js +1639 -0
  98. package/src/rules/security/mcp-server.js +71 -0
  99. package/src/rules/security/misconfiguration.js +660 -0
  100. package/src/rules/security/oauth-jwt.js +329 -0
  101. package/src/rules/security/path-traversal.js +295 -0
  102. package/src/rules/security/php.js +1054 -0
  103. package/src/rules/security/prototype-pollution.js +283 -0
  104. package/src/rules/security/rate-limiting.js +208 -0
  105. package/src/rules/security/ruby.js +1061 -0
  106. package/src/rules/security/rust.js +693 -0
  107. package/src/rules/security/secrets.js +747 -0
  108. package/src/rules/security/shell.js +647 -0
  109. package/src/rules/security/ssrf.js +298 -0
  110. package/src/rules/security/supply-chain-advanced.js +393 -0
  111. package/src/rules/security/supply-chain.js +734 -0
  112. package/src/rules/security/swift.js +835 -0
  113. package/src/rules/security/taint.js +27 -0
  114. package/src/rules/security/xss.js +520 -0
  115. package/src/scan-cache.js +71 -0
  116. package/src/scanner.js +710 -0
  117. package/src/scope-analyzer.js +685 -0
  118. package/src/share.js +88 -0
  119. package/src/taint.js +300 -0
  120. package/src/telemetry.js +183 -0
  121. package/src/tracer.js +190 -0
  122. package/src/upload.js +35 -0
  123. package/src/worker.js +31 -0
@@ -0,0 +1,3455 @@
1
+ const JS_EXTENSIONS = ['.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs'];
2
+ function isSourceFile(f) { return JS_EXTENSIONS.some(ext => f.endsWith(ext)); }
3
+
4
+ const rules = [
5
+ // PERF-001: SELECT * usage
6
+ {
7
+ id: 'PERF-001',
8
+ category: 'performance',
9
+ severity: 'medium',
10
+ confidence: 'likely',
11
+ title: 'SELECT * in database query',
12
+ check({ files }) {
13
+ const findings = [];
14
+ for (const [filepath, content] of files) {
15
+ if (!isSourceFile(filepath)) continue;
16
+ const lines = content.split('\n');
17
+ for (let i = 0; i < lines.length; i++) {
18
+ if (lines[i].match(/SELECT\s+\*/i) && !lines[i].match(/COUNT\s*\(\s*\*/i)) {
19
+ findings.push({
20
+ ruleId: 'PERF-001', category: 'performance', severity: 'medium',
21
+ title: 'SELECT * fetches all columns — specify only needed columns',
22
+ file: filepath, line: i + 1, fix: null,
23
+ });
24
+ }
25
+ }
26
+ }
27
+ return findings;
28
+ },
29
+ },
30
+
31
+ // PERF-002: No pagination on list endpoints
32
+ {
33
+ id: 'PERF-002',
34
+ category: 'performance',
35
+ severity: 'high',
36
+ confidence: 'likely',
37
+ title: 'No Pagination on API Endpoint',
38
+ check({ files }) {
39
+ const findings = [];
40
+ for (const [filepath, content] of files) {
41
+ if (!isSourceFile(filepath)) continue;
42
+ // Look for findMany/find/select without take/limit
43
+ if (content.match(/\.findMany\s*\(\s*\)/) ||
44
+ content.match(/\.find\s*\(\s*\{\s*\}\s*\)/) ||
45
+ content.match(/\.find\s*\(\s*\)/)) {
46
+ if (!content.includes('take:') && !content.includes('limit') && !content.includes('pagination')) {
47
+ findings.push({
48
+ ruleId: 'PERF-002', category: 'performance', severity: 'high',
49
+ title: 'Database query returns all rows without pagination',
50
+ description: 'Add pagination (limit/offset or cursor-based) to prevent loading entire tables.',
51
+ file: filepath, fix: null,
52
+ });
53
+ }
54
+ }
55
+ }
56
+ return findings;
57
+ },
58
+ },
59
+
60
+ // PERF-003: Synchronous file operations
61
+ {
62
+ id: 'PERF-003',
63
+ category: 'performance',
64
+ severity: 'medium',
65
+ confidence: 'likely',
66
+ title: 'Synchronous File I/O',
67
+ check({ files }) {
68
+ const findings = [];
69
+ const syncOps = ['readFileSync', 'writeFileSync', 'existsSync', 'mkdirSync', 'readdirSync', 'statSync'];
70
+ for (const [filepath, content] of files) {
71
+ if (!isSourceFile(filepath)) continue;
72
+ // Skip config files and CLI tools where sync is fine
73
+ if (filepath.includes('config') || filepath.includes('bin/') || filepath.includes('scripts/')) continue;
74
+
75
+ const lines = content.split('\n');
76
+ for (let i = 0; i < lines.length; i++) {
77
+ for (const op of syncOps) {
78
+ if (lines[i].includes(op) && !lines[i].trim().startsWith('//')) {
79
+ findings.push({
80
+ ruleId: 'PERF-003', category: 'performance', severity: 'medium',
81
+ title: `Sync file operation (${op}) blocks the event loop`,
82
+ description: 'Use async alternatives (readFile, writeFile, etc.) in request handlers.',
83
+ file: filepath, line: i + 1, fix: null,
84
+ });
85
+ break;
86
+ }
87
+ }
88
+ }
89
+ }
90
+ return findings;
91
+ },
92
+ },
93
+
94
+ // PERF-004: No caching for API responses
95
+ {
96
+ id: 'PERF-004',
97
+ category: 'performance',
98
+ severity: 'medium',
99
+ confidence: 'likely',
100
+ title: 'No Response Caching',
101
+ check({ files, stack }) {
102
+ const findings = [];
103
+ if (stack.runtime !== 'node') return findings;
104
+
105
+ const hasCaching = Object.keys({ ...stack.dependencies, ...stack.devDependencies }).some(dep =>
106
+ ['redis', 'ioredis', 'node-cache', 'lru-cache', 'keyv', '@upstash/redis'].includes(dep)
107
+ );
108
+
109
+ if (!hasCaching) {
110
+ const hasApiRoutes = [...files.keys()].some(f => f.includes('/api/') || f.includes('routes'));
111
+ if (hasApiRoutes) {
112
+ findings.push({
113
+ ruleId: 'PERF-004', category: 'performance', severity: 'medium',
114
+ title: 'No caching library detected for API responses',
115
+ description: 'Add caching (Redis, LRU cache, etc.) to reduce database load and improve response times.',
116
+ fix: null,
117
+ });
118
+ }
119
+ }
120
+ return findings;
121
+ },
122
+ },
123
+
124
+ // PERF-005: Large bundle / no code splitting
125
+ {
126
+ id: 'PERF-005',
127
+ category: 'performance',
128
+ severity: 'medium',
129
+ confidence: 'likely',
130
+ title: 'No Code Splitting / Lazy Loading',
131
+ check({ files, stack }) {
132
+ const findings = [];
133
+ if (!['nextjs', 'react', 'vue', 'sveltekit'].includes(stack.framework)) return findings;
134
+
135
+ const hasLazyLoad = [...files.values()].some(content =>
136
+ content.includes('React.lazy') ||
137
+ content.includes('dynamic(') ||
138
+ content.includes('defineAsyncComponent') ||
139
+ content.includes('import(')
140
+ );
141
+
142
+ if (!hasLazyLoad) {
143
+ findings.push({
144
+ ruleId: 'PERF-005', category: 'performance', severity: 'medium',
145
+ title: 'No lazy loading / code splitting detected',
146
+ description: 'Large apps should split code into chunks. Use dynamic imports or React.lazy() for routes.',
147
+ fix: null,
148
+ });
149
+ }
150
+ return findings;
151
+ },
152
+ },
153
+
154
+ // PERF-006: No image optimization
155
+ {
156
+ id: 'PERF-006',
157
+ category: 'performance',
158
+ severity: 'low',
159
+ confidence: 'suggestion',
160
+ title: 'Unoptimized Images',
161
+ check({ files, stack }) {
162
+ const findings = [];
163
+ if (stack.framework !== 'nextjs' && stack.framework !== 'react') return findings;
164
+
165
+ let usesImgTag = false;
166
+ let usesNextImage = false;
167
+
168
+ for (const [filepath, content] of files) {
169
+ if (!filepath.match(/\.(jsx|tsx)$/)) continue;
170
+ if (content.includes('<img ')) usesImgTag = true;
171
+ if (content.includes('next/image') || content.includes('<Image')) usesNextImage = true;
172
+ }
173
+
174
+ if (usesImgTag && !usesNextImage && stack.framework === 'nextjs') {
175
+ findings.push({
176
+ ruleId: 'PERF-006', category: 'performance', severity: 'low',
177
+ title: 'Using <img> instead of Next.js <Image> component',
178
+ description: 'The Next.js Image component auto-optimizes images (WebP, lazy loading, sizing).',
179
+ fix: null,
180
+ });
181
+ }
182
+ return findings;
183
+ },
184
+ },
185
+
186
+ // PERF-007: No database connection pooling
187
+ {
188
+ id: 'PERF-007',
189
+ category: 'performance',
190
+ severity: 'high',
191
+ confidence: 'likely',
192
+ title: 'No Database Connection Pooling',
193
+ check({ files, stack }) {
194
+ const findings = [];
195
+ if (stack.orm === 'prisma') return findings; // Prisma handles pooling
196
+
197
+ for (const [filepath, content] of files) {
198
+ if (!isSourceFile(filepath)) continue;
199
+ // Creating new connections per request
200
+ if (content.match(/new\s+(?:Client|Pool|Connection)\s*\(/) &&
201
+ (filepath.includes('api/') || filepath.includes('route') || filepath.includes('handler'))) {
202
+ findings.push({
203
+ ruleId: 'PERF-007', category: 'performance', severity: 'high',
204
+ title: 'Database connection created per-request instead of using a pool',
205
+ description: 'Create a shared connection pool instead of new connections per request.',
206
+ file: filepath, fix: null,
207
+ });
208
+ }
209
+ }
210
+ return findings;
211
+ },
212
+ },
213
+
214
+ // PERF-LOAD-001: No load testing framework detected
215
+ {
216
+ id: 'PERF-LOAD-001',
217
+ category: 'performance',
218
+ severity: 'medium',
219
+ confidence: 'likely',
220
+ title: 'No Load Testing Framework Detected',
221
+ check({ files, stack }) {
222
+ const findings = [];
223
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
224
+ const loadTestLibs = ['k6', 'artillery', 'locust', 'gatling', 'jmeter', 'autocannon', 'loadtest', 'vegeta', 'wrk'];
225
+ const hasLoadTestDep = loadTestLibs.some(lib => lib in allDeps);
226
+
227
+ if (!hasLoadTestDep) {
228
+ const loadTestConfigs = ['k6.js', 'k6.config.js', 'artillery.yml', 'artillery.yaml', 'artillery.json', 'locustfile.py'];
229
+ const hasLoadTestConfig = [...files.keys()].some(f => loadTestConfigs.some(cfg => f.endsWith(cfg)));
230
+
231
+ if (!hasLoadTestConfig) {
232
+ // Only flag if the project has a server
233
+ const hasServer = [...files.values()].some(content =>
234
+ content.includes('.listen(') || content.includes('createServer')
235
+ );
236
+ if (hasServer) {
237
+ findings.push({
238
+ ruleId: 'PERF-LOAD-001', category: 'performance', severity: 'medium',
239
+ title: 'No load testing framework detected',
240
+ description: 'Add a load testing tool (k6, artillery, autocannon, etc.) to validate performance under realistic traffic.',
241
+ fix: null,
242
+ });
243
+ }
244
+ }
245
+ }
246
+ return findings;
247
+ },
248
+ },
249
+
250
+ // PERF-LOAD-002: No stress test scripts
251
+ {
252
+ id: 'PERF-LOAD-002',
253
+ category: 'performance',
254
+ severity: 'medium',
255
+ confidence: 'likely',
256
+ title: 'No Stress Test Scripts',
257
+ check({ files, stack }) {
258
+ const findings = [];
259
+ const scripts = stack.scripts || {};
260
+ const stressKeywords = ['load', 'stress', 'perf', 'benchmark'];
261
+ const hasStressScript = Object.keys(scripts).some(name =>
262
+ stressKeywords.some(kw => name.toLowerCase().includes(kw))
263
+ );
264
+
265
+ if (!hasStressScript) {
266
+ const hasStressFile = [...files.keys()].some(f => {
267
+ const lower = f.toLowerCase();
268
+ return stressKeywords.some(kw =>
269
+ (lower.includes('test') || lower.includes('spec')) &&
270
+ (lower.includes(kw) || lower.includes('performance'))
271
+ );
272
+ });
273
+
274
+ if (!hasStressFile) {
275
+ const hasServer = [...files.values()].some(content =>
276
+ content.includes('.listen(') || content.includes('createServer')
277
+ );
278
+ if (hasServer) {
279
+ findings.push({
280
+ ruleId: 'PERF-LOAD-002', category: 'performance', severity: 'medium',
281
+ title: 'No stress/load test scripts found',
282
+ description: 'Add npm scripts (e.g. "test:load", "test:stress") and test files for load and performance testing.',
283
+ fix: null,
284
+ });
285
+ }
286
+ }
287
+ }
288
+ return findings;
289
+ },
290
+ },
291
+
292
+ // PERF-LOAD-003: No connection pool limits
293
+ {
294
+ id: 'PERF-LOAD-003',
295
+ category: 'performance',
296
+ severity: 'high',
297
+ confidence: 'likely',
298
+ title: 'No Connection Pool Limits',
299
+ check({ files }) {
300
+ const findings = [];
301
+ for (const [filepath, content] of files) {
302
+ if (!isSourceFile(filepath)) continue;
303
+ const lines = content.split('\n');
304
+ for (let i = 0; i < lines.length; i++) {
305
+ const line = lines[i];
306
+ if (line.match(/createPool\s*\(/) || line.match(/createConnection\s*\(/)) {
307
+ // Scan a window around the call for pool limit settings
308
+ const windowStart = Math.max(0, i - 2);
309
+ const windowEnd = Math.min(lines.length - 1, i + 15);
310
+ const window = lines.slice(windowStart, windowEnd + 1).join('\n');
311
+ const hasLimits = /\b(max|connectionLimit|pool\.max|maxConnections|poolSize)\b/.test(window);
312
+ if (!hasLimits) {
313
+ findings.push({
314
+ ruleId: 'PERF-LOAD-003', category: 'performance', severity: 'high',
315
+ title: 'Database pool/connection created without max connection limits',
316
+ description: 'Set max/connectionLimit/pool.max to prevent unbounded connections under load.',
317
+ file: filepath, line: i + 1, fix: null,
318
+ });
319
+ }
320
+ }
321
+ }
322
+ }
323
+ return findings;
324
+ },
325
+ },
326
+
327
+ // PERF-LOAD-004: No request timeout configured
328
+ {
329
+ id: 'PERF-LOAD-004',
330
+ category: 'performance',
331
+ severity: 'high',
332
+ confidence: 'likely',
333
+ title: 'No Request Timeout Configured',
334
+ check({ files }) {
335
+ const findings = [];
336
+ for (const [filepath, content] of files) {
337
+ if (!isSourceFile(filepath)) continue;
338
+ if (content.includes('.listen(') || content.includes('createServer')) {
339
+ const hasTimeout = content.includes('server.timeout') ||
340
+ content.includes('requestTimeout') ||
341
+ content.includes('timeout(') ||
342
+ content.includes('connect-timeout') ||
343
+ content.includes('express-timeout') ||
344
+ content.includes('setTimeout(') && content.includes('server');
345
+ if (!hasTimeout) {
346
+ findings.push({
347
+ ruleId: 'PERF-LOAD-004', category: 'performance', severity: 'high',
348
+ title: 'Server has no request timeout configured',
349
+ description: 'Set server.timeout or use timeout middleware to prevent slow requests from exhausting resources.',
350
+ file: filepath, fix: null,
351
+ });
352
+ }
353
+ }
354
+ }
355
+ return findings;
356
+ },
357
+ },
358
+
359
+ // PERF-LOAD-005: No rate limiting under load
360
+ {
361
+ id: 'PERF-LOAD-005',
362
+ category: 'performance',
363
+ severity: 'medium',
364
+ confidence: 'likely',
365
+ title: 'No Rate Limiting Under Load',
366
+ check({ files, stack }) {
367
+ const findings = [];
368
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
369
+ const rateLimitLibs = [
370
+ 'express-rate-limit', 'rate-limiter-flexible', 'koa-ratelimit',
371
+ 'fastify-rate-limit', '@fastify/rate-limit', 'express-slow-down',
372
+ 'bottleneck', 'p-throttle', 'limiter',
373
+ ];
374
+ const hasRateLimitDep = rateLimitLibs.some(lib => lib in allDeps);
375
+
376
+ if (!hasRateLimitDep) {
377
+ const hasRateLimitCode = [...files.values()].some(content =>
378
+ content.includes('rateLimit') || content.includes('rate-limit') ||
379
+ content.includes('rateLimiter') || content.includes('throttle')
380
+ );
381
+ if (!hasRateLimitCode) {
382
+ const hasApiRoutes = [...files.entries()].some(([f, c]) =>
383
+ (f.includes('/api/') || f.includes('route')) && isSourceFile(f)
384
+ );
385
+ if (hasApiRoutes) {
386
+ findings.push({
387
+ ruleId: 'PERF-LOAD-005', category: 'performance', severity: 'medium',
388
+ title: 'No rate limiting detected on API routes',
389
+ description: 'Add rate limiting (express-rate-limit, rate-limiter-flexible, etc.) to protect against abuse and overload.',
390
+ fix: null,
391
+ });
392
+ }
393
+ }
394
+ }
395
+ return findings;
396
+ },
397
+ },
398
+
399
+ // PERF-LOAD-006: No graceful shutdown
400
+ {
401
+ id: 'PERF-LOAD-006',
402
+ category: 'performance',
403
+ severity: 'high',
404
+ confidence: 'likely',
405
+ title: 'No Graceful Shutdown',
406
+ check({ files }) {
407
+ const findings = [];
408
+ for (const [filepath, content] of files) {
409
+ if (!isSourceFile(filepath)) continue;
410
+ if (content.includes('.listen(') || content.includes('createServer')) {
411
+ const hasGracefulShutdown =
412
+ content.includes("'SIGTERM'") || content.includes('"SIGTERM"') ||
413
+ content.includes("'SIGINT'") || content.includes('"SIGINT"') ||
414
+ content.includes('graceful') || content.includes('shutdown');
415
+ if (!hasGracefulShutdown) {
416
+ findings.push({
417
+ ruleId: 'PERF-LOAD-006', category: 'performance', severity: 'high',
418
+ title: 'Server has no graceful shutdown handling',
419
+ description: 'Handle SIGTERM/SIGINT to close connections gracefully and avoid dropped requests during deploys.',
420
+ file: filepath, fix: null,
421
+ });
422
+ }
423
+ }
424
+ }
425
+ return findings;
426
+ },
427
+ },
428
+
429
+ // PERF-LOAD-007: No health check endpoint
430
+ {
431
+ id: 'PERF-LOAD-007',
432
+ category: 'performance',
433
+ severity: 'medium',
434
+ confidence: 'likely',
435
+ title: 'No Health Check Endpoint',
436
+ check({ files }) {
437
+ const findings = [];
438
+ const hasHealthEndpoint = [...files.values()].some(content =>
439
+ content.includes('/health') || content.includes('/healthz') ||
440
+ content.includes('/readiness') || content.includes('/liveness') ||
441
+ content.includes('/ready') || content.includes('/alive')
442
+ );
443
+
444
+ if (!hasHealthEndpoint) {
445
+ const hasServer = [...files.entries()].some(([f, c]) =>
446
+ isSourceFile(f) && (c.includes('.listen(') || c.includes('createServer'))
447
+ );
448
+ if (hasServer) {
449
+ findings.push({
450
+ ruleId: 'PERF-LOAD-007', category: 'performance', severity: 'medium',
451
+ title: 'No health check endpoint detected',
452
+ description: 'Add /health or /healthz endpoint for load balancer and orchestrator readiness checks.',
453
+ fix: null,
454
+ });
455
+ }
456
+ }
457
+ return findings;
458
+ },
459
+ },
460
+
461
+ // PERF-LOAD-008: Unbounded concurrent connections
462
+ {
463
+ id: 'PERF-LOAD-008',
464
+ category: 'performance',
465
+ severity: 'high',
466
+ confidence: 'likely',
467
+ title: 'Unbounded Concurrent Connections',
468
+ check({ files }) {
469
+ const findings = [];
470
+ for (const [filepath, content] of files) {
471
+ if (!isSourceFile(filepath)) continue;
472
+ if (content.includes('.listen(') || content.includes('createServer')) {
473
+ const hasBounds = content.includes('maxConnections') ||
474
+ content.includes('maxHeadersCount') ||
475
+ content.includes('maxRequestsPerSocket') ||
476
+ content.includes('maxSockets');
477
+ if (!hasBounds) {
478
+ findings.push({
479
+ ruleId: 'PERF-LOAD-008', category: 'performance', severity: 'high',
480
+ title: 'Server allows unbounded concurrent connections',
481
+ description: 'Set server.maxConnections or maxHeadersCount to prevent resource exhaustion under heavy load.',
482
+ file: filepath, fix: null,
483
+ });
484
+ }
485
+ }
486
+ }
487
+ return findings;
488
+ },
489
+ },
490
+
491
+ // PERF-LOAD-009: No memory leak detection
492
+ {
493
+ id: 'PERF-LOAD-009',
494
+ category: 'performance',
495
+ severity: 'medium',
496
+ confidence: 'definite',
497
+ title: 'No Memory Leak Detection',
498
+ check({ files, stack }) {
499
+ const findings = [];
500
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
501
+ const memLibs = ['heapdump', 'v8-profiler', 'clinic', 'memwatch', 'node-memwatch', '@airbnb/node-memwatch'];
502
+ const hasMemLib = memLibs.some(lib => lib in allDeps);
503
+
504
+ if (!hasMemLib) {
505
+ const hasMemMonitoring = [...files.values()].some(content =>
506
+ content.includes('--max-old-space-size') ||
507
+ content.includes('memoryUsage') ||
508
+ content.includes('heapdump') ||
509
+ content.includes('v8.getHeapStatistics') ||
510
+ content.includes('process.memoryUsage')
511
+ );
512
+ if (!hasMemMonitoring) {
513
+ const hasServer = [...files.values()].some(content =>
514
+ content.includes('.listen(') || content.includes('createServer')
515
+ );
516
+ if (hasServer) {
517
+ findings.push({
518
+ ruleId: 'PERF-LOAD-009', category: 'performance', severity: 'medium',
519
+ title: 'No memory leak detection or monitoring found',
520
+ description: 'Add --max-old-space-size, process.memoryUsage() monitoring, or heapdump for memory leak detection.',
521
+ fix: null,
522
+ });
523
+ }
524
+ }
525
+ }
526
+ return findings;
527
+ },
528
+ },
529
+
530
+ // PERF-LOAD-010: No horizontal scaling support
531
+ {
532
+ id: 'PERF-LOAD-010',
533
+ category: 'performance',
534
+ severity: 'medium',
535
+ confidence: 'likely',
536
+ title: 'No Horizontal Scaling Support',
537
+ check({ files, stack }) {
538
+ const findings = [];
539
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
540
+
541
+ // Check for in-memory session stores without external backing
542
+ const hasExternalSessionStore = ['connect-redis', 'connect-mongo', 'connect-pg-simple',
543
+ 'express-mysql-session', '@fastify/session', 'ioredis', 'redis'].some(lib => lib in allDeps);
544
+
545
+ const usesInMemorySession = [...files.values()].some(content =>
546
+ (content.includes('express-session') || content.includes('session(')) &&
547
+ !content.includes('RedisStore') && !content.includes('MongoStore') &&
548
+ !content.includes('PgStore') && !content.includes('store:') && !content.includes('store :')
549
+ );
550
+
551
+ const usesLocalFs = [...files.values()].some(content =>
552
+ (content.includes('multer') || content.includes('upload')) &&
553
+ content.includes('diskStorage') &&
554
+ !content.includes('s3') && !content.includes('S3') &&
555
+ !content.includes('gcs') && !content.includes('cloudinary') &&
556
+ !content.includes('azure')
557
+ );
558
+
559
+ if ((usesInMemorySession && !hasExternalSessionStore) || usesLocalFs) {
560
+ findings.push({
561
+ ruleId: 'PERF-LOAD-010', category: 'performance', severity: 'medium',
562
+ title: 'Application uses in-memory state or local filesystem that prevents horizontal scaling',
563
+ description: 'Use Redis or an external store for sessions, and cloud storage (S3, GCS) for file uploads to enable horizontal scaling.',
564
+ fix: null,
565
+ });
566
+ }
567
+ return findings;
568
+ },
569
+ },
570
+
571
+ // PERF-DB-001: N+1 query pattern
572
+ {
573
+ id: 'PERF-DB-001',
574
+ category: 'performance',
575
+ severity: 'high',
576
+ confidence: 'definite',
577
+ title: 'N+1 Query Pattern',
578
+ check({ files }) {
579
+ const findings = [];
580
+ for (const [filepath, content] of files) {
581
+ if (!isSourceFile(filepath)) continue;
582
+ const lines = content.split('\n');
583
+ for (let i = 0; i < lines.length; i++) {
584
+ if (lines[i].match(/for\s*\(|\.forEach\s*\(|\.map\s*\(/)) {
585
+ const block = lines.slice(i, Math.min(i + 8, lines.length)).join('\n');
586
+ if (block.match(/await\s+\w+\.(findOne|findById|findUnique|find|get|fetch|query)\s*\(/)) {
587
+ findings.push({ ruleId: 'PERF-DB-001', category: 'performance', severity: 'high',
588
+ title: 'Possible N+1 query: database call inside a loop',
589
+ description: 'Each loop iteration triggers a separate DB query. Use findMany with IN, include/eager loading, or batch loading (DataLoader).',
590
+ file: filepath, line: i + 1, fix: null });
591
+ }
592
+ }
593
+ }
594
+ }
595
+ return findings;
596
+ },
597
+ },
598
+
599
+ // PERF-DB-002: LIKE with leading wildcard
600
+ {
601
+ id: 'PERF-DB-002',
602
+ category: 'performance',
603
+ severity: 'medium',
604
+ confidence: 'likely',
605
+ title: "LIKE '%...' Leading Wildcard Prevents Index Use",
606
+ check({ files }) {
607
+ const findings = [];
608
+ for (const [filepath, content] of files) {
609
+ if (!isSourceFile(filepath)) continue;
610
+ const lines = content.split('\n');
611
+ for (let i = 0; i < lines.length; i++) {
612
+ if (lines[i].match(/LIKE\s+['"`]%/i)) {
613
+ findings.push({ ruleId: 'PERF-DB-002', category: 'performance', severity: 'medium',
614
+ title: "LIKE '%...' leading wildcard causes full table scan",
615
+ description: 'Leading wildcards prevent index use. Consider full-text search (PostgreSQL tsvector, MySQL FULLTEXT) instead.',
616
+ file: filepath, line: i + 1, fix: null });
617
+ }
618
+ }
619
+ }
620
+ return findings;
621
+ },
622
+ },
623
+
624
+ // PERF-DB-003: ORDER BY RAND()
625
+ {
626
+ id: 'PERF-DB-003',
627
+ category: 'performance',
628
+ severity: 'high',
629
+ confidence: 'likely',
630
+ title: 'ORDER BY RAND() Full Table Scan',
631
+ check({ files }) {
632
+ const findings = [];
633
+ for (const [filepath, content] of files) {
634
+ if (!isSourceFile(filepath)) continue;
635
+ const lines = content.split('\n');
636
+ for (let i = 0; i < lines.length; i++) {
637
+ if (lines[i].match(/ORDER\s+BY\s+(?:RAND|RANDOM)\s*\(\)/i)) {
638
+ findings.push({ ruleId: 'PERF-DB-003', category: 'performance', severity: 'high',
639
+ title: 'ORDER BY RAND() requires full table scan and sort',
640
+ description: 'This is O(n log n) on every query. Use offset-based random selection or pre-computed random columns.',
641
+ file: filepath, line: i + 1, fix: null });
642
+ }
643
+ }
644
+ }
645
+ return findings;
646
+ },
647
+ },
648
+
649
+ // PERF-DB-004: SELECT without WHERE on large tables
650
+ {
651
+ id: 'PERF-DB-004',
652
+ category: 'performance',
653
+ severity: 'high',
654
+ confidence: 'likely',
655
+ title: 'Unbounded Table Scan',
656
+ check({ files }) {
657
+ const findings = [];
658
+ for (const [filepath, content] of files) {
659
+ if (!isSourceFile(filepath)) continue;
660
+ const lines = content.split('\n');
661
+ for (let i = 0; i < lines.length; i++) {
662
+ if (lines[i].match(/\bDELETE\s+FROM\b|\bUPDATE\s+\w+\s+SET\b/i)) {
663
+ if (!lines[i].match(/WHERE/i)) {
664
+ const nextLines = lines.slice(i, i + 3).join(' ');
665
+ if (!nextLines.match(/WHERE/i)) {
666
+ findings.push({ ruleId: 'PERF-DB-004', category: 'performance', severity: 'high',
667
+ title: 'DELETE/UPDATE without WHERE clause — full table operation',
668
+ description: 'This will affect every row. Add a WHERE clause to scope the operation.',
669
+ file: filepath, line: i + 1, fix: null });
670
+ }
671
+ }
672
+ }
673
+ }
674
+ }
675
+ return findings;
676
+ },
677
+ },
678
+
679
+ // PERF-DB-005: Prisma findMany without pagination
680
+ {
681
+ id: 'PERF-DB-005',
682
+ category: 'performance',
683
+ severity: 'high',
684
+ confidence: 'likely',
685
+ title: 'ORM findMany Without Pagination',
686
+ check({ files }) {
687
+ const findings = [];
688
+ for (const [filepath, content] of files) {
689
+ if (!isSourceFile(filepath)) continue;
690
+ const lines = content.split('\n');
691
+ for (let i = 0; i < lines.length; i++) {
692
+ if (lines[i].match(/\.findMany\s*\(\s*\{/) || lines[i].match(/\.findMany\s*\(\s*\)/)) {
693
+ const block = lines.slice(i, Math.min(i + 10, lines.length)).join('\n');
694
+ if (!block.match(/\b(take|skip|limit|offset|pagination|page)\b/)) {
695
+ findings.push({ ruleId: 'PERF-DB-005', category: 'performance', severity: 'high',
696
+ title: 'findMany without take/skip — returns all rows',
697
+ description: 'Add take/skip for pagination to prevent loading entire tables into memory.',
698
+ file: filepath, line: i + 1, fix: null });
699
+ }
700
+ }
701
+ }
702
+ }
703
+ return findings;
704
+ },
705
+ },
706
+
707
+ // PERF-DB-006: Mongoose find without limit
708
+ {
709
+ id: 'PERF-DB-006',
710
+ category: 'performance',
711
+ severity: 'medium',
712
+ confidence: 'likely',
713
+ title: 'Mongoose Query Without Limit',
714
+ check({ files }) {
715
+ const findings = [];
716
+ for (const [filepath, content] of files) {
717
+ if (!isSourceFile(filepath)) continue;
718
+ const lines = content.split('\n');
719
+ for (let i = 0; i < lines.length; i++) {
720
+ if (lines[i].match(/\.\s*find\s*\(\s*\{/) && !lines[i].match(/findOne|findById/)) {
721
+ const block = lines.slice(i, Math.min(i + 5, lines.length)).join('\n');
722
+ if (!block.match(/\.limit\s*\(/) && !block.match(/\.skip\s*\(/)) {
723
+ findings.push({ ruleId: 'PERF-DB-006', category: 'performance', severity: 'medium',
724
+ title: 'Mongoose .find() without .limit() — unbounded result set',
725
+ description: 'Chain .limit(n) to prevent loading all documents. Use cursor-based pagination for large collections.',
726
+ file: filepath, line: i + 1, fix: null });
727
+ }
728
+ }
729
+ }
730
+ }
731
+ return findings;
732
+ },
733
+ },
734
+
735
+ // PERF-DB-007: No database query timeout
736
+ {
737
+ id: 'PERF-DB-007',
738
+ category: 'performance',
739
+ severity: 'high',
740
+ confidence: 'likely',
741
+ title: 'No Database Query Timeout',
742
+ check({ files, stack }) {
743
+ const findings = [];
744
+ if (!stack.database) return findings;
745
+ const hasQueryTimeout = [...files.values()].some(c =>
746
+ c.includes('statement_timeout') || c.includes('query_timeout') ||
747
+ c.includes('queryTimeout') || c.includes('connectionTimeoutMillis') ||
748
+ c.includes('connectTimeout') || c.includes('lock_timeout')
749
+ );
750
+ if (!hasQueryTimeout) {
751
+ findings.push({ ruleId: 'PERF-DB-007', category: 'performance', severity: 'high',
752
+ title: 'No database query timeout configured',
753
+ description: 'Long-running queries block connections and can exhaust the pool. Set statement_timeout (PostgreSQL) or queryTimeout (MySQL/MongoDB).',
754
+ fix: null });
755
+ }
756
+ return findings;
757
+ },
758
+ },
759
+
760
+ // PERF-DB-008: Selecting all fields with Prisma
761
+ {
762
+ id: 'PERF-DB-008',
763
+ category: 'performance',
764
+ severity: 'low',
765
+ confidence: 'suggestion',
766
+ title: 'ORM Query Without Field Selection',
767
+ check({ files }) {
768
+ const findings = [];
769
+ for (const [filepath, content] of files) {
770
+ if (!isSourceFile(filepath)) continue;
771
+ const lines = content.split('\n');
772
+ for (let i = 0; i < lines.length; i++) {
773
+ if (lines[i].match(/\.findFirst\s*\(\s*\{/) || lines[i].match(/\.findUnique\s*\(\s*\{/)) {
774
+ const block = lines.slice(i, Math.min(i + 10, lines.length)).join('\n');
775
+ if (!block.match(/\bselect\s*:/) && !block.match(/\binclude\s*:/)) {
776
+ findings.push({ ruleId: 'PERF-DB-008', category: 'performance', severity: 'low',
777
+ title: 'Prisma query without select — fetching all fields',
778
+ description: 'Use select: { field1: true } to fetch only the columns you need, reducing data transfer.',
779
+ file: filepath, line: i + 1, fix: null });
780
+ }
781
+ }
782
+ }
783
+ }
784
+ return findings;
785
+ },
786
+ },
787
+
788
+ // PERF-CACHE-001: No Cache-Control headers
789
+ {
790
+ id: 'PERF-CACHE-001',
791
+ category: 'performance',
792
+ severity: 'medium',
793
+ confidence: 'likely',
794
+ title: 'No Cache-Control Headers on API Responses',
795
+ check({ files }) {
796
+ const findings = [];
797
+ for (const [filepath, content] of files) {
798
+ if (!isSourceFile(filepath)) continue;
799
+ if (!filepath.includes('api/') && !filepath.includes('route')) continue;
800
+ if (content.includes('res.json(') || content.includes('res.send(')) {
801
+ if (!content.includes('Cache-Control') && !content.includes('cache-control') &&
802
+ !content.includes('setHeader') && !content.includes('cache')) {
803
+ findings.push({ ruleId: 'PERF-CACHE-001', category: 'performance', severity: 'medium',
804
+ title: 'API route sends responses without Cache-Control headers',
805
+ description: 'Add Cache-Control headers to allow browsers and CDNs to cache responses. Use max-age for public data, no-store for private.',
806
+ file: filepath, fix: null });
807
+ }
808
+ }
809
+ }
810
+ return findings;
811
+ },
812
+ },
813
+
814
+ // PERF-CACHE-002: No CDN for static assets
815
+ {
816
+ id: 'PERF-CACHE-002',
817
+ category: 'performance',
818
+ severity: 'medium',
819
+ confidence: 'likely',
820
+ title: 'No CDN for Static Assets',
821
+ check({ files, stack }) {
822
+ const findings = [];
823
+ const hasCDN = [...files.values()].some(c =>
824
+ c.includes('cloudfront') || c.includes('cloudflare') || c.includes('fastly') ||
825
+ c.includes('cdn.') || c.includes('NEXT_PUBLIC_CDN') || c.includes('assetPrefix') ||
826
+ c.includes('vercel') || c.includes('netlify')
827
+ );
828
+ if (!hasCDN) {
829
+ const hasStaticAssets = [...files.keys()].some(f =>
830
+ f.includes('/public/') || f.includes('/static/') || f.includes('/assets/')
831
+ );
832
+ if (hasStaticAssets) {
833
+ findings.push({ ruleId: 'PERF-CACHE-002', category: 'performance', severity: 'medium',
834
+ title: 'No CDN detected for static assets',
835
+ description: 'Serve static assets through a CDN (Cloudflare, CloudFront, Vercel, Netlify) to reduce latency and origin load.',
836
+ fix: null });
837
+ }
838
+ }
839
+ return findings;
840
+ },
841
+ },
842
+
843
+ // PERF-CACHE-003: Memoization missing for expensive function
844
+ {
845
+ id: 'PERF-CACHE-003',
846
+ category: 'performance',
847
+ severity: 'medium',
848
+ confidence: 'likely',
849
+ title: 'Expensive Computation Not Memoized',
850
+ check({ files }) {
851
+ const findings = [];
852
+ for (const [filepath, content] of files) {
853
+ if (!isSourceFile(filepath)) continue;
854
+ const lines = content.split('\n');
855
+ for (let i = 0; i < lines.length; i++) {
856
+ if (lines[i].match(/for\s*\(.*\bof\b|\bforEach\b|\bmap\b/)) {
857
+ const block = lines.slice(i, Math.min(i + 5, lines.length)).join('\n');
858
+ if (block.match(/JSON\.parse|JSON\.stringify|bcrypt|crypto\.|Buffer\.from/)) {
859
+ findings.push({ ruleId: 'PERF-CACHE-003', category: 'performance', severity: 'medium',
860
+ title: 'Expensive operation (JSON/crypto/Buffer) called in loop without caching',
861
+ description: 'Cache the result outside the loop or use memoization to avoid redundant expensive operations.',
862
+ file: filepath, line: i + 1, fix: null });
863
+ }
864
+ }
865
+ }
866
+ }
867
+ return findings;
868
+ },
869
+ },
870
+
871
+ // PERF-CACHE-004: Infinite cache TTL
872
+ {
873
+ id: 'PERF-CACHE-004',
874
+ category: 'performance',
875
+ severity: 'low',
876
+ confidence: 'suggestion',
877
+ title: 'Cache Without TTL',
878
+ check({ files }) {
879
+ const findings = [];
880
+ for (const [filepath, content] of files) {
881
+ if (!isSourceFile(filepath)) continue;
882
+ if (content.includes('redis') || content.includes('cache')) {
883
+ if (content.match(/\.set\s*\(/) && !content.match(/\bEX\b|\bPX\b|\bttl\b|\bexpire\b|\bTTL\b/i)) {
884
+ findings.push({ ruleId: 'PERF-CACHE-004', category: 'performance', severity: 'low',
885
+ title: 'Cache set without TTL/expiry — data may never be invalidated',
886
+ description: 'Always set a TTL on cached data to prevent stale data and unbounded memory growth.',
887
+ file: filepath, fix: null });
888
+ }
889
+ }
890
+ }
891
+ return findings;
892
+ },
893
+ },
894
+
895
+ // PERF-FE-001: No gzip/brotli compression middleware
896
+ {
897
+ id: 'PERF-FE-001',
898
+ category: 'performance',
899
+ severity: 'high',
900
+ confidence: 'likely',
901
+ title: 'No Response Compression Middleware',
902
+ check({ files, stack }) {
903
+ const findings = [];
904
+ if (!['express', 'fastify', 'koa', 'hono'].includes(stack.framework)) return findings;
905
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
906
+ const hasCompression = 'compression' in allDeps || '@fastify/compress' in allDeps ||
907
+ 'koa-compress' in allDeps || 'shrink-ray' in allDeps;
908
+ const hasCompressCode = [...files.values()].some(c =>
909
+ c.includes('compression') || c.includes('compress') || c.includes('brotli')
910
+ );
911
+ if (!hasCompression && !hasCompressCode) {
912
+ findings.push({ ruleId: 'PERF-FE-001', category: 'performance', severity: 'high',
913
+ title: 'No gzip/brotli compression middleware detected',
914
+ description: 'Add compression middleware to reduce response sizes by 60-80%. Install the `compression` package for Express.',
915
+ fix: null });
916
+ }
917
+ return findings;
918
+ },
919
+ },
920
+
921
+ // PERF-FE-002: Importing all of lodash
922
+ {
923
+ id: 'PERF-FE-002',
924
+ category: 'performance',
925
+ severity: 'medium',
926
+ confidence: 'likely',
927
+ title: 'Full Lodash Import',
928
+ check({ files }) {
929
+ const findings = [];
930
+ for (const [filepath, content] of files) {
931
+ if (!isSourceFile(filepath)) continue;
932
+ const lines = content.split('\n');
933
+ for (let i = 0; i < lines.length; i++) {
934
+ if (lines[i].match(/(?:import|require)\s*(?:\*\s+as\s+_|_\s*=\s*require)\s*(?:from\s*)?['"]lodash['"]/)) {
935
+ findings.push({ ruleId: 'PERF-FE-002', category: 'performance', severity: 'medium',
936
+ title: 'Importing entire lodash adds ~70KB to bundle',
937
+ description: "Import individual functions: import debounce from 'lodash/debounce' or use lodash-es for tree shaking.",
938
+ file: filepath, line: i + 1, fix: null });
939
+ }
940
+ }
941
+ }
942
+ return findings;
943
+ },
944
+ },
945
+
946
+ // PERF-FE-003: No debounce on search input
947
+ {
948
+ id: 'PERF-FE-003',
949
+ category: 'performance',
950
+ severity: 'medium',
951
+ confidence: 'likely',
952
+ title: 'Missing Debounce on Input Handler',
953
+ check({ files }) {
954
+ const findings = [];
955
+ for (const [filepath, content] of files) {
956
+ if (!filepath.match(/\.(jsx|tsx)$/)) continue;
957
+ if (content.match(/onChange\s*=|onInput\s*=/) && content.match(/fetch\s*\(|axios\.|api\./)) {
958
+ if (!content.includes('debounce') && !content.includes('useDebounce')) {
959
+ findings.push({ ruleId: 'PERF-FE-003', category: 'performance', severity: 'medium',
960
+ title: 'API call on input change without debounce',
961
+ description: 'Without debouncing, every keystroke fires an API call. Use debounce(fn, 300) to limit calls.',
962
+ file: filepath, fix: null });
963
+ }
964
+ }
965
+ }
966
+ return findings;
967
+ },
968
+ },
969
+
970
+ // PERF-FE-004: No virtual scrolling for long lists
971
+ {
972
+ id: 'PERF-FE-004',
973
+ category: 'performance',
974
+ severity: 'medium',
975
+ confidence: 'likely',
976
+ title: 'No Virtual Scrolling for Large Lists',
977
+ check({ files, stack }) {
978
+ const findings = [];
979
+ if (!['react', 'nextjs', 'vue'].includes(stack.framework)) return findings;
980
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
981
+ const hasVirtual = 'react-window' in allDeps || 'react-virtual' in allDeps ||
982
+ 'react-virtualized' in allDeps || '@tanstack/react-virtual' in allDeps ||
983
+ 'vue-virtual-scroller' in allDeps;
984
+ if (!hasVirtual) {
985
+ const hasLongList = [...files.values()].some(c =>
986
+ c.match(/\.map\s*\(.*=>\s*(?:<|\()/s) && (c.includes('items') || c.includes('list') || c.includes('rows'))
987
+ );
988
+ if (hasLongList) {
989
+ findings.push({ ruleId: 'PERF-FE-004', category: 'performance', severity: 'medium',
990
+ title: 'No virtual scrolling library detected — long lists will render all DOM nodes',
991
+ description: 'Use react-window or @tanstack/react-virtual for lists that may exceed 100 items.',
992
+ fix: null });
993
+ }
994
+ }
995
+ return findings;
996
+ },
997
+ },
998
+
999
+ // PERF-FE-005: Render-blocking scripts
1000
+ {
1001
+ id: 'PERF-FE-005',
1002
+ category: 'performance',
1003
+ severity: 'medium',
1004
+ confidence: 'likely',
1005
+ title: 'Render-Blocking Scripts',
1006
+ check({ files }) {
1007
+ const findings = [];
1008
+ for (const [filepath, content] of files) {
1009
+ if (!filepath.match(/\.(html|jsx|tsx)$/)) continue;
1010
+ const lines = content.split('\n');
1011
+ for (let i = 0; i < lines.length; i++) {
1012
+ if (lines[i].match(/<script\s+src=/i) && !lines[i].match(/async|defer/i)) {
1013
+ findings.push({ ruleId: 'PERF-FE-005', category: 'performance', severity: 'medium',
1014
+ title: '<script src> without async or defer blocks page rendering',
1015
+ description: 'Add defer (for non-critical scripts) or async to prevent blocking HTML parsing.',
1016
+ file: filepath, line: i + 1, fix: null });
1017
+ }
1018
+ }
1019
+ }
1020
+ return findings;
1021
+ },
1022
+ },
1023
+
1024
+ // PERF-API-001: Sequential awaits that could be parallel
1025
+ {
1026
+ id: 'PERF-API-001',
1027
+ category: 'performance',
1028
+ severity: 'medium',
1029
+ confidence: 'likely',
1030
+ title: 'Sequential Awaits Should Be Parallel',
1031
+ check({ files }) {
1032
+ const findings = [];
1033
+ for (const [filepath, content] of files) {
1034
+ if (!isSourceFile(filepath)) continue;
1035
+ const lines = content.split('\n');
1036
+ for (let i = 0; i < lines.length - 1; i++) {
1037
+ if (lines[i].match(/^\s*(?:const|let)\s+\w+\s*=\s*await\s+/) &&
1038
+ lines[i + 1].match(/^\s*(?:const|let)\s+\w+\s*=\s*await\s+/)) {
1039
+ const a = lines[i].match(/await\s+(\w+)/)?.[1];
1040
+ const b = lines[i + 1].match(/await\s+(\w+)/)?.[1];
1041
+ if (a && b && a !== b) {
1042
+ findings.push({ ruleId: 'PERF-API-001', category: 'performance', severity: 'medium',
1043
+ title: 'Sequential awaits could be parallelized with Promise.all()',
1044
+ description: `Replace sequential awaits with const [a, b] = await Promise.all([...]) to run independent operations in parallel.`,
1045
+ file: filepath, line: i + 1, fix: null });
1046
+ }
1047
+ }
1048
+ }
1049
+ }
1050
+ return findings;
1051
+ },
1052
+ },
1053
+
1054
+ // PERF-API-002: Large file loaded into memory
1055
+ {
1056
+ id: 'PERF-API-002',
1057
+ category: 'performance',
1058
+ severity: 'high',
1059
+ confidence: 'likely',
1060
+ title: 'File Read Into Memory Instead of Streamed',
1061
+ check({ files }) {
1062
+ const findings = [];
1063
+ for (const [filepath, content] of files) {
1064
+ if (!isSourceFile(filepath)) continue;
1065
+ const lines = content.split('\n');
1066
+ for (let i = 0; i < lines.length; i++) {
1067
+ if (lines[i].match(/fs\.readFile(?:Sync)?\s*\(/) &&
1068
+ (lines[i].includes('res.send') || content.substring(content.indexOf(lines[i])).match(/res\.(?:send|json|download)\s*\(/))) {
1069
+ findings.push({ ruleId: 'PERF-API-002', category: 'performance', severity: 'high',
1070
+ title: 'Reading entire file into memory before sending response',
1071
+ description: 'Use fs.createReadStream() and pipe to response to avoid loading large files into memory.',
1072
+ file: filepath, line: i + 1, fix: null });
1073
+ }
1074
+ }
1075
+ }
1076
+ return findings;
1077
+ },
1078
+ },
1079
+
1080
+ // PERF-API-003: Synchronous crypto in request handler
1081
+ {
1082
+ id: 'PERF-API-003',
1083
+ category: 'performance',
1084
+ severity: 'high',
1085
+ confidence: 'likely',
1086
+ title: 'Synchronous Crypto in Request Handler',
1087
+ check({ files }) {
1088
+ const findings = [];
1089
+ for (const [filepath, content] of files) {
1090
+ if (!isSourceFile(filepath)) continue;
1091
+ if (!filepath.includes('api/') && !filepath.includes('route') && !filepath.includes('handler')) continue;
1092
+ const lines = content.split('\n');
1093
+ for (let i = 0; i < lines.length; i++) {
1094
+ if (lines[i].match(/crypto\.(?:pbkdf2Sync|scryptSync|generateKeyPairSync|createCipheriv)/)) {
1095
+ findings.push({ ruleId: 'PERF-API-003', category: 'performance', severity: 'high',
1096
+ title: 'Synchronous crypto operation blocks event loop in request handler',
1097
+ description: 'Use async variants (pbkdf2, scrypt) or run in a worker thread to avoid blocking.',
1098
+ file: filepath, line: i + 1, fix: null });
1099
+ }
1100
+ }
1101
+ }
1102
+ return findings;
1103
+ },
1104
+ },
1105
+
1106
+ // PERF-API-004: No body size limit
1107
+ {
1108
+ id: 'PERF-API-004',
1109
+ category: 'performance',
1110
+ severity: 'high',
1111
+ confidence: 'likely',
1112
+ title: 'No Request Body Size Limit',
1113
+ check({ files, stack }) {
1114
+ const findings = [];
1115
+ if (!['express', 'fastify', 'koa'].includes(stack.framework)) return findings;
1116
+ const hasBodyLimit = [...files.values()].some(c =>
1117
+ c.match(/limit\s*:\s*['"`]\d+[kmg]b?['"`]/i) ||
1118
+ c.includes('bodyLimit') || c.includes('requestBodySize') ||
1119
+ c.match(/express\.json\s*\(\s*\{[^}]*limit/)
1120
+ );
1121
+ if (!hasBodyLimit) {
1122
+ findings.push({ ruleId: 'PERF-API-004', category: 'performance', severity: 'high',
1123
+ title: 'No request body size limit configured',
1124
+ description: 'Without a limit, large payloads can exhaust memory. Set limit in bodyParser/express.json({ limit: "1mb" }).',
1125
+ fix: null });
1126
+ }
1127
+ return findings;
1128
+ },
1129
+ },
1130
+
1131
+ // PERF-MEM-001: Event listeners without cleanup
1132
+ {
1133
+ id: 'PERF-MEM-001',
1134
+ category: 'performance',
1135
+ severity: 'medium',
1136
+ confidence: 'likely',
1137
+ title: 'Event Listeners Added Without Cleanup',
1138
+ check({ files }) {
1139
+ const findings = [];
1140
+ for (const [filepath, content] of files) {
1141
+ if (!isSourceFile(filepath)) continue;
1142
+ const addCount = (content.match(/addEventListener\s*\(/g) || []).length;
1143
+ const removeCount = (content.match(/removeEventListener\s*\(/g) || []).length;
1144
+ if (addCount > removeCount + 1) {
1145
+ findings.push({ ruleId: 'PERF-MEM-001', category: 'performance', severity: 'medium',
1146
+ title: `${addCount} addEventListener calls but only ${removeCount} removeEventListener — potential memory leak`,
1147
+ description: 'Always pair addEventListener with removeEventListener in cleanup (useEffect return, componentWillUnmount, etc.).',
1148
+ file: filepath, fix: null });
1149
+ }
1150
+ }
1151
+ return findings;
1152
+ },
1153
+ },
1154
+
1155
+ // PERF-MEM-002: setInterval without clearInterval
1156
+ {
1157
+ id: 'PERF-MEM-002',
1158
+ category: 'performance',
1159
+ severity: 'medium',
1160
+ confidence: 'likely',
1161
+ title: 'setInterval Without clearInterval',
1162
+ check({ files }) {
1163
+ const findings = [];
1164
+ for (const [filepath, content] of files) {
1165
+ if (!isSourceFile(filepath)) continue;
1166
+ const setCount = (content.match(/\bsetInterval\s*\(/g) || []).length;
1167
+ const clearCount = (content.match(/\bclearInterval\s*\(/g) || []).length;
1168
+ if (setCount > clearCount) {
1169
+ findings.push({ ruleId: 'PERF-MEM-002', category: 'performance', severity: 'medium',
1170
+ title: 'setInterval without corresponding clearInterval — memory/CPU leak',
1171
+ description: 'Store the interval ID and call clearInterval() in cleanup to prevent accumulation.',
1172
+ file: filepath, fix: null });
1173
+ }
1174
+ }
1175
+ return findings;
1176
+ },
1177
+ },
1178
+
1179
+ // PERF-MEM-003: String concatenation in loop
1180
+ {
1181
+ id: 'PERF-MEM-003',
1182
+ category: 'performance',
1183
+ severity: 'low',
1184
+ confidence: 'suggestion',
1185
+ title: 'String Concatenation in Loop',
1186
+ check({ files }) {
1187
+ const findings = [];
1188
+ for (const [filepath, content] of files) {
1189
+ if (!isSourceFile(filepath)) continue;
1190
+ const lines = content.split('\n');
1191
+ for (let i = 0; i < lines.length; i++) {
1192
+ if (lines[i].match(/for\s*\(|while\s*\(/)) {
1193
+ const block = lines.slice(i, Math.min(i + 10, lines.length)).join('\n');
1194
+ if (block.match(/\w+\s*\+=\s*['"`]|str\s*\+=|html\s*\+=|result\s*\+=/)) {
1195
+ findings.push({ ruleId: 'PERF-MEM-003', category: 'performance', severity: 'low',
1196
+ title: 'String concatenation in loop — use array.join() instead',
1197
+ description: 'Each += creates a new string object. Collect parts in an array and use .join("") at the end.',
1198
+ file: filepath, line: i + 1, fix: null });
1199
+ }
1200
+ }
1201
+ }
1202
+ }
1203
+ return findings;
1204
+ },
1205
+ },
1206
+
1207
+ // PERF-MEM-004: Growing array in module scope
1208
+ {
1209
+ id: 'PERF-MEM-004',
1210
+ category: 'performance',
1211
+ severity: 'medium',
1212
+ confidence: 'likely',
1213
+ title: 'Unbounded Array in Module Scope',
1214
+ check({ files }) {
1215
+ const findings = [];
1216
+ for (const [filepath, content] of files) {
1217
+ if (!isSourceFile(filepath)) continue;
1218
+ const lines = content.split('\n');
1219
+ for (let i = 0; i < lines.length; i++) {
1220
+ if (lines[i].match(/^(?:const|let|var)\s+\w+\s*=\s*\[\s*\]/)) {
1221
+ const varName = lines[i].match(/(?:const|let|var)\s+(\w+)/)?.[1];
1222
+ if (varName && content.includes(`${varName}.push(`) && !content.includes(`${varName}.splice`) && !content.includes(`${varName}.shift`)) {
1223
+ findings.push({ ruleId: 'PERF-MEM-004', category: 'performance', severity: 'medium',
1224
+ title: `Module-level array "${varName}" grows unbounded — memory leak risk`,
1225
+ description: 'Arrays at module scope persist for the process lifetime. Add a max size or use a circular buffer.',
1226
+ file: filepath, line: i + 1, fix: null });
1227
+ }
1228
+ }
1229
+ }
1230
+ }
1231
+ return findings;
1232
+ },
1233
+ },
1234
+
1235
+ // PERF-ASYNC-001: CPU-intensive work in main thread
1236
+ {
1237
+ id: 'PERF-ASYNC-001',
1238
+ category: 'performance',
1239
+ severity: 'high',
1240
+ confidence: 'likely',
1241
+ title: 'CPU-Intensive Work in Main Thread',
1242
+ check({ files, stack }) {
1243
+ const findings = [];
1244
+ if (stack.runtime !== 'node') return findings;
1245
+ for (const [filepath, content] of files) {
1246
+ if (!isSourceFile(filepath)) continue;
1247
+ if (filepath.includes('worker')) continue;
1248
+ const lines = content.split('\n');
1249
+ for (let i = 0; i < lines.length; i++) {
1250
+ if (lines[i].match(/JSON\.parse\s*\(|JSON\.stringify\s*\(/) &&
1251
+ (filepath.includes('api/') || filepath.includes('route') || filepath.includes('handler'))) {
1252
+ const line = lines[i];
1253
+ if (line.match(/JSON\.(parse|stringify)\s*\(\s*(?:req\.|body|data|payload|response)/)) {
1254
+ const nextLines = lines.slice(i, Math.min(i + 3, lines.length)).join('');
1255
+ if (nextLines.match(/JSON\.(parse|stringify)\s*\(/) && !content.includes('worker_threads')) {
1256
+ findings.push({ ruleId: 'PERF-ASYNC-001', category: 'performance', severity: 'high',
1257
+ title: 'Large JSON parsing in request handler blocks event loop',
1258
+ description: 'Move large JSON parsing/stringification to worker_threads to keep the event loop free.',
1259
+ file: filepath, line: i + 1, fix: null });
1260
+ break;
1261
+ }
1262
+ }
1263
+ }
1264
+ }
1265
+ }
1266
+ return findings;
1267
+ },
1268
+ },
1269
+
1270
+ // PERF-ASYNC-002: Promise.all missing for parallel operations
1271
+ {
1272
+ id: 'PERF-ASYNC-002',
1273
+ category: 'performance',
1274
+ severity: 'medium',
1275
+ confidence: 'likely',
1276
+ title: 'Array.map with async Without Promise.all',
1277
+ check({ files }) {
1278
+ const findings = [];
1279
+ for (const [filepath, content] of files) {
1280
+ if (!isSourceFile(filepath)) continue;
1281
+ const lines = content.split('\n');
1282
+ for (let i = 0; i < lines.length; i++) {
1283
+ if (lines[i].match(/\.map\s*\(\s*async/) && !lines[i].includes('Promise.all')) {
1284
+ const context = lines.slice(Math.max(0, i - 2), i + 2).join('\n');
1285
+ if (!context.includes('Promise.all') && !context.includes('Promise.allSettled')) {
1286
+ findings.push({ ruleId: 'PERF-ASYNC-002', category: 'performance', severity: 'medium',
1287
+ title: '.map(async ...) without Promise.all — promises not awaited',
1288
+ description: 'Wrap with await Promise.all(items.map(async item => ...)) to actually await all operations.',
1289
+ file: filepath, line: i + 1, fix: null });
1290
+ }
1291
+ }
1292
+ }
1293
+ }
1294
+ return findings;
1295
+ },
1296
+ },
1297
+
1298
+ // PERF-MON-001: No APM or performance monitoring
1299
+ {
1300
+ id: 'PERF-MON-001',
1301
+ category: 'performance',
1302
+ severity: 'medium',
1303
+ confidence: 'likely',
1304
+ title: 'No Application Performance Monitoring',
1305
+ check({ files, stack }) {
1306
+ const findings = [];
1307
+ if (stack.runtime !== 'node') return findings;
1308
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1309
+ const apmLibs = ['newrelic', 'dd-trace', 'elastic-apm-node', '@opentelemetry/sdk-node',
1310
+ 'prom-client', '@sentry/node', 'applicationinsights', 'clinic'];
1311
+ if (!apmLibs.some(lib => lib in allDeps)) {
1312
+ const hasServer = [...files.values()].some(c => c.includes('.listen(') || c.includes('createServer'));
1313
+ if (hasServer) {
1314
+ findings.push({ ruleId: 'PERF-MON-001', category: 'performance', severity: 'medium',
1315
+ title: 'No APM or performance monitoring library detected',
1316
+ description: 'Add APM (Datadog, New Relic, OpenTelemetry) to track response times, throughput, and performance regressions in production.',
1317
+ fix: null });
1318
+ }
1319
+ }
1320
+ return findings;
1321
+ },
1322
+ },
1323
+
1324
+ // PERF-MON-002: No slow query logging
1325
+ {
1326
+ id: 'PERF-MON-002',
1327
+ category: 'performance',
1328
+ severity: 'medium',
1329
+ confidence: 'likely',
1330
+ title: 'No Slow Query Logging',
1331
+ check({ files, stack }) {
1332
+ const findings = [];
1333
+ const hasSlowQueryLog = [...files.values()].some(c =>
1334
+ c.includes('slow') && (c.includes('query') || c.includes('log')) ||
1335
+ c.includes('log_min_duration') || c.includes('slowQuery') || c.includes('slow_query')
1336
+ );
1337
+ if (!hasSlowQueryLog && stack.database) {
1338
+ findings.push({ ruleId: 'PERF-MON-002', category: 'performance', severity: 'medium',
1339
+ title: 'No slow query logging configured',
1340
+ description: 'Enable slow query logging to identify performance bottlenecks. Use log_min_duration_statement in PostgreSQL or slow_query_log in MySQL.',
1341
+ fix: null });
1342
+ }
1343
+ return findings;
1344
+ },
1345
+ },
1346
+
1347
+ // PERF-MON-003: No response time tracking
1348
+ {
1349
+ id: 'PERF-MON-003',
1350
+ category: 'performance',
1351
+ severity: 'low',
1352
+ confidence: 'suggestion',
1353
+ title: 'No Response Time Tracking',
1354
+ check({ files, stack }) {
1355
+ const findings = [];
1356
+ if (stack.runtime !== 'node') return findings;
1357
+ const hasResponseTime = [...files.values()].some(c =>
1358
+ c.includes('response-time') || c.includes('responseTime') ||
1359
+ c.includes('x-response-time') || c.includes('X-Response-Time') ||
1360
+ c.includes('performance.now') || c.includes('process.hrtime')
1361
+ );
1362
+ const hasServer = [...files.values()].some(c => c.includes('.listen('));
1363
+ if (!hasResponseTime && hasServer) {
1364
+ findings.push({ ruleId: 'PERF-MON-003', category: 'performance', severity: 'low',
1365
+ title: 'No response time tracking on API routes',
1366
+ description: 'Add response-time middleware or log process.hrtime() to track and alert on slow endpoints.',
1367
+ fix: null });
1368
+ }
1369
+ return findings;
1370
+ },
1371
+ },
1372
+
1373
+ // PERF-DB-009: Mongoose populate without field selection
1374
+ { id: 'PERF-DB-009', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Mongoose populate Without Field Selection',
1375
+ check({ files }) {
1376
+ const findings = [];
1377
+ for (const [fp, c] of files) {
1378
+ if (!isSourceFile(fp)) continue;
1379
+ const lines = c.split('\n');
1380
+ for (let i = 0; i < lines.length; i++) {
1381
+ if (lines[i].match(/\.populate\s*\(\s*['"`]\w+['"`]\s*\)/) && !lines[i].match(/select|fields/)) {
1382
+ findings.push({ ruleId: 'PERF-DB-009', category: 'performance', severity: 'medium',
1383
+ title: 'Mongoose .populate() without field selection fetches entire documents',
1384
+ description: 'Use .populate("field", "name email") to select only needed fields from related documents.', file: fp, line: i + 1, fix: null });
1385
+ }
1386
+ }
1387
+ }
1388
+ return findings;
1389
+ },
1390
+ },
1391
+
1392
+ // PERF-DB-010: Aggregate without $match first
1393
+ { id: 'PERF-DB-010', category: 'performance', severity: 'high', confidence: 'likely', title: 'Aggregation Pipeline Without Early $match',
1394
+ check({ files }) {
1395
+ const findings = [];
1396
+ for (const [fp, c] of files) {
1397
+ if (!isSourceFile(fp)) continue;
1398
+ const lines = c.split('\n');
1399
+ for (let i = 0; i < lines.length; i++) {
1400
+ if (lines[i].match(/\.aggregate\s*\(\s*\[/)) {
1401
+ const block = lines.slice(i, Math.min(i + 5, lines.length)).join('\n');
1402
+ if (block.match(/\$group|\$sort|\$unwind/) && !block.match(/\$match/)) {
1403
+ findings.push({ ruleId: 'PERF-DB-010', category: 'performance', severity: 'high',
1404
+ title: 'MongoDB aggregation pipeline without $match — processing all documents',
1405
+ description: 'Add $match as the first stage to filter documents before expensive $group/$sort/$unwind operations.', file: fp, line: i + 1, fix: null });
1406
+ }
1407
+ }
1408
+ }
1409
+ }
1410
+ return findings;
1411
+ },
1412
+ },
1413
+
1414
+ // PERF-DB-011: Using findAll in Sequelize without where
1415
+ { id: 'PERF-DB-011', category: 'performance', severity: 'high', confidence: 'likely', title: 'Sequelize findAll Without Where Clause',
1416
+ check({ files }) {
1417
+ const findings = [];
1418
+ for (const [fp, c] of files) {
1419
+ if (!isSourceFile(fp)) continue;
1420
+ const lines = c.split('\n');
1421
+ for (let i = 0; i < lines.length; i++) {
1422
+ if (lines[i].match(/\.findAll\s*\(\s*\{/) || lines[i].match(/\.findAll\s*\(\s*\)/)) {
1423
+ const block = lines.slice(i, Math.min(i + 8, lines.length)).join('\n');
1424
+ if (!block.match(/where:|limit:|order:/) && fp.includes('api/') || fp.includes('route')) {
1425
+ findings.push({ ruleId: 'PERF-DB-011', category: 'performance', severity: 'high',
1426
+ title: 'Sequelize findAll without where/limit in API handler',
1427
+ description: 'Add where and limit clauses to avoid returning entire tables. Use pagination for list endpoints.', file: fp, line: i + 1, fix: null });
1428
+ }
1429
+ }
1430
+ }
1431
+ }
1432
+ return findings;
1433
+ },
1434
+ },
1435
+
1436
+ // PERF-DB-012: COUNT on non-indexed column
1437
+ { id: 'PERF-DB-012', category: 'performance', severity: 'medium', confidence: 'likely', title: 'COUNT Without Indexed Column',
1438
+ check({ files }) {
1439
+ const findings = [];
1440
+ for (const [fp, c] of files) {
1441
+ if (!isSourceFile(fp)) continue;
1442
+ const lines = c.split('\n');
1443
+ for (let i = 0; i < lines.length; i++) {
1444
+ if (lines[i].match(/SELECT\s+COUNT\s*\(\s*(?![\*])\w+\s*\)/i) && !lines[i].match(/COUNT\s*\(\s*\*\s*\)/i)) {
1445
+ findings.push({ ruleId: 'PERF-DB-012', category: 'performance', severity: 'medium',
1446
+ title: 'COUNT(column) on potentially unindexed column — use COUNT(*) or add index',
1447
+ description: 'COUNT(*) is optimized by most databases. For COUNT(column), ensure the column is indexed.', file: fp, line: i + 1, fix: null });
1448
+ }
1449
+ }
1450
+ }
1451
+ return findings;
1452
+ },
1453
+ },
1454
+
1455
+ // PERF-DB-013: Raw SQL in hot path without prepared statement
1456
+ { id: 'PERF-DB-013', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Raw SQL Without Prepared Statement',
1457
+ check({ files }) {
1458
+ const findings = [];
1459
+ for (const [fp, c] of files) {
1460
+ if (!isSourceFile(fp)) continue;
1461
+ const lines = c.split('\n');
1462
+ for (let i = 0; i < lines.length; i++) {
1463
+ if (lines[i].match(/db\.query\s*\(\s*`|pool\.query\s*\(\s*`/) && lines[i].match(/\$\{/)) {
1464
+ findings.push({ ruleId: 'PERF-DB-013', category: 'performance', severity: 'medium',
1465
+ title: 'Raw SQL with template literal interpolation — no query plan reuse',
1466
+ description: 'Use parameterized queries ($1, $2) instead of template literals. Parameterized queries allow the DB to cache execution plans.', file: fp, line: i + 1, fix: null });
1467
+ }
1468
+ }
1469
+ }
1470
+ return findings;
1471
+ },
1472
+ },
1473
+
1474
+ // PERF-CACHE-005: No service worker for offline
1475
+ { id: 'PERF-CACHE-005', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'No Service Worker',
1476
+ check({ files, stack }) {
1477
+ const findings = [];
1478
+ if (!['react', 'nextjs', 'vue', 'sveltekit'].includes(stack.framework)) return findings;
1479
+ const hasSW = [...files.keys()].some(f => f.includes('service-worker') || f.includes('sw.js') || f.includes('workbox'));
1480
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1481
+ const hasWorkbox = 'workbox-webpack-plugin' in allDeps || 'vite-plugin-pwa' in allDeps;
1482
+ if (!hasSW && !hasWorkbox) {
1483
+ findings.push({ ruleId: 'PERF-CACHE-005', category: 'performance', severity: 'low',
1484
+ title: 'No service worker for offline support or asset caching',
1485
+ description: 'A service worker can cache static assets and API responses, dramatically improving repeat load times. Use Workbox for easy setup.', fix: null });
1486
+ }
1487
+ return findings;
1488
+ },
1489
+ },
1490
+
1491
+ // PERF-FE-006: moment.js import
1492
+ { id: 'PERF-FE-006', category: 'performance', severity: 'medium', confidence: 'likely', title: 'moment.js Adds 67KB+ to Bundle',
1493
+ check({ files }) {
1494
+ const findings = [];
1495
+ for (const [fp, c] of files) {
1496
+ if (!isSourceFile(fp)) continue;
1497
+ if (c.match(/(?:import|require)\s*(?:\w+\s*=\s*require\s*\()?['"`]moment['"`]/)) {
1498
+ findings.push({ ruleId: 'PERF-FE-006', category: 'performance', severity: 'medium',
1499
+ title: "moment.js import adds 67KB+ to bundle — use date-fns or dayjs instead",
1500
+ description: 'date-fns is tree-shakeable and adds only the functions you use. dayjs is 2KB. Both are API-compatible alternatives.', file: fp, fix: null });
1501
+ }
1502
+ }
1503
+ return findings;
1504
+ },
1505
+ },
1506
+
1507
+ // PERF-FE-007: No preconnect for critical third-party origins
1508
+ { id: 'PERF-FE-007', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'No Resource Hints for Third-Party Origins',
1509
+ check({ files }) {
1510
+ const findings = [];
1511
+ for (const [fp, c] of files) {
1512
+ if (!fp.match(/\.(html)$/) && !fp.match(/_document\.(jsx|tsx)$/)) continue;
1513
+ const hasThirdParty = c.match(/fonts\.googleapis|cdnjs\.cloudflare|unpkg\.com|cdn\.jsdelivr/);
1514
+ const hasHints = c.match(/rel=["'](?:preconnect|dns-prefetch)["']/);
1515
+ if (hasThirdParty && !hasHints) {
1516
+ findings.push({ ruleId: 'PERF-FE-007', category: 'performance', severity: 'low',
1517
+ title: 'Third-party origins without <link rel="preconnect"> hints',
1518
+ description: 'Add <link rel="preconnect" href="https://fonts.googleapis.com"> to establish connections early and reduce latency by 100-200ms.', file: fp, fix: null });
1519
+ }
1520
+ }
1521
+ return findings;
1522
+ },
1523
+ },
1524
+
1525
+ // PERF-FE-008: font-display not set
1526
+ { id: 'PERF-FE-008', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Web Font Without font-display',
1527
+ check({ files }) {
1528
+ const findings = [];
1529
+ for (const [fp, c] of files) {
1530
+ if (!fp.match(/\.(css|scss|sass)$/)) continue;
1531
+ if (c.includes('@font-face') && !c.includes('font-display')) {
1532
+ findings.push({ ruleId: 'PERF-FE-008', category: 'performance', severity: 'medium',
1533
+ title: '@font-face without font-display — invisible text during font load (FOIT)',
1534
+ description: 'Add font-display: swap to show fallback text immediately. Prevents invisible text while fonts load.', file: fp, fix: null });
1535
+ }
1536
+ }
1537
+ return findings;
1538
+ },
1539
+ },
1540
+
1541
+ // PERF-API-005: No HTTP compression Accept-Encoding
1542
+ { id: 'PERF-API-005', category: 'performance', severity: 'medium', confidence: 'likely', title: 'API Responses Not Compressed',
1543
+ check({ files, stack }) {
1544
+ const findings = [];
1545
+ if (!['express', 'koa', 'fastify'].includes(stack.framework)) return findings;
1546
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1547
+ const hasCompression = 'compression' in allDeps || '@fastify/compress' in allDeps || 'koa-compress' in allDeps;
1548
+ if (!hasCompression && ![...files.values()].some(c => c.includes('compression') || c.includes('brotli'))) {
1549
+ findings.push({ ruleId: 'PERF-API-005', category: 'performance', severity: 'medium',
1550
+ title: 'No response compression configured — sending uncompressed API responses',
1551
+ description: 'Add compression middleware. JSON responses typically compress 70-85%, dramatically reducing bandwidth and improving mobile performance.', fix: null });
1552
+ }
1553
+ return findings;
1554
+ },
1555
+ },
1556
+
1557
+ // PERF-API-006: Returning full objects when partial needed
1558
+ { id: 'PERF-API-006', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'API Returns Full Objects Without Field Filtering',
1559
+ check({ files }) {
1560
+ const findings = [];
1561
+ for (const [fp, c] of files) {
1562
+ if (!isSourceFile(fp)) continue;
1563
+ if (!(fp.includes('api/') || fp.includes('route'))) continue;
1564
+ if (c.match(/res\.json\s*\(\s*\w+\s*\)/) && !c.match(/fields|select|pick|omit|projection/)) {
1565
+ if (c.match(/findMany|findAll|find\s*\(\s*\{/)) {
1566
+ findings.push({ ruleId: 'PERF-API-006', category: 'performance', severity: 'low',
1567
+ title: 'API returns full DB records without field filtering',
1568
+ description: 'Return only fields the client needs. Large objects waste bandwidth and expose unnecessary data.', file: fp, fix: null });
1569
+ }
1570
+ }
1571
+ }
1572
+ return findings;
1573
+ },
1574
+ },
1575
+
1576
+ // PERF-API-007: No request timeout on outbound HTTP calls
1577
+ { id: 'PERF-API-007', category: 'performance', severity: 'high', confidence: 'likely', title: 'Outbound HTTP Calls Without Timeout',
1578
+ check({ files }) {
1579
+ const findings = [];
1580
+ for (const [fp, c] of files) {
1581
+ if (!isSourceFile(fp)) continue;
1582
+ const lines = c.split('\n');
1583
+ for (let i = 0; i < lines.length; i++) {
1584
+ if (lines[i].match(/\bfetch\s*\(\s*['"`]https?:\/\//) && !lines[i].match(/signal|timeout/)) {
1585
+ const block = lines.slice(i, Math.min(i + 5, lines.length)).join('\n');
1586
+ if (!block.match(/signal|AbortController|timeout/)) {
1587
+ findings.push({ ruleId: 'PERF-API-007', category: 'performance', severity: 'high',
1588
+ title: 'fetch() without timeout — can hang indefinitely if remote server is slow',
1589
+ description: 'Use AbortController with setTimeout: const controller = new AbortController(); setTimeout(() => controller.abort(), 5000);', file: fp, line: i + 1, fix: null });
1590
+ }
1591
+ }
1592
+ }
1593
+ }
1594
+ return findings;
1595
+ },
1596
+ },
1597
+
1598
+ // PERF-MEM-005: Regex compiled inside loop
1599
+ { id: 'PERF-MEM-005', category: 'performance', severity: 'medium', confidence: 'likely', title: 'RegExp Compiled Inside Loop',
1600
+ check({ files }) {
1601
+ const findings = [];
1602
+ for (const [fp, c] of files) {
1603
+ if (!isSourceFile(fp)) continue;
1604
+ const lines = c.split('\n');
1605
+ for (let i = 0; i < lines.length; i++) {
1606
+ if (lines[i].match(/for\s*\(|\.forEach\s*\(|\.map\s*\(|\.filter\s*\(/)) {
1607
+ const block = lines.slice(i, Math.min(i + 8, lines.length)).join('\n');
1608
+ if (block.match(/new\s+RegExp\s*\(/)) {
1609
+ findings.push({ ruleId: 'PERF-MEM-005', category: 'performance', severity: 'medium',
1610
+ title: 'new RegExp() constructed inside loop — should be compiled once outside',
1611
+ description: 'Move RegExp construction outside the loop. const re = new RegExp(...); then use re inside the loop.', file: fp, line: i + 1, fix: null });
1612
+ }
1613
+ }
1614
+ }
1615
+ }
1616
+ return findings;
1617
+ },
1618
+ },
1619
+
1620
+ // PERF-MEM-006: Large base64 in memory
1621
+ { id: 'PERF-MEM-006', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Base64 Encoding Binary Data in Memory',
1622
+ check({ files }) {
1623
+ const findings = [];
1624
+ for (const [fp, c] of files) {
1625
+ if (!isSourceFile(fp)) continue;
1626
+ if (c.match(/\.toString\s*\(\s*['"`]base64['"`]\s*\)/) && (fp.includes('api/') || fp.includes('route'))) {
1627
+ findings.push({ ruleId: 'PERF-MEM-006', category: 'performance', severity: 'medium',
1628
+ title: 'Converting binary data to base64 in request handler — increases memory usage 33%',
1629
+ description: 'Stream binary data directly instead of base64-encoding it in memory. Use streaming for file uploads/downloads.', file: fp, fix: null });
1630
+ }
1631
+ }
1632
+ return findings;
1633
+ },
1634
+ },
1635
+
1636
+ // PERF-ASYNC-003: No concurrency limit on parallel ops
1637
+ { id: 'PERF-ASYNC-003', category: 'performance', severity: 'high', confidence: 'likely', title: 'No Concurrency Limit on Parallel Operations',
1638
+ check({ files, stack }) {
1639
+ const findings = [];
1640
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1641
+ for (const [fp, c] of files) {
1642
+ if (!isSourceFile(fp)) continue;
1643
+ if (c.match(/Promise\.all\s*\(\s*\w+\.map/) && !c.match(/pLimit|p-limit|bottleneck|throat|concurrency/)) {
1644
+ if ((c.match(/Promise\.all/g) || []).length > 1) {
1645
+ findings.push({ ruleId: 'PERF-ASYNC-003', category: 'performance', severity: 'high',
1646
+ title: 'Promise.all on unbounded array — can overwhelm database/API with concurrent requests',
1647
+ description: 'Use p-limit to cap concurrency: const limit = pLimit(10); await Promise.all(items.map(item => limit(() => process(item))))', file: fp, fix: null });
1648
+ }
1649
+ }
1650
+ }
1651
+ return findings;
1652
+ },
1653
+ },
1654
+
1655
+ // PERF-ASYNC-004: Synchronous require in async function
1656
+ { id: 'PERF-ASYNC-004', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Dynamic require() in Hot Path',
1657
+ check({ files }) {
1658
+ const findings = [];
1659
+ for (const [fp, c] of files) {
1660
+ if (!isSourceFile(fp)) continue;
1661
+ const lines = c.split('\n');
1662
+ for (let i = 0; i < lines.length; i++) {
1663
+ if (lines[i].match(/\brequire\s*\(/) && (fp.includes('api/') || fp.includes('route') || fp.includes('handler'))) {
1664
+ const fnCtx = lines.slice(Math.max(0, i - 20), i).join('\n');
1665
+ if (fnCtx.match(/async\s+function|=\s*async\s*\(|router\.\w+/)) {
1666
+ findings.push({ ruleId: 'PERF-ASYNC-004', category: 'performance', severity: 'medium',
1667
+ title: 'require() called inside async function/route handler — synchronous disk I/O on every request',
1668
+ description: 'Move require() to the top of the file. Dynamic requires in route handlers block the event loop on first call.', file: fp, line: i + 1, fix: null });
1669
+ break;
1670
+ }
1671
+ }
1672
+ }
1673
+ }
1674
+ return findings;
1675
+ },
1676
+ },
1677
+
1678
+ // PERF-MON-004: No distributed tracing
1679
+ { id: 'PERF-MON-004', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Distributed Tracing',
1680
+ check({ files, stack }) {
1681
+ const findings = [];
1682
+ if (stack.runtime !== 'node') return findings;
1683
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1684
+ const hasTracing = '@opentelemetry/sdk-node' in allDeps || '@opentelemetry/api' in allDeps ||
1685
+ 'dd-trace' in allDeps || 'jaeger-client' in allDeps || 'zipkin' in allDeps;
1686
+ if (!hasTracing && [...files.values()].some(c => c.match(/fetch\(|axios\.|got\(/))) {
1687
+ findings.push({ ruleId: 'PERF-MON-004', category: 'performance', severity: 'medium',
1688
+ title: 'No distributed tracing configured',
1689
+ description: 'Add OpenTelemetry to trace requests across services. Without tracing, finding slow operations in distributed systems is extremely difficult.', fix: null });
1690
+ }
1691
+ return findings;
1692
+ },
1693
+ },
1694
+
1695
+ // PERF-MON-005: No performance budget
1696
+ { id: 'PERF-MON-005', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'No Performance Budget',
1697
+ check({ files, stack }) {
1698
+ const findings = [];
1699
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1700
+ const hasBudget = 'bundlesize' in allDeps || 'size-limit' in allDeps ||
1701
+ [...files.keys()].some(f => f.includes('lighthouserc') || f.includes('budget'));
1702
+ if (!hasBudget && [...files.keys()].some(f => f.match(/\.(jsx|tsx)$/))) {
1703
+ findings.push({ ruleId: 'PERF-MON-005', category: 'performance', severity: 'low',
1704
+ title: 'No performance budget (Lighthouse CI, size-limit) configured',
1705
+ description: 'Add size-limit or Lighthouse CI to fail builds when bundle size or performance scores regress.', fix: null });
1706
+ }
1707
+ return findings;
1708
+ },
1709
+ },
1710
+
1711
+ // PERF-LOAD-011: No queue for background jobs
1712
+ { id: 'PERF-LOAD-011', category: 'performance', severity: 'high', confidence: 'likely', title: 'No Job Queue for Background Processing',
1713
+ check({ files, stack }) {
1714
+ const findings = [];
1715
+ if (stack.runtime !== 'node') return findings;
1716
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1717
+ const hasQueue = ['bull', 'bullmq', '@bull-board/api', 'bee-queue', 'agenda', 'kue', 'pg-boss'].some(d => d in allDeps);
1718
+ const hasHeavyWork = [...files.values()].some(c =>
1719
+ c.match(/sendEmail|sendMail|generatePDF|processImage|resize|compress|export/i)
1720
+ );
1721
+ if (!hasQueue && hasHeavyWork) {
1722
+ findings.push({ ruleId: 'PERF-LOAD-011', category: 'performance', severity: 'high',
1723
+ title: 'Heavy operations (email/PDF/image) processed synchronously in request handler',
1724
+ description: 'Use a job queue (BullMQ, Agenda) to process heavy operations asynchronously. Respond immediately and process in background.', fix: null });
1725
+ }
1726
+ return findings;
1727
+ },
1728
+ },
1729
+
1730
+ // PERF-LOAD-012: In-memory session with no external store
1731
+ { id: 'PERF-LOAD-012', category: 'performance', severity: 'high', confidence: 'likely', title: 'In-Memory Session Store',
1732
+ check({ files, stack }) {
1733
+ const findings = [];
1734
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1735
+ const hasSession = 'express-session' in allDeps;
1736
+ const hasExternalStore = ['connect-redis', 'connect-mongo', 'connect-pg-simple'].some(d => d in allDeps);
1737
+ if (hasSession && !hasExternalStore) {
1738
+ findings.push({ ruleId: 'PERF-LOAD-012', category: 'performance', severity: 'high',
1739
+ title: 'express-session using in-memory store — sessions lost on restart, not scalable',
1740
+ description: 'Use connect-redis or connect-pg-simple to store sessions externally. In-memory sessions prevent horizontal scaling and are lost on restart.', fix: null });
1741
+ }
1742
+ return findings;
1743
+ },
1744
+ },
1745
+
1746
+ // PERF-DB-014: Missing composite index for common query patterns
1747
+ { id: 'PERF-DB-014', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Composite Index for Multi-Column Queries',
1748
+ check({ files }) {
1749
+ const findings = [];
1750
+ for (const [fp, c] of files) {
1751
+ if (!isSourceFile(fp)) continue;
1752
+ const lines = c.split('\n');
1753
+ for (let i = 0; i < lines.length; i++) {
1754
+ if (lines[i].match(/WHERE\s+\w+\s*=.*AND\s+\w+\s*=/i)) {
1755
+ const nearbySchema = c.match(/index|Index|INDEX/);
1756
+ if (!nearbySchema) {
1757
+ findings.push({ ruleId: 'PERF-DB-014', category: 'performance', severity: 'medium', title: 'Multi-column WHERE clause without composite index', description: 'Create a composite index for the columns used together in WHERE clauses. ORDER: put equality columns first, then range columns.', file: fp, line: i + 1, fix: null });
1758
+ }
1759
+ }
1760
+ }
1761
+ }
1762
+ return findings;
1763
+ },
1764
+ },
1765
+
1766
+ // PERF-DB-015: Full text search on LIKE
1767
+ { id: 'PERF-DB-015', category: 'performance', severity: 'high', confidence: 'likely', title: 'Full Text Search Using LIKE — Use FTS Index',
1768
+ check({ files }) {
1769
+ const findings = [];
1770
+ for (const [fp, c] of files) {
1771
+ if (!isSourceFile(fp)) continue;
1772
+ const lines = c.split('\n');
1773
+ for (let i = 0; i < lines.length; i++) {
1774
+ if (lines[i].match(/LIKE\s+['"]%.*%['"]/i)) {
1775
+ findings.push({ ruleId: 'PERF-DB-015', category: 'performance', severity: 'high', title: 'Full text search using LIKE %term% — full table scan, use FTS index', description: 'Use PostgreSQL full-text search (tsvector/tsquery), MySQL FULLTEXT index, or Elasticsearch. LIKE %term% cannot use indexes.', file: fp, line: i + 1, fix: null });
1776
+ }
1777
+ }
1778
+ }
1779
+ return findings;
1780
+ },
1781
+ },
1782
+
1783
+ // PERF-CACHE-006: No cache warming strategy
1784
+ { id: 'PERF-CACHE-006', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'No Cache Warming Strategy',
1785
+ check({ files, stack }) {
1786
+ const findings = [];
1787
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1788
+ const hasCache = ['redis', 'ioredis', 'memcached', 'node-cache'].some(d => d in allDeps);
1789
+ const hasWarm = [...files.values()].some(c => c.match(/warm.*cache|cache.*warm|preload|prefill|prewarm/i));
1790
+ if (hasCache && !hasWarm) {
1791
+ findings.push({ ruleId: 'PERF-CACHE-006', category: 'performance', severity: 'low', title: 'Cache configured without warming strategy — cold start causes latency spikes', description: 'Pre-populate cache on startup or after deploy. Cold caches cause latency spikes after deployments and cache flushes.', fix: null });
1792
+ }
1793
+ return findings;
1794
+ },
1795
+ },
1796
+
1797
+ // PERF-CACHE-007: Cache key not namespaced
1798
+ { id: 'PERF-CACHE-007', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Cache Keys Not Namespaced',
1799
+ check({ files }) {
1800
+ const findings = [];
1801
+ for (const [fp, c] of files) {
1802
+ if (!isSourceFile(fp)) continue;
1803
+ const lines = c.split('\n');
1804
+ for (let i = 0; i < lines.length; i++) {
1805
+ if (lines[i].match(/redis\.set\(['"](?!app:|api:|user:|session:)\w{1,20}['"]/i) || lines[i].match(/cache\.set\(['"](?!app:|api:|user:|session:)\w{1,20}['"]/i)) {
1806
+ findings.push({ ruleId: 'PERF-CACHE-007', category: 'performance', severity: 'medium', title: 'Cache key without namespace prefix — key collisions between services', description: "Prefix cache keys with service/version: 'user:v2:123'. Unnamespaced keys collide between microservices and versions.", file: fp, line: i + 1, fix: null });
1807
+ }
1808
+ }
1809
+ }
1810
+ return findings;
1811
+ },
1812
+ },
1813
+
1814
+ // PERF-API-008: Returning full objects in list endpoints
1815
+ { id: 'PERF-API-008', category: 'performance', severity: 'medium', confidence: 'likely', title: 'List Endpoint Returns Full Objects',
1816
+ check({ files }) {
1817
+ const findings = [];
1818
+ for (const [fp, c] of files) {
1819
+ if (!isSourceFile(fp)) continue;
1820
+ const lines = c.split('\n');
1821
+ for (let i = 0; i < lines.length; i++) {
1822
+ if (lines[i].match(/router\.get\(['"]\/\w+s['"]/i)) {
1823
+ const handler = lines.slice(i, i + 20).join('\n');
1824
+ if (handler.match(/find\(\)|findAll\(\)|findMany\(\)/) && !handler.match(/select|attributes|projection/)) {
1825
+ findings.push({ ruleId: 'PERF-API-008', category: 'performance', severity: 'medium', title: 'List endpoint returns full objects — consider field projection', description: 'Return only the fields needed by the UI. List APIs with full objects transfer 10-100x more data than necessary.', file: fp, line: i + 1, fix: null });
1826
+ }
1827
+ }
1828
+ }
1829
+ }
1830
+ return findings;
1831
+ },
1832
+ },
1833
+
1834
+ // PERF-API-009: No HTTP/2 push or server-sent events for real-time
1835
+ { id: 'PERF-API-009', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Polling Instead of Server-Sent Events',
1836
+ check({ files }) {
1837
+ const findings = [];
1838
+ for (const [fp, c] of files) {
1839
+ if (!isSourceFile(fp)) continue;
1840
+ const lines = c.split('\n');
1841
+ for (let i = 0; i < lines.length; i++) {
1842
+ if (lines[i].match(/setInterval\s*\(.*fetch|polling.*interval|poll.*\d+000/i)) {
1843
+ findings.push({ ruleId: 'PERF-API-009', category: 'performance', severity: 'low', title: 'Client polling detected — use Server-Sent Events or WebSocket', description: 'Replace frequent polling with SSE (EventSource) or WebSocket. Polling wastes bandwidth; SSE/WebSocket only send data when it changes.', file: fp, line: i + 1, fix: null });
1844
+ }
1845
+ }
1846
+ }
1847
+ return findings;
1848
+ },
1849
+ },
1850
+
1851
+ // PERF-FE-009: No lazy loading for images
1852
+ { id: 'PERF-FE-009', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Images Without Lazy Loading',
1853
+ check({ files }) {
1854
+ const findings = [];
1855
+ for (const [fp, c] of files) {
1856
+ if (!fp.match(/\.(html|jsx|tsx|vue)$/)) continue;
1857
+ const lines = c.split('\n');
1858
+ let imgCount = 0, lazyCount = 0;
1859
+ for (const line of lines) {
1860
+ if (line.match(/<img\s/i)) { imgCount++; if (line.match(/loading=["']lazy|lazy/i)) lazyCount++; }
1861
+ }
1862
+ if (imgCount > 3 && lazyCount === 0) {
1863
+ findings.push({ ruleId: 'PERF-FE-009', category: 'performance', severity: 'medium', title: `${imgCount} images without loading="lazy" — all loaded on page load`, description: 'Add loading="lazy" to images below the fold. Lazy loading reduces initial page weight and speeds first contentful paint.', file: fp, fix: null });
1864
+ }
1865
+ }
1866
+ return findings;
1867
+ },
1868
+ },
1869
+
1870
+ // PERF-FE-010: Large unoptimized SVG files inline
1871
+ { id: 'PERF-FE-010', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Large Inline SVG Without Optimization',
1872
+ check({ files }) {
1873
+ const findings = [];
1874
+ for (const [fp, c] of files) {
1875
+ if (!fp.endsWith('.svg') && !fp.match(/\.(jsx|tsx|vue|html)$/)) continue;
1876
+ const svgSize = (c.match(/<svg/gi) || []).length;
1877
+ const hasOptimizer = c.match(/svgo|@svgr\/webpack|svgr/i);
1878
+ if (fp.endsWith('.svg') && c.length > 10000 && !hasOptimizer) {
1879
+ findings.push({ ruleId: 'PERF-FE-010', category: 'performance', severity: 'low', title: `SVG file ${Math.round(c.length / 1024)}KB — run through SVGO optimizer`, description: 'Run SVGs through SVGO. Unoptimized SVGs often contain redundant metadata that SVGO removes, reducing size 50-80%.', file: fp, fix: null });
1880
+ }
1881
+ }
1882
+ return findings;
1883
+ },
1884
+ },
1885
+
1886
+ // PERF-FE-011: No resource hints for critical third-party domains
1887
+ { id: 'PERF-FE-011', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Resource Hints for Critical Resources',
1888
+ check({ files }) {
1889
+ const findings = [];
1890
+ for (const [fp, c] of files) {
1891
+ if (!fp.match(/\.(html|jsx|tsx)$/) && !fp.match(/document\.(tsx|jsx|js)/)) continue;
1892
+ const hasThirdParty = c.match(/fonts\.googleapis\.com|cdn\.|cdnjs\.|unpkg\.|jsdelivr\./i);
1893
+ const hasPreconnect = c.match(/rel=["']preconnect|rel=["']dns-prefetch/i);
1894
+ if (hasThirdParty && !hasPreconnect) {
1895
+ findings.push({ ruleId: 'PERF-FE-011', category: 'performance', severity: 'medium', title: 'Third-party resources without preconnect hints — DNS lookup delays first render', description: 'Add <link rel="preconnect" href="https://fonts.googleapis.com">. Preconnect eliminates DNS + TCP + TLS handshake time for critical third-party resources.', file: fp, fix: null });
1896
+ }
1897
+ }
1898
+ return findings;
1899
+ },
1900
+ },
1901
+
1902
+ // PERF-MEM-007: Buffer.concat in loop
1903
+ { id: 'PERF-MEM-007', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Buffer.concat or Array.concat in Loop',
1904
+ check({ files }) {
1905
+ const findings = [];
1906
+ for (const [fp, c] of files) {
1907
+ if (!isSourceFile(fp)) continue;
1908
+ const lines = c.split('\n');
1909
+ for (let i = 0; i < lines.length; i++) {
1910
+ if (lines[i].match(/for\s*\(|while\s*\(|forEach|\.map\s*\(/)) {
1911
+ const block = lines.slice(i, i + 10).join('\n');
1912
+ if (block.match(/Buffer\.concat|\.concat\(/)) {
1913
+ findings.push({ ruleId: 'PERF-MEM-007', category: 'performance', severity: 'medium', title: 'Buffer.concat or Array.concat inside loop — O(n²) memory allocations', description: 'Collect items in array, concat once outside loop: Buffer.concat(chunks). Concat inside loops reallocates the entire buffer each iteration.', file: fp, line: i + 1, fix: null });
1914
+ }
1915
+ }
1916
+ }
1917
+ }
1918
+ return findings;
1919
+ },
1920
+ },
1921
+
1922
+ // PERF-MEM-008: Large object stored in session
1923
+ { id: 'PERF-MEM-008', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Large Object Stored in Session',
1924
+ check({ files }) {
1925
+ const findings = [];
1926
+ for (const [fp, c] of files) {
1927
+ if (!isSourceFile(fp)) continue;
1928
+ const lines = c.split('\n');
1929
+ for (let i = 0; i < lines.length; i++) {
1930
+ if (lines[i].match(/req\.session\.\w+\s*=/) && !lines[i].match(/req\.session\.userId|req\.session\.id|req\.session\.token/)) {
1931
+ findings.push({ ruleId: 'PERF-MEM-008', category: 'performance', severity: 'medium', title: 'Large object stored in session — bloats session storage, slows serialization', description: 'Store only user ID in session; fetch full data as needed. Large session objects increase Redis memory usage and serialization time.', file: fp, line: i + 1, fix: null });
1932
+ }
1933
+ }
1934
+ }
1935
+ return findings;
1936
+ },
1937
+ },
1938
+
1939
+ // PERF-ASYNC-005: Not using worker threads for CPU-bound tasks
1940
+ { id: 'PERF-ASYNC-005', category: 'performance', severity: 'medium', confidence: 'likely', title: 'CPU-Intensive Code Without Worker Threads',
1941
+ check({ files }) {
1942
+ const findings = [];
1943
+ for (const [fp, c] of files) {
1944
+ if (!isSourceFile(fp)) continue;
1945
+ const lines = c.split('\n');
1946
+ for (let i = 0; i < lines.length; i++) {
1947
+ if (lines[i].match(/imagemagick|sharp\.\w+|ffmpeg|pdf.*generate|xlsx.*create|csv.*parse/i) && !lines[i].match(/worker_threads|Worker\(|workerPool/i)) {
1948
+ findings.push({ ruleId: 'PERF-ASYNC-005', category: 'performance', severity: 'medium', title: 'CPU-intensive operation (image/PDF/CSV) in main thread — blocks event loop', description: 'Offload to worker_threads or a job queue (Bull, BullMQ). CPU work in the main thread blocks all concurrent HTTP requests.', file: fp, line: i + 1, fix: null });
1949
+ }
1950
+ }
1951
+ }
1952
+ return findings;
1953
+ },
1954
+ },
1955
+
1956
+ // PERF-ASYNC-006: No request batching for GraphQL
1957
+ { id: 'PERF-ASYNC-006', category: 'performance', severity: 'medium', confidence: 'definite', title: 'GraphQL Without DataLoader (N+1)',
1958
+ check({ files, stack }) {
1959
+ const findings = [];
1960
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
1961
+ const hasGraphQL = 'graphql' in allDeps || '@apollo/server' in allDeps || 'apollo-server' in allDeps;
1962
+ const hasDataLoader = 'dataloader' in allDeps;
1963
+ if (hasGraphQL && !hasDataLoader) {
1964
+ findings.push({ ruleId: 'PERF-ASYNC-006', category: 'performance', severity: 'medium', title: 'GraphQL without DataLoader — N+1 query problem in resolvers', description: 'Use DataLoader for batching and caching database calls in GraphQL resolvers. Without it, each resolved field may trigger a separate DB query.', fix: null });
1965
+ }
1966
+ return findings;
1967
+ },
1968
+ },
1969
+
1970
+ // PERF-MON-006: No performance benchmarks
1971
+ { id: 'PERF-MON-006', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'No Performance Benchmarks in CI',
1972
+ check({ files }) {
1973
+ const findings = [];
1974
+ const hasBench = [...files.values()].some(c => c.match(/benchmark|bench\b|autocannon|k6|artillery|wrk|ab\s+-n/i)) || [...files.keys()].some(f => f.match(/bench|perf/i));
1975
+ if (!hasBench) {
1976
+ findings.push({ ruleId: 'PERF-MON-006', category: 'performance', severity: 'low', title: 'No performance benchmarks — regressions not automatically detected', description: 'Add benchmarks with autocannon or k6. CI benchmarks catch performance regressions before they reach production.', fix: null });
1977
+ }
1978
+ return findings;
1979
+ },
1980
+ },
1981
+
1982
+ // PERF-MON-007: No database slow query log
1983
+ { id: 'PERF-MON-007', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Database Slow Query Monitoring',
1984
+ check({ files }) {
1985
+ const findings = [];
1986
+ const allCode = [...files.values()].join('\n');
1987
+ const hasSlowQuery = allCode.match(/slow.*query|query.*time|slowQueryThreshold|mongoose.*debug|sequelize.*logging|knex.*debug/i);
1988
+ if (!hasSlowQuery) {
1989
+ findings.push({ ruleId: 'PERF-MON-007', category: 'performance', severity: 'medium', title: 'No slow query logging configured — slow queries go undetected', description: 'Enable slow query logs: mongoose.set("debug", true), or configure slow_query_log in MySQL/PostgreSQL. Slow queries are the most common performance bottleneck.', fix: null });
1990
+ }
1991
+ return findings;
1992
+ },
1993
+ },
1994
+
1995
+ // PERF-MON-008: No Core Web Vitals monitoring
1996
+ { id: 'PERF-MON-008', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Core Web Vitals Monitoring',
1997
+ check({ files, stack }) {
1998
+ const findings = [];
1999
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
2000
+ const hasFrontend = [...files.keys()].some(f => f.match(/\.(jsx|tsx|vue|svelte)$/));
2001
+ const hasCWV = 'web-vitals' in allDeps || [...files.values()].some(c => c.match(/web-vitals|getCLS|getLCP|getFID|getFCP|getTTFB/i));
2002
+ if (hasFrontend && !hasCWV) {
2003
+ findings.push({ ruleId: 'PERF-MON-008', category: 'performance', severity: 'medium', title: 'No Core Web Vitals monitoring — LCP, CLS, FID not tracked', description: 'Add web-vitals library and report to your analytics. Core Web Vitals (LCP <2.5s, CLS <0.1, FID <100ms) directly affect Google search ranking.', fix: null });
2004
+ }
2005
+ return findings;
2006
+ },
2007
+ },
2008
+
2009
+ // PERF-LOAD-013: No queue for background jobs
2010
+ { id: 'PERF-LOAD-013', category: 'performance', severity: 'high', confidence: 'likely', title: 'Long Operations Executed Synchronously in Request',
2011
+ check({ files }) {
2012
+ const findings = [];
2013
+ for (const [fp, c] of files) {
2014
+ if (!isSourceFile(fp)) continue;
2015
+ const lines = c.split('\n');
2016
+ for (let i = 0; i < lines.length; i++) {
2017
+ if (lines[i].match(/app\.(post|put|patch)\s*\(['"]/i)) {
2018
+ const handler = lines.slice(i, i + 30).join('\n');
2019
+ if (handler.match(/sendEmail|sharp\.|pdf.*generate|resize|convert|render.*template/i) && !handler.match(/queue|bull|bee-queue|kue|agenda/i)) {
2020
+ findings.push({ ruleId: 'PERF-LOAD-013', category: 'performance', severity: 'high', title: 'Long-running operation (email/PDF/image) in request handler — delays response', description: 'Queue slow operations with Bull or BullMQ. Return 202 Accepted and process asynchronously. Synchronous processing blocks the response and wastes connections.', file: fp, line: i + 1, fix: null });
2021
+ }
2022
+ }
2023
+ }
2024
+ }
2025
+ return findings;
2026
+ },
2027
+ },
2028
+
2029
+ // PERF-LOAD-014: WebSocket without heartbeat
2030
+ { id: 'PERF-LOAD-014', category: 'performance', severity: 'medium', confidence: 'likely', title: 'WebSocket Without Heartbeat/Ping',
2031
+ check({ files }) {
2032
+ const findings = [];
2033
+ for (const [fp, c] of files) {
2034
+ if (!isSourceFile(fp)) continue;
2035
+ if (c.match(/new WebSocket|WebSocketServer|io\.on\('connection'\)/i)) {
2036
+ if (!c.match(/ping|pong|heartbeat|keepAlive|pingInterval/i)) {
2037
+ findings.push({ ruleId: 'PERF-LOAD-014', category: 'performance', severity: 'medium', title: 'WebSocket without ping/pong heartbeat — dead connections accumulate', description: 'Implement ping/pong heartbeats (pingInterval: 30000). Without them, dead connections are not detected and accumulate, exhausting server capacity.', file: fp, fix: null });
2038
+ }
2039
+ }
2040
+ }
2041
+ return findings;
2042
+ },
2043
+ },
2044
+
2045
+ // PERF-DB-016: Using ORM without eager loading (lazy loaded N+1)
2046
+ { id: 'PERF-DB-016', category: 'performance', severity: 'high', confidence: 'likely', title: 'ORM Without Eager Loading Configured',
2047
+ check({ files }) {
2048
+ const findings = [];
2049
+ for (const [fp, c] of files) {
2050
+ if (!isSourceFile(fp)) continue;
2051
+ const lines = c.split('\n');
2052
+ for (let i = 0; i < lines.length; i++) {
2053
+ if (lines[i].match(/\.findAll\(\)|\.findMany\(\)|\.find\(\)/i)) {
2054
+ if (!lines[i].match(/include|populate|eager|join|with/i)) {
2055
+ const handlerLines = lines.slice(i + 1, i + 15).join('\n');
2056
+ if (handlerLines.match(/await.*\.get|await.*\.\w+\(\)|for.*of.*result/i)) {
2057
+ findings.push({ ruleId: 'PERF-DB-016', category: 'performance', severity: 'high', title: 'ORM query without include/populate — N+1 queries on related records', description: 'Use include/populate to eagerly load relations: findMany({ include: { posts: true } }). Lazy loading N relations = N+1 queries.', file: fp, line: i + 1, fix: null });
2058
+ }
2059
+ }
2060
+ }
2061
+ }
2062
+ }
2063
+ return findings;
2064
+ },
2065
+ },
2066
+
2067
+ // PERF-DB-017: Synchronous Sequelize operation
2068
+ { id: 'PERF-DB-017', category: 'performance', severity: 'high', confidence: 'likely', title: 'Missing await on ORM Operation',
2069
+ check({ files }) {
2070
+ const findings = [];
2071
+ for (const [fp, c] of files) {
2072
+ if (!isSourceFile(fp)) continue;
2073
+ const lines = c.split('\n');
2074
+ for (let i = 0; i < lines.length; i++) {
2075
+ if (lines[i].match(/User\.\w+\(|Post\.\w+\(|Product\.\w+\(/) && !lines[i].match(/await |\.then\(|return /i)) {
2076
+ if (lines[i].match(/\.findAll|\.findOne|\.create|\.update|\.destroy|\.findByPk/i)) {
2077
+ findings.push({ ruleId: 'PERF-DB-017', category: 'performance', severity: 'high', title: 'ORM method called without await — returns Promise, not data', description: 'Add await before ORM calls. Without await, you get a Promise object, not the actual data, causing undefined errors downstream.', file: fp, line: i + 1, fix: null });
2078
+ }
2079
+ }
2080
+ }
2081
+ }
2082
+ return findings;
2083
+ },
2084
+ },
2085
+
2086
+ // PERF-CACHE-008: No stale-while-revalidate pattern
2087
+ { id: 'PERF-CACHE-008', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Cache Without Stale-While-Revalidate',
2088
+ check({ files }) {
2089
+ const findings = [];
2090
+ const hasCache = [...files.values()].some(c => c.match(/redis|cache|memcached/i));
2091
+ const hasSWR = [...files.values()].some(c => c.match(/stale-while-revalidate|swr\b|staleWhileRevalidate|cache.*revalidate/i));
2092
+ if (hasCache && !hasSWR) {
2093
+ findings.push({ ruleId: 'PERF-CACHE-008', category: 'performance', severity: 'low', title: 'Cache without stale-while-revalidate — cache misses cause latency spikes', description: 'Implement stale-while-revalidate: serve stale data immediately, refresh in background. Eliminates cache miss latency while keeping data fresh.', fix: null });
2094
+ }
2095
+ return findings;
2096
+ },
2097
+ },
2098
+
2099
+ // PERF-FE-012: Third-party scripts blocking render
2100
+ { id: 'PERF-FE-012', category: 'performance', severity: 'high', confidence: 'likely', title: 'Third-Party Scripts Without async/defer',
2101
+ check({ files }) {
2102
+ const findings = [];
2103
+ for (const [fp, c] of files) {
2104
+ if (!fp.match(/\.(html)$/)) continue;
2105
+ const lines = c.split('\n');
2106
+ for (let i = 0; i < lines.length; i++) {
2107
+ if (lines[i].match(/<script[^>]+src=["']https?:\/\//i) && !lines[i].match(/async|defer/i)) {
2108
+ findings.push({ ruleId: 'PERF-FE-012', category: 'performance', severity: 'high', title: 'Third-party script without async/defer — blocks HTML parsing', description: 'Add async or defer attribute. Synchronous third-party scripts (analytics, ads) block page rendering and inflate Time to Interactive.', file: fp, line: i + 1, fix: null });
2109
+ }
2110
+ }
2111
+ }
2112
+ return findings;
2113
+ },
2114
+ },
2115
+
2116
+ // PERF-FE-013: No browser caching for static assets
2117
+ { id: 'PERF-FE-013', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Static Assets Without Long-Term Caching',
2118
+ check({ files }) {
2119
+ const findings = [];
2120
+ const hasCacheHeaders = [...files.values()].some(c => c.match(/Cache-Control.*max-age|immutable|public.*max-age|stale-while-revalidate/i));
2121
+ const hasStaticFiles = [...files.values()].some(c => c.match(/express\.static|serve-static|static.*middleware/i));
2122
+ if (hasStaticFiles && !hasCacheHeaders) {
2123
+ findings.push({ ruleId: 'PERF-FE-013', category: 'performance', severity: 'medium', title: 'Static file serving without Cache-Control max-age headers', description: 'Set Cache-Control: public, max-age=31536000, immutable for hashed assets. Content-hashed filenames allow permanent caching.', fix: null });
2124
+ }
2125
+ return findings;
2126
+ },
2127
+ },
2128
+
2129
+ // PERF-API-010: Synchronous crypto operations
2130
+ { id: 'PERF-API-010', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Synchronous bcrypt/Crypto in Request Handler',
2131
+ check({ files }) {
2132
+ const findings = [];
2133
+ for (const [fp, c] of files) {
2134
+ if (!isSourceFile(fp)) continue;
2135
+ const lines = c.split('\n');
2136
+ for (let i = 0; i < lines.length; i++) {
2137
+ if (lines[i].match(/bcrypt\.hashSync|bcrypt\.compareSync|pbkdf2Sync|scryptSync/)) {
2138
+ findings.push({ ruleId: 'PERF-API-010', category: 'performance', severity: 'medium', title: 'Synchronous bcrypt/crypto in request handler — blocks event loop', description: 'Use async: await bcrypt.hash(password, 10). bcryptSync blocks the Node.js event loop for 50-300ms, stalling all concurrent requests.', file: fp, line: i + 1, fix: null });
2139
+ }
2140
+ }
2141
+ }
2142
+ return findings;
2143
+ },
2144
+ },
2145
+
2146
+ // PERF-API-011: No response caching for GET endpoints
2147
+ { id: 'PERF-API-011', category: 'performance', severity: 'medium', confidence: 'likely', title: 'GET Endpoints Without Response Caching',
2148
+ check({ files }) {
2149
+ const findings = [];
2150
+ for (const [fp, c] of files) {
2151
+ if (!isSourceFile(fp)) continue;
2152
+ const lines = c.split('\n');
2153
+ let getCount = 0, cacheCount = 0;
2154
+ for (let i = 0; i < lines.length; i++) {
2155
+ if (lines[i].match(/router\.get|app\.get/i)) getCount++;
2156
+ if (lines[i].match(/Cache-Control|cache\.set|redis\.set|apicache|cache-manager/i)) cacheCount++;
2157
+ }
2158
+ if (getCount > 5 && cacheCount === 0) {
2159
+ findings.push({ ruleId: 'PERF-API-011', category: 'performance', severity: 'medium', title: `${getCount} GET routes with no response caching — repeated identical queries hit database`, description: 'Cache GET responses: res.set("Cache-Control", "public, max-age=60"). Even 60-second caching dramatically reduces database load.', file: fp, fix: null });
2160
+ }
2161
+ }
2162
+ return findings;
2163
+ },
2164
+ },
2165
+
2166
+ // PERF-MEM-009: No object pooling for frequently created objects
2167
+ { id: 'PERF-MEM-009', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'No Object Pooling for High-Frequency Objects',
2168
+ check({ files }) {
2169
+ const findings = [];
2170
+ for (const [fp, c] of files) {
2171
+ if (!isSourceFile(fp)) continue;
2172
+ const lines = c.split('\n');
2173
+ for (let i = 0; i < lines.length; i++) {
2174
+ if (lines[i].match(/new\s+(?:Buffer|Uint8Array|ArrayBuffer)\s*\(\d+\)/) && !lines[i].match(/\/\//)) {
2175
+ const nearby = lines.slice(Math.max(0, i - 3), i + 3).join('\n');
2176
+ if (nearby.match(/forEach|map\s*\(|for\s*\(/)) {
2177
+ findings.push({ ruleId: 'PERF-MEM-009', category: 'performance', severity: 'low', title: 'New Buffer/TypedArray allocated in loop — consider object pooling', description: 'Pre-allocate buffers and reuse them. Creating new Buffers/ArrayBuffers in tight loops causes GC pressure.', file: fp, line: i + 1, fix: null });
2178
+ }
2179
+ }
2180
+ }
2181
+ }
2182
+ return findings;
2183
+ },
2184
+ },
2185
+
2186
+ // PERF-ASYNC-007: Blocking the event loop with complex array operations
2187
+ { id: 'PERF-ASYNC-007', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Large Array Operations Blocking Event Loop',
2188
+ check({ files }) {
2189
+ const findings = [];
2190
+ for (const [fp, c] of files) {
2191
+ if (!isSourceFile(fp)) continue;
2192
+ const lines = c.split('\n');
2193
+ for (let i = 0; i < lines.length; i++) {
2194
+ if (lines[i].match(/\.sort\(|\.filter\(.*\.map\(|\.reduce\(/)) {
2195
+ const block = lines.slice(Math.max(0, i - 3), i + 3).join('\n');
2196
+ if (block.match(/\.length\s*>\s*(?:1000|10000|100000)|items\s*=|records\s*=|results\s*=/)) {
2197
+ findings.push({ ruleId: 'PERF-ASYNC-007', category: 'performance', severity: 'medium', title: 'Large array transformation without chunking — may block event loop', description: 'For arrays >10k items, chunk processing: split into batches with setImmediate() between chunks. Large synchronous operations starve the event loop.', file: fp, line: i + 1, fix: null });
2198
+ }
2199
+ }
2200
+ }
2201
+ }
2202
+ return findings;
2203
+ },
2204
+ },
2205
+
2206
+ // PERF-DB-018: No read replica for heavy read workloads
2207
+ { id: 'PERF-DB-018', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Read Replica for Heavy Read Workloads',
2208
+ check({ files }) {
2209
+ const findings = [];
2210
+ for (const [fp, c] of files) {
2211
+ if (!isSourceFile(fp)) continue;
2212
+ const lines = c.split('\n');
2213
+ let readCount = 0, readReplicaUsed = false;
2214
+ for (const line of lines) {
2215
+ if (line.match(/\.find\(|\.findAll\(|\.findMany\(|SELECT/i)) readCount++;
2216
+ if (line.match(/readReplica|read_replica|replicaUrl|slaveDb|readDb|slave/i)) readReplicaUsed = true;
2217
+ }
2218
+ if (readCount > 20 && !readReplicaUsed) {
2219
+ findings.push({ ruleId: 'PERF-DB-018', category: 'performance', severity: 'medium', title: `${readCount} read queries without read replica — all reads hit primary database`, description: 'Route read queries to a read replica. Heavy read traffic on the primary database competes with writes and increases write latency.', file: fp, fix: null });
2220
+ }
2221
+ }
2222
+ return findings;
2223
+ },
2224
+ },
2225
+
2226
+ // PERF-CACHE-009: Repeated identical API calls without deduplication
2227
+ { id: 'PERF-CACHE-009', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Request Deduplication for Concurrent Identical Requests',
2228
+ check({ files, stack }) {
2229
+ const findings = [];
2230
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
2231
+ const hasDedup = ['dataloader', 'p-memoize', 'async-memoize', 'memoize-one'].some(d => d in allDeps);
2232
+ const hasExternalCalls = [...files.values()].some(c => c.match(/axios\.|fetch\(|got\./i));
2233
+ if (hasExternalCalls && !hasDedup) {
2234
+ findings.push({ ruleId: 'PERF-CACHE-009', category: 'performance', severity: 'medium', title: 'No request deduplication for concurrent identical API calls', description: 'Use DataLoader or p-memoize to deduplicate concurrent identical requests. Without deduplication, 100 concurrent users trigger 100 identical external API calls.', fix: null });
2235
+ }
2236
+ return findings;
2237
+ },
2238
+ },
2239
+
2240
+ // PERF-FE-014: No CSS purging for large CSS frameworks
2241
+ { id: 'PERF-FE-014', category: 'performance', severity: 'medium', confidence: 'likely', title: 'CSS Framework Without Purging Unused Styles',
2242
+ check({ files, stack }) {
2243
+ const findings = [];
2244
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
2245
+ const hasLargeCSS = ['bootstrap', 'material-ui', '@mui/material', 'antd', 'bulma', 'foundation-sites'].some(d => d in allDeps);
2246
+ const hasPurge = [...files.values()].some(c => c.match(/purge|purgecss|content.*tailwind|JIT|usedExports/i));
2247
+ if (hasLargeCSS && !hasPurge) {
2248
+ findings.push({ ruleId: 'PERF-FE-014', category: 'performance', severity: 'medium', title: 'CSS framework imported without unused CSS purging', description: 'Configure PurgeCSS or tree-shaking. Full Bootstrap/Material-UI is 200-400KB. With purging, only used components are included.', fix: null });
2249
+ }
2250
+ return findings;
2251
+ },
2252
+ },
2253
+
2254
+ // PERF-API-012: Synchronous email sending in request
2255
+ { id: 'PERF-API-012', category: 'performance', severity: 'high', confidence: 'likely', title: 'Synchronous Email Sending in HTTP Request',
2256
+ check({ files }) {
2257
+ const findings = [];
2258
+ for (const [fp, c] of files) {
2259
+ if (!isSourceFile(fp)) continue;
2260
+ const lines = c.split('\n');
2261
+ for (let i = 0; i < lines.length; i++) {
2262
+ if (lines[i].match(/await.*sendMail|await.*sendEmail|await.*mailer\./i)) {
2263
+ const context = lines.slice(Math.max(0, i - 10), i).join('\n');
2264
+ if (context.match(/router\.|app\.(post|put|patch)|exports\.handler/i)) {
2265
+ findings.push({ ruleId: 'PERF-API-012', category: 'performance', severity: 'high', title: 'Email sent synchronously in HTTP handler — adds 200-3000ms to response time', description: 'Queue emails: emailQueue.add({ to, subject, body }). Return 202 Accepted immediately. Synchronous email sending holds HTTP connections and delays responses.', file: fp, line: i + 1, fix: null });
2266
+ }
2267
+ }
2268
+ }
2269
+ }
2270
+ return findings;
2271
+ },
2272
+ },
2273
+
2274
+ // PERF-MON-009: No alerting on high latency
2275
+ { id: 'PERF-MON-009', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Latency SLA Alerting',
2276
+ check({ files }) {
2277
+ const findings = [];
2278
+ const allCode = [...files.values()].join('\n');
2279
+ const hasAlert = allCode.match(/latency.*alert|p99.*alert|response.*time.*threshold|sla.*alert|duration.*alarm/i);
2280
+ const hasMetrics = allCode.match(/prom-client|datadog|cloudwatch|newrelic/i);
2281
+ if (hasMetrics && !hasAlert) {
2282
+ findings.push({ ruleId: 'PERF-MON-009', category: 'performance', severity: 'medium', title: 'Metrics configured without latency SLA alerting', description: 'Alert on P99 response time > 500ms. Without latency alerts, gradual performance degradation goes undetected until users complain.', fix: null });
2283
+ }
2284
+ return findings;
2285
+ },
2286
+ },
2287
+
2288
+ // PERF-LOAD-015: No pagination default limit
2289
+ { id: 'PERF-LOAD-015', category: 'performance', severity: 'high', confidence: 'likely', title: 'List API Without Maximum Page Size Limit',
2290
+ check({ files }) {
2291
+ const findings = [];
2292
+ for (const [fp, c] of files) {
2293
+ if (!isSourceFile(fp)) continue;
2294
+ const lines = c.split('\n');
2295
+ for (let i = 0; i < lines.length; i++) {
2296
+ if (lines[i].match(/limit.*req\.query|req\.query.*limit/i)) {
2297
+ const nearby = lines.slice(i, i + 5).join('\n');
2298
+ if (!nearby.match(/Math\.min|maxLimit|MAX_LIMIT|> \d+|Math\.max/)) {
2299
+ findings.push({ ruleId: 'PERF-LOAD-015', category: 'performance', severity: 'high', title: 'Pagination limit from user query without maximum cap', description: 'Cap limit: const limit = Math.min(parseInt(req.query.limit) || 20, 100). Without a max, users can request limit=100000 and load millions of rows.', file: fp, line: i + 1, fix: null });
2300
+ }
2301
+ }
2302
+ }
2303
+ }
2304
+ return findings;
2305
+ },
2306
+ },
2307
+
2308
+ // PERF-ASYNC-008: forEach with async/await (not parallelized)
2309
+ { id: 'PERF-ASYNC-008', category: 'performance', severity: 'medium', confidence: 'likely', title: 'async/await Inside forEach (Sequential, Not Parallel)',
2310
+ check({ files }) {
2311
+ const findings = [];
2312
+ for (const [fp, c] of files) {
2313
+ if (!isSourceFile(fp)) continue;
2314
+ const lines = c.split('\n');
2315
+ for (let i = 0; i < lines.length; i++) {
2316
+ if (lines[i].match(/\.forEach\s*\(\s*async/)) {
2317
+ findings.push({ ruleId: 'PERF-ASYNC-008', category: 'performance', severity: 'medium', title: 'async/await inside forEach — awaits are not waited, use Promise.all with map instead', description: 'Replace: await Promise.all(items.map(async item => { ... })). forEach ignores returned promises; items process without awaiting completion.', file: fp, line: i + 1, fix: null });
2318
+ }
2319
+ }
2320
+ }
2321
+ return findings;
2322
+ },
2323
+ },
2324
+
2325
+ // PERF-MEM-010: No streaming for large file uploads
2326
+ { id: 'PERF-MEM-010', category: 'performance', severity: 'high', confidence: 'likely', title: 'File Uploads Buffered in Memory',
2327
+ check({ files, stack }) {
2328
+ const findings = [];
2329
+ const allDeps = { ...stack.dependencies, ...stack.devDependencies };
2330
+ const hasUpload = ['multer', 'formidable', 'multiparty', 'busboy'].some(d => d in allDeps);
2331
+ const hasStreaming = [...files.values()].some(c => c.match(/pipe\(|createWriteStream|\.stream\(|S3.*upload.*stream/i));
2332
+ if (hasUpload && !hasStreaming) {
2333
+ findings.push({ ruleId: 'PERF-MEM-010', category: 'performance', severity: 'high', title: 'File upload handler without streaming to storage — entire file loaded into memory', description: 'Stream uploads directly to S3/disk: req.pipe(s3Stream). Buffering large files in memory causes OOM errors and limits upload size.', fix: null });
2334
+ }
2335
+ return findings;
2336
+ },
2337
+ },
2338
+
2339
+ // PERF-DB-019: Missing index on created_at for time-range queries
2340
+ { id: 'PERF-DB-019', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No Index on Timestamp for Time-Range Queries',
2341
+ check({ files }) {
2342
+ const findings = [];
2343
+ for (const [fp, c] of files) {
2344
+ if (!fp.match(/migration|schema/i) || !isSourceFile(fp)) continue;
2345
+ if (c.match(/created_at|updatedAt|createdAt|timestamp/i) && !c.match(/\.index\('created_at'\)|index.*created_at|INDEX.*created_at/i)) {
2346
+ findings.push({ ruleId: 'PERF-DB-019', category: 'performance', severity: 'medium', title: 'Timestamp column without index — date range queries do full table scan', description: 'Add index: table.index("created_at"). Time-range queries (last 7 days) require full scans without an index on the timestamp column.', file: fp, fix: null });
2347
+ }
2348
+ }
2349
+ return findings;
2350
+ },
2351
+ },
2352
+ // PERF-CACHE-010: Redis KEYS command in production
2353
+ { id: 'PERF-CACHE-010', category: 'performance', severity: 'critical', confidence: 'definite', title: 'Redis KEYS Command Used in Production',
2354
+ check({ files }) {
2355
+ const findings = [];
2356
+ for (const [fp, c] of files) {
2357
+ if (!isSourceFile(fp)) continue;
2358
+ const lines = c.split('\n');
2359
+ for (let i = 0; i < lines.length; i++) {
2360
+ if (lines[i].match(/\.keys\s*\(['"`][\*?]/)) {
2361
+ findings.push({ ruleId: 'PERF-CACHE-010', category: 'performance', severity: 'critical', title: 'Redis KEYS * command blocks entire Redis for potentially seconds', description: 'Use SCAN instead of KEYS. SCAN is non-blocking and iterates in small batches. KEYS blocks Redis server-wide until complete.', file: fp, line: i + 1, fix: null });
2362
+ }
2363
+ }
2364
+ }
2365
+ return findings;
2366
+ },
2367
+ },
2368
+ // PERF-FE-015: No image dimensions specified
2369
+ { id: 'PERF-FE-015', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Images Without Width/Height Attributes (CLS)',
2370
+ check({ files }) {
2371
+ const findings = [];
2372
+ for (const [fp, c] of files) {
2373
+ if (!fp.match(/\.(html)$/)) continue;
2374
+ const lines = c.split('\n');
2375
+ for (let i = 0; i < lines.length; i++) {
2376
+ if (lines[i].match(/<img\s/i) && !lines[i].match(/width=|height=|style=["'][^"']*width/i)) {
2377
+ findings.push({ ruleId: 'PERF-FE-015', category: 'performance', severity: 'medium', title: 'Image without width/height attributes — causes Cumulative Layout Shift', description: 'Add width and height attributes matching the image dimensions. Without them, the browser cannot reserve space, causing layout shift as images load.', file: fp, line: i + 1, fix: null });
2378
+ }
2379
+ }
2380
+ }
2381
+ return findings;
2382
+ },
2383
+ },
2384
+ // PERF-API-013: No HTTP caching headers on API responses
2385
+ { id: 'PERF-API-013', category: 'performance', severity: 'medium', confidence: 'likely', title: 'API Responses Without Cache-Control Headers',
2386
+ check({ files }) {
2387
+ const findings = [];
2388
+ for (const [fp, c] of files) {
2389
+ if (!isSourceFile(fp)) continue;
2390
+ const lines = c.split('\n');
2391
+ let getRoutes = 0, cacheControl = 0;
2392
+ for (const l of lines) {
2393
+ if (l.match(/router\.get\(|app\.get\(/i)) getRoutes++;
2394
+ if (l.match(/Cache-Control|cache-control/i)) cacheControl++;
2395
+ }
2396
+ if (getRoutes > 3 && cacheControl === 0) {
2397
+ findings.push({ ruleId: 'PERF-API-013', category: 'performance', severity: 'medium', title: `${getRoutes} GET routes without Cache-Control headers`, description: 'Set Cache-Control: public, max-age=60 on read-only endpoints. Without caching headers, every client request hits your server even for identical data.', file: fp, fix: null });
2398
+ }
2399
+ }
2400
+ return findings;
2401
+ },
2402
+ },
2403
+ // PERF-LOAD-016: No WebSocket connection limit
2404
+ { id: 'PERF-LOAD-016', category: 'performance', severity: 'medium', confidence: 'likely', title: 'No WebSocket Connection Limit',
2405
+ check({ files }) {
2406
+ const findings = [];
2407
+ for (const [fp, c] of files) {
2408
+ if (!isSourceFile(fp)) continue;
2409
+ if (c.match(/WebSocketServer|io\.on\('connection'\)|wss\.on\('connection'\)/i)) {
2410
+ if (!c.match(/maxConnections|connection.*limit|wss\.clients\.size|io\.engine\.clientsCount/i)) {
2411
+ findings.push({ ruleId: 'PERF-LOAD-016', category: 'performance', severity: 'medium', title: 'WebSocket server without connection limit — resource exhaustion under load', description: 'Add connection limits: if (wss.clients.size >= MAX_CONNECTIONS) socket.close(). Unlimited WebSocket connections exhaust file descriptors and memory.', file: fp, fix: null });
2412
+ }
2413
+ }
2414
+ }
2415
+ return findings;
2416
+ },
2417
+ },
2418
+ // PERF-MEM-011: Template compiled on every request
2419
+ { id: 'PERF-MEM-011', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Template Compiled on Every Request',
2420
+ check({ files }) {
2421
+ const findings = [];
2422
+ for (const [fp, c] of files) {
2423
+ if (!isSourceFile(fp)) continue;
2424
+ const lines = c.split('\n');
2425
+ for (let i = 0; i < lines.length; i++) {
2426
+ if (lines[i].match(/Handlebars\.compile|Mustache\.render|ejs\.render|nunjucks\.renderString/i)) {
2427
+ const context = lines.slice(Math.max(0, i - 5), i).join('\n');
2428
+ if (context.match(/router\.|app\.(get|post)|handler|req,\s*res/)) {
2429
+ findings.push({ ruleId: 'PERF-MEM-011', category: 'performance', severity: 'medium', title: 'Template compiled inside request handler — recompile on every request', description: 'Compile templates at startup and cache: const tmpl = Handlebars.compile(templateString). Template compilation is expensive; cache the compiled function.', file: fp, line: i + 1, fix: null });
2430
+ }
2431
+ }
2432
+ }
2433
+ }
2434
+ return findings;
2435
+ },
2436
+ },
2437
+ ];
2438
+
2439
+ export default rules;
2440
+
2441
// PERF-112: React missing key prop in list
rules.push({
  id: 'PERF-112', category: 'performance', severity: 'medium', confidence: 'likely', title: 'React list rendered without key prop',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!fp.match(/\.(jsx|tsx)$/)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        // FIX: the arrow's parameter list may itself be parenthesized — "(item, i) =>".
        // The old `[^)]*` stopped at the first ")" and never matched those callbacks;
        // now accept either a parenthesized list or a single bare identifier.
        if (/\.map\s*\(\s*(?:\([^)]*\)|[\w$]+)\s*=>\s*[(<]/.test(lines[i]) && !/key\s*=/.test(lines[i])) {
          const ctx = lines.slice(i, Math.min(lines.length, i + 4)).join('\n');
          if (!/key\s*=/.test(ctx)) findings.push({ ruleId: 'PERF-112', category: 'performance', severity: 'medium', title: '.map() rendering without key prop — React reconciliation degraded', description: 'React needs stable keys on list items to reconcile efficiently. Missing keys cause unnecessary DOM re-renders.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2460
+
2461
// PERF-113: Heavy computation in React render
// Heuristic: sort/filter/reduce near a component definition with no memoization in sight.
rules.push({
  id: 'PERF-113', category: 'performance', severity: 'high', confidence: 'likely', title: 'Expensive computation in React component body',
  check({ files }) {
    const commentRe = /^\s*(\/\/|\/\*|\*)/;
    const heavyRe = /\.sort\s*\(|\.filter\s*\(|\.reduce\s*\(/;
    const memoRe = /useMemo|useCallback/;
    const componentCtxRe = /return\s*\(|function.*Component|const\s+\w+\s*=\s*\(/;
    const findings = [];
    for (const [path, source] of files) {
      if (!path.match(/\.(jsx|tsx)$/)) continue;
      const rows = source.split('\n');
      rows.forEach((row, idx) => {
        if (commentRe.test(row) || !heavyRe.test(row) || memoRe.test(row)) return;
        // Look up to 8 lines back for component context without memoization.
        const ctx = rows.slice(Math.max(0, idx - 8), idx).join('\n');
        if (componentCtxRe.test(ctx) && !memoRe.test(ctx)) {
          findings.push({ ruleId: 'PERF-113', category: 'performance', severity: 'high', title: 'Sort/filter/reduce inside render — runs on every render cycle', description: 'Move expensive array operations outside render or wrap with useMemo to prevent recalculation on every render.', file: path, line: idx + 1, fix: null });
        }
      });
    }
    return findings;
  },
});
2482
+
2483
// PERF-114: Regex recompiled in loop
// Tracks an approximate loop-nesting depth line by line and flags RegExp construction inside it.
rules.push({
  id: 'PERF-114', category: 'performance', severity: 'medium', confidence: 'likely', title: 'new RegExp() inside loop — recompiled on every iteration',
  check({ files }) {
    const findings = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      let depth = 0; // heuristic loop-nesting counter (line-based, not a real parser)
      for (let idx = 0; idx < rows.length; idx++) {
        const row = rows[idx];
        if (/^\s*(\/\/|\/\*|\*)/.test(row)) continue;
        if (/\b(?:for|while)\s*\(/.test(row)) depth += 1;
        if (/^\s*\}/.test(row) && depth > 0) depth -= 1;
        if (depth > 0 && /new\s+RegExp\s*\(/.test(row)) {
          findings.push({ ruleId: 'PERF-114', category: 'performance', severity: 'medium', title: 'RegExp created inside loop — hoist outside for reuse', description: 'Creating RegExp inside loops recompiles the regex each iteration. Move the RegExp declaration outside the loop.', file: path, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2504
+
2505
// PERF-115: String concatenation in loop
// Same nesting heuristic as PERF-114, flagging `x += '<literal>'` inside a loop body.
rules.push({
  id: 'PERF-115', category: 'performance', severity: 'medium', confidence: 'likely', title: 'String += in loop — use array.join() instead',
  check({ files }) {
    const findings = [];
    for (const [path, source] of files) {
      if (!isSourceFile(path)) continue;
      const rows = source.split('\n');
      let depth = 0; // heuristic loop-nesting counter
      for (let idx = 0; idx < rows.length; idx++) {
        const row = rows[idx];
        if (/^\s*(\/\/|\/\*|\*)/.test(row)) continue;
        if (/\b(?:for|while)\s*\(/.test(row)) depth += 1;
        if (/^\s*\}/.test(row) && depth > 0) depth -= 1;
        if (depth > 0 && /\w+\s*\+=\s*['"`]/.test(row)) {
          findings.push({ ruleId: 'PERF-115', category: 'performance', severity: 'medium', title: 'String += concatenation in loop — O(n²) allocations', description: 'String concatenation in loops creates a new string each iteration. Use array.push() then join("") at the end.', file: path, line: idx + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2526
+
2527
// PERF-116: Recursive function without memoization
// Exponential blow-up needs TWO OR MORE recursive call sites per invocation (e.g. naive fib).
rules.push({
  id: 'PERF-116', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Recursive function without memoization — exponential complexity risk',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        const fnMatch = lines[i].match(/function\s+(\w+)\s*\(/);
        if (!fnMatch) continue;
        const fnName = fnMatch[1];
        // FIX: scan only the body (skip the declaration line). The old code counted the
        // `function fnName(` declaration itself as a "call", so a single recursive call
        // (linear recursion) tripped the >= 2 threshold and was reported as exponential.
        const body = lines.slice(i + 1, Math.min(lines.length, i + 25)).join('\n');
        const recCalls = (body.match(new RegExp(`\\b${fnName}\\s*\\(`, 'g')) || []).length;
        if (recCalls >= 2 && !/cache|memo|Map\s*\(|memoize/.test(body)) {
          findings.push({ ruleId: 'PERF-116', category: 'performance', severity: 'medium', title: `Recursive '${fnName}' with multiple calls — add memoization`, description: 'Functions with multiple recursive calls have exponential time complexity. Add a Map cache: if (cache.has(n)) return cache.get(n).', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2551
+
2552
// PERF-117: DB connection created per request
// Flags DB client construction whose preceding lines look like a route handler.
rules.push({
  id: 'PERF-117', category: 'performance', severity: 'high', confidence: 'likely', title: 'Database connection created per-request without pooling',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        // FIX: mysql.createConnection is a factory called WITHOUT `new`; the old pattern
        // required `new mysql.createConnection(` which never occurs in real code.
        if (/\bnew\s+(?:Pool|Client|MongoClient|Sequelize)\s*\(|mysql\.createConnection\s*\(/.test(lines[i])) {
          const ctx = lines.slice(Math.max(0, i - 5), i).join('\n');
          if (/router\.|app\.(get|post|put|delete)|handler|async\s*\(req/.test(ctx)) {
            findings.push({ ruleId: 'PERF-117', category: 'performance', severity: 'high', title: 'DB connection opened inside request handler — no pooling', description: 'Creating a new DB connection per request causes connection overhead and can exhaust DB connection limits. Initialize a pool at startup.', file: fp, line: i + 1, fix: null });
          }
        }
      }
    }
    return findings;
  },
});
2573
+
2574
// PERF-118: Sequential awaits on independent operations
// Flags two consecutive `const x = await …` lines where the second does not use the first's result.
rules.push({
  id: 'PERF-118', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Sequential awaits on independent operations — use Promise.all()',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      // FIX: loop bound was `lines.length - 2`, which skipped the final pair of lines;
      // we only need lines[i + 1] to exist, so iterate to length - 1.
      for (let i = 0; i < lines.length - 1; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        if (/^\s*(?:const|let)\s+\w+\s*=\s*await\s+/.test(lines[i]) &&
            /^\s*(?:const|let)\s+\w+\s*=\s*await\s+/.test(lines[i + 1])) {
          const var1 = (lines[i].match(/(?:const|let)\s+(\w+)/) || [])[1];
          // Independent only if the second await never references the first result.
          if (var1 && !lines[i + 1].includes(var1)) {
            findings.push({ ruleId: 'PERF-118', category: 'performance', severity: 'medium', title: 'Sequential independent awaits — use Promise.all() for parallelism', description: 'Independent async operations awaited sequentially add their latencies. Use const [a, b] = await Promise.all([op1, op2]) instead.', file: fp, line: i + 1, fix: null });
          }
        }
      }
    }
    return findings;
  },
});
2596
+
2597
// PERF-119: Event listeners without cleanup.
// File-level heuristic: more than two listener registrations with zero
// removals anywhere in the file.
rules.push({
  id: 'PERF-119', category: 'performance', severity: 'high', confidence: 'likely', title: 'Multiple event listeners added without corresponding removal',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const added = (source.match(/addEventListener|\.on\s*\(/g) || []).length;
      const removed = (source.match(/removeEventListener|\.off\s*\(|\.removeListener\s*\(/g) || []).length;
      if (added > 2 && removed === 0) {
        results.push({ ruleId: 'PERF-119', category: 'performance', severity: 'high', title: `${added} event listeners with no removal — memory leak risk`, description: 'Event listeners accumulate if not removed. Add removeEventListener in cleanup, componentWillUnmount, or useEffect return.', file, fix: null });
      }
    }
    return results;
  },
});
2613
+
2614
// PERF-120: Heavy library not lazily loaded.
// Flags static `import … from` of known bundle-heavy packages.
rules.push({
  id: 'PERF-120', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Large dependency imported statically — consider dynamic import()',
  check({ files }) {
    const results = [];
    const heavyImport = /^import\s+.*from\s+['"](?:lodash|moment|rxjs|pdf-lib|puppeteer|chart\.js|three|@tensorflow)['"]/im;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      source.split('\n').forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!heavyImport.test(line)) return;
        results.push({ ruleId: 'PERF-120', category: 'performance', severity: 'medium', title: 'Heavy library imported statically — consider lazy loading with import()', description: 'Large libraries increase initial bundle/startup time. Use dynamic import() for libraries not needed on every request.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2633
+
2634
// PERF-121: Unthrottled scroll/resize handler.
// Flags high-frequency DOM events registered with no rate limiting visible
// in the surrounding lines (3 before, 8 after).
rules.push({
  id: 'PERF-121', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Scroll/resize event without debounce or throttle',
  check({ files }) {
    const results = [];
    const hotEvent = /addEventListener\s*\(\s*['"](?:resize|scroll|mousemove)['"]/;
    const rateLimited = /debounce|throttle|requestAnimationFrame/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!hotEvent.test(line)) return;
        const around = lines.slice(Math.max(0, idx - 3), Math.min(lines.length, idx + 8)).join('\n');
        if (rateLimited.test(around)) return;
        results.push({ ruleId: 'PERF-121', category: 'performance', severity: 'medium', title: 'Resize/scroll handler without throttle — fires at 60+ fps', description: 'Scroll and resize events fire very frequently. Wrap handlers with debounce() or throttle() to limit execution.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2655
+
2656
// PERF-122: Synchronous crypto in request handler.
// Flags sync hashing/KDF calls when the 8 preceding lines look like a
// request handler.
rules.push({
  id: 'PERF-122', category: 'performance', severity: 'high', confidence: 'likely', title: 'Synchronous crypto operation blocks Node.js event loop',
  check({ files }) {
    const results = [];
    const syncCrypto = /bcrypt\.hashSync|bcrypt\.compareSync|crypto\.pbkdf2Sync|scryptSync/;
    const handlerRe = /router\.|app\.(get|post|put|delete)|handler|async\s*\(req/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!syncCrypto.test(line)) return;
        const preceding = lines.slice(Math.max(0, idx - 8), idx).join('\n');
        if (!handlerRe.test(preceding)) return;
        results.push({ ruleId: 'PERF-122', category: 'performance', severity: 'high', title: 'bcryptSync/pbkdf2Sync in request handler blocks all other requests', description: 'Synchronous crypto operations block Node.js event loop. Use async variants: bcrypt.hash(), crypto.pbkdf2(), crypto.scrypt().', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2677
+
2678
// PERF-123: JSON.parse in tight loop.
// Tracks an approximate loop-nesting depth line by line and flags JSON.parse
// calls seen while inside a loop body (or on the loop header itself).
rules.push({
  id: 'PERF-123', category: 'performance', severity: 'medium', confidence: 'likely', title: 'JSON.parse() inside loop — expensive deserialization per iteration',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      let loopDepth = 0;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) continue;
        const loopHead = /\b(?:for|while)\s*\(/.test(line);
        if (loopHead) {
          // Only count the body when the opening brace is not balanced on the
          // same line. A brace-less loop (`for (…) doX();`) or a single-line
          // loop (`for (…) { doX(); }`) has no later stand-alone `}`, and the
          // old unconditional increment left loopDepth stuck > 0 for the rest
          // of the file, flagging every subsequent JSON.parse.
          const opens = (line.match(/\{/g) || []).length;
          const closes = (line.match(/\}/g) || []).length;
          if (opens > closes) loopDepth++;
        }
        if (/^\s*\}/.test(line) && loopDepth > 0) loopDepth--;
        // loopHead covers JSON.parse in the loop header / single-line body.
        if ((loopDepth > 0 || loopHead) && /JSON\.parse\s*\(/.test(line)) {
          findings.push({ ruleId: 'PERF-123', category: 'performance', severity: 'medium', title: 'JSON.parse() inside loop — parse once and cache', description: 'JSON.parse is CPU-intensive. If parsing the same data repeatedly, parse once and reuse the result.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2699
+
2700
// PERF-124: Missing pagination on data fetching.
// Flags ORM find()/findAll() calls with no limit-like keyword in the call
// line or the following five lines.
rules.push({
  id: 'PERF-124', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Data fetch without explicit limit — could return unbounded results',
  check({ files }) {
    const results = [];
    const findCall = /\.find\s*\(\s*\{|\.findAll\s*\(\s*\{/;
    const bounded = /limit|take|pageSize|LIMIT|skip|offset/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!findCall.test(line)) return;
        const tail = lines.slice(idx, Math.min(lines.length, idx + 6)).join('\n');
        if (bounded.test(tail)) return;
        results.push({ ruleId: 'PERF-124', category: 'performance', severity: 'medium', title: 'DB find() without limit — may return full table', description: 'Queries without a LIMIT can return millions of rows as data grows. Always specify take/limit for list queries.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2721
+
2722
// PERF-125: Object spread in hot loop.
// Tracks an approximate loop-nesting depth and flags `{ ...obj }` spreads
// seen inside a loop body (or on the loop header itself).
rules.push({
  id: 'PERF-125', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Object spread inside tight loop — excessive allocations',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      let loopDepth = 0;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) continue;
        const loopHead = /\b(?:for|while)\s*\(/.test(line);
        if (loopHead) {
          // Only count the body when the opening brace is not balanced on the
          // same line; a brace-less or single-line loop previously incremented
          // loopDepth with no matching decrement, leaving the counter stuck
          // and flagging every later spread in the file.
          const opens = (line.match(/\{/g) || []).length;
          const closes = (line.match(/\}/g) || []).length;
          if (opens > closes) loopDepth++;
        }
        if (/^\s*\}/.test(line) && loopDepth > 0) loopDepth--;
        if ((loopDepth > 0 || loopHead) && /\{\s*\.\.\.\w+/.test(line)) {
          findings.push({ ruleId: 'PERF-125', category: 'performance', severity: 'low', title: 'Object spread in loop — allocates new object each iteration', description: 'Object spread {...obj} in tight loops creates many short-lived objects, increasing GC pressure. Consider mutating in-place if the object is not shared.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2743
+
2744
+ // PERF-126 through PERF-155: Additional performance rules
2745
+
2746
// PERF-126: Missing database index for frequent query pattern.
// Flags where-clauses on commonly queried fields when the file declares no
// index anywhere.
rules.push({
  id: 'PERF-126', category: 'performance', severity: 'high', confidence: 'likely', title: 'Frequent query on field without explicit index',
  check({ files }) {
    const results = [];
    const whereHotField = /where\s*:\s*\{.*(?:email|username|slug|handle)\s*:/i;
    const declaresIndex = /@@index|createIndex|index:\s*true|@Index/i;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      // An index declaration anywhere in the file suppresses the rule
      // (same file-wide check as before, hoisted out of the line loop).
      if (declaresIndex.test(source)) continue;
      source.split('\n').forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!whereHotField.test(line)) return;
        results.push({ ruleId: 'PERF-126', category: 'performance', severity: 'high', title: 'Query by email/username/slug without index — full table scan', description: 'Frequently queried fields like email, username, and slug should have database indexes to avoid sequential scans.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2766
+
2767
// PERF-127: Unbuffered large file read.
// Flags fs.readFileSync when the 5 preceding lines look like a request
// handler.
rules.push({
  id: 'PERF-127', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Large file read into memory with readFileSync',
  check({ files }) {
    const results = [];
    const handlerRe = /router\.|app\.(get|post)|handler|async\s*\(req/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!/fs\.readFileSync\s*\(/.test(line)) return;
        const preceding = lines.slice(Math.max(0, idx - 5), idx).join('\n');
        if (!handlerRe.test(preceding)) return;
        results.push({ ruleId: 'PERF-127', category: 'performance', severity: 'medium', title: 'readFileSync in request handler — blocks event loop and loads entire file into memory', description: 'Use fs.createReadStream() to pipe large files to the response, or fs.readFile() for async reads.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2788
+
2789
// PERF-128: Unoptimized image without lazy loading.
// Flags <img> tags with no loading attribute (and no priority/eager opt-out)
// on the tag line or the two lines after it.
rules.push({
  id: 'PERF-128', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Image tag without loading="lazy"',
  check({ files }) {
    const findings = [];
    // Same exclusion set for the single-line and multi-line checks. The old
    // multi-line context only tested /loading\s*=/ (case-sensitive), so a
    // wrapped `priority` or `eager` attribute on the next line was still
    // flagged even though it is excluded when on the <img> line itself.
    const optOut = /loading\s*=|priority|eager/i;
    for (const [fp, c] of files) {
      if (!fp.match(/\.(jsx|tsx|html)$/)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        if (!/<img\b/.test(lines[i]) || optOut.test(lines[i])) continue;
        // JSX/HTML attributes often wrap onto following lines.
        const ctx = lines.slice(i, Math.min(lines.length, i + 3)).join(' ');
        if (!optOut.test(ctx)) {
          findings.push({ ruleId: 'PERF-128', category: 'performance', severity: 'medium', title: '<img> without loading="lazy" — images loaded even when off-screen', description: 'Add loading="lazy" to below-fold images to defer loading until they\'re near the viewport, reducing initial page load time.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2810
+
2811
// PERF-129: Missing cache-control headers.
// File-level heuristic: serves static assets but never mentions any caching
// header/option.
rules.push({
  id: 'PERF-129', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Static file served without Cache-Control header',
  check({ files }) {
    const results = [];
    const servesStatic = /express\.static|sendFile|res\.send.*\.js|res\.send.*\.css/i;
    const setsCaching = /Cache-Control|cacheControl|maxAge|cache-control/i;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      if (!servesStatic.test(source)) continue;
      if (setsCaching.test(source)) continue;
      results.push({ ruleId: 'PERF-129', category: 'performance', severity: 'medium', title: 'Static assets without Cache-Control header — no browser caching', description: 'Set Cache-Control: max-age=31536000, immutable for versioned static assets to enable browser caching and reduce repeated downloads.', file, fix: null });
    }
    return results;
  },
});
2826
+
2827
// PERF-130: Multiple React context consumers causing re-renders.
// File-level heuristic: more than three useContext() calls in one component
// file.
rules.push({
  id: 'PERF-130', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Component consuming multiple contexts — may re-render unnecessarily',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!file.match(/\.(jsx|tsx)$/)) continue;
      const consumerCount = (source.match(/useContext\s*\(/g) || []).length;
      if (consumerCount <= 3) continue;
      results.push({ ruleId: 'PERF-130', category: 'performance', severity: 'medium', title: `Component uses ${consumerCount} contexts — any context change triggers re-render`, description: 'Consuming many contexts makes a component re-render when any context value changes. Split contexts by update frequency or use memo selectors.', file, fix: null });
    }
    return results;
  },
});
2842
+
2843
// PERF-131: Inefficient Array.includes in hot loop.
// Tracks an approximate loop-nesting depth and flags `.includes(` calls seen
// inside a loop body (or on the loop header itself).
rules.push({
  id: 'PERF-131', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Array.includes() in tight loop — use Set for O(1) lookup',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      let loopDepth = 0;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) continue;
        const loopHead = /\b(?:for|while)\s*\(/.test(line);
        if (loopHead) {
          // Only count the body when the opening brace is not balanced on the
          // same line; a brace-less or single-line loop previously incremented
          // loopDepth with no matching decrement, leaving the counter stuck
          // and flagging every later .includes() in the file.
          const opens = (line.match(/\{/g) || []).length;
          const closes = (line.match(/\}/g) || []).length;
          if (opens > closes) loopDepth++;
        }
        if (/^\s*\}/.test(line) && loopDepth > 0) loopDepth--;
        if ((loopDepth > 0 || loopHead) && /\w+\s*\.\s*includes\s*\(/.test(line) && !/String\s*\.\s*prototype|typeof/.test(line)) {
          findings.push({ ruleId: 'PERF-131', category: 'performance', severity: 'low', title: 'Array.includes() inside loop — O(n) per iteration, use Set for O(1)', description: 'Array.includes() is O(n). For repeated lookups, create a Set once and use Set.has() which is O(1).', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2864
+
2865
// PERF-132: Inefficient object key enumeration in loop.
// Tracks an approximate loop-nesting depth and flags Object.keys/values/
// entries calls seen inside a loop body (or on the loop header itself).
rules.push({
  id: 'PERF-132', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Object.keys()/values()/entries() in tight loop',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      let loopDepth = 0;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) continue;
        const loopHead = /\b(?:for|while)\s*\(/.test(line);
        if (loopHead) {
          // Only count the body when the opening brace is not balanced on the
          // same line; a brace-less or single-line loop previously incremented
          // loopDepth with no matching decrement, leaving the counter stuck
          // and flagging every later enumeration call in the file.
          const opens = (line.match(/\{/g) || []).length;
          const closes = (line.match(/\}/g) || []).length;
          if (opens > closes) loopDepth++;
        }
        if (/^\s*\}/.test(line) && loopDepth > 0) loopDepth--;
        // loopHead also covers the common `for (const k of Object.keys(o))`
        // header, which is legitimate to flag per the rule's intent.
        if ((loopDepth > 0 || loopHead) && /Object\.(?:keys|values|entries)\s*\(/.test(line)) {
          findings.push({ ruleId: 'PERF-132', category: 'performance', severity: 'low', title: 'Object.keys/values/entries() inside loop — allocates new array each iteration', description: 'Calling Object.keys/values/entries() inside a loop creates a new array each time. Cache the result outside the loop.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2886
+
2887
// PERF-133: Promise.all not used for independent operations.
// Three consecutive `const/let x = await …` declarations are flagged once,
// then the scan skips past the run.
rules.push({
  id: 'PERF-133', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Three or more sequential awaits — consider Promise.all',
  check({ files }) {
    const results = [];
    const awaitDecl = /^\s*(?:const|let)\s+\w+\s*=\s*await\s+/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      for (let idx = 0; idx < lines.length - 3; idx++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[idx])) continue;
        const threeInARow =
          awaitDecl.test(lines[idx]) &&
          awaitDecl.test(lines[idx + 1]) &&
          awaitDecl.test(lines[idx + 2]);
        if (!threeInARow) continue;
        results.push({ ruleId: 'PERF-133', category: 'performance', severity: 'medium', title: '3+ sequential awaits — use Promise.all() if operations are independent', description: 'Three or more sequential await statements add their latencies. Check if they can run concurrently: const [a, b, c] = await Promise.all([p1, p2, p3]).', file, line: idx + 1, fix: null });
        idx += 2; // avoid re-reporting overlapping windows of the same run
      }
    }
    return results;
  },
});
2909
+
2910
// PERF-134: fetch without timeout in critical path.
// Flags fetch() calls with no abort/timeout marker on the call line or in
// the nearby lines (2 before, 4 after).
rules.push({
  id: 'PERF-134', category: 'performance', severity: 'medium', confidence: 'likely', title: 'fetch() without AbortController timeout',
  check({ files }) {
    const results = [];
    const cancelable = /AbortController|signal:|timeout:/i;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!/\bfetch\s*\(/.test(line) || cancelable.test(line)) return;
        // Options objects often span lines — check surrounding context too.
        const around = lines.slice(Math.max(0, idx - 2), Math.min(lines.length, idx + 5)).join('\n');
        if (cancelable.test(around)) return;
        results.push({ ruleId: 'PERF-134', category: 'performance', severity: 'medium', title: 'fetch() without timeout — hangs indefinitely on slow responses', description: 'Use AbortController with setTimeout to set request timeouts: const controller = new AbortController(); fetch(url, { signal: controller.signal }).', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2931
+
2932
// PERF-135: Missing indexes on foreign keys.
// Only runs on files that look ORM-related (path or content mentions
// prisma/sequelize/typeorm); flags FK-like fields with no index marker in
// the nearby lines (3 before, 4 after).
rules.push({
  id: 'PERF-135', category: 'performance', severity: 'high', confidence: 'likely', title: 'Foreign key field without database index',
  check({ files }) {
    const results = [];
    const ormRe = /prisma|sequelize|typeorm/i;
    const fkRe = /\w+Id\s*Int|foreignKey\s*:|references\s*:/i;
    const indexRe = /index|@Index|@@index/i;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      if (!ormRe.test(file) && !ormRe.test(source)) continue;
      const lines = source.split('\n');
      lines.forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!fkRe.test(line)) return;
        const around = lines.slice(Math.max(0, idx - 3), Math.min(lines.length, idx + 5)).join('\n');
        if (indexRe.test(around)) return;
        results.push({ ruleId: 'PERF-135', category: 'performance', severity: 'high', title: 'Foreign key field without index — JOIN queries cause full table scans', description: 'Foreign key columns used in JOINs and WHERE clauses should always have database indexes to prevent table scans.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2954
+
2955
+ // PERF-136 through PERF-162: More performance rules
2956
+
2957
// PERF-136: Missing pagination cursor.
// Flags `skip: page * size` / `OFFSET n *` arithmetic, the signature of
// offset-based pagination.
rules.push({
  id: 'PERF-136', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Offset-based pagination — use cursor-based for large datasets',
  check({ files }) {
    const results = [];
    const offsetPaging = /\bskip\s*:\s*\w+\s*\*\s*\w+|\bOFFSET\s+\w+\s*\*/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      source.split('\n').forEach((line, idx) => {
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) return;
        if (!offsetPaging.test(line)) return;
        results.push({ ruleId: 'PERF-136', category: 'performance', severity: 'medium', title: 'Offset pagination (OFFSET × page) gets slower on large tables', description: 'Offset pagination scans all skipped rows. For large tables, use cursor-based pagination: WHERE id > lastCursorId ORDER BY id LIMIT n.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
2975
+
2976
// PERF-137: Unnecessary await on non-async function.
// Flags `await` applied to well-known synchronous calls.
rules.push({
  id: 'PERF-137', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'await used on non-promise value — unnecessary micro-task',
  check({ files }) {
    const findings = [];
    // `Math\.\w+` (was `Math\.`): the old alternation required a literal
    // `Math.(` because the shared `\s*\(` suffix followed the bare `Math\.`,
    // so `await Math.floor(x)` etc. could never match — the alternative was
    // dead. Matching the method name fixes it.
    const syncAwait = /await\s+(?:JSON\.parse|JSON\.stringify|parseInt|parseFloat|Math\.\w+|Array\.from|Object\.keys)\s*\(/;
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        if (syncAwait.test(lines[i])) {
          findings.push({ ruleId: 'PERF-137', category: 'performance', severity: 'low', title: 'await on synchronous function — unnecessary event loop tick', description: 'Awaiting non-promise values is harmless but adds an unnecessary microtask tick. Remove await from synchronous operations.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
2995
+
2996
// PERF-138: Multiple DOM queries for same element.
// Counts identical document.getElementById/querySelector/… calls per file
// and flags any repeated more than twice.
rules.push({
  id: 'PERF-138', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Same DOM element queried multiple times — cache the reference',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const queries = source.match(/document\.(?:getElementById|querySelector|getElementsByClassName)\s*\(['"][^'"]+['"]\)/g) || [];
      const counts = new Map();
      for (const q of queries) counts.set(q, (counts.get(q) || 0) + 1);
      for (const [query, count] of counts) {
        if (count <= 2) continue;
        results.push({ ruleId: 'PERF-138', category: 'performance', severity: 'low', title: `DOM query "${query.slice(0, 50)}" called ${count} times — cache in a variable`, description: 'DOM queries are expensive. Store the result in a variable and reuse it: const el = document.getElementById("id").', file, fix: null });
      }
    }
    return results;
  },
});
3017
+
3018
// PERF-139: Too many setState calls.
// Scans a 20-line window after each function start and counts React state
// setter calls (setState / setXxx); 4+ in one window is flagged.
rules.push({
  id: 'PERF-139', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Multiple setState calls in same function — batch updates',
  check({ files }) {
    const findings = [];
    // setTimeout/setInterval/setImmediate/setAttribute also match the
    // `set[A-Z]\w+(` shape but are not state setters; counting them inflated
    // setCount and produced false positives on timer/DOM-heavy handlers.
    const notStateSetter = /^set(?:Timeout|Interval|Immediate|Attribute)\b/;
    for (const [fp, c] of files) {
      if (!fp.match(/\.(jsx|tsx)$/)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/^\s*(\/\/|\/\*|\*)/.test(lines[i])) continue;
        if (!/function\s+\w+|=>\s*\{/.test(lines[i])) continue;
        const block = lines.slice(i, Math.min(lines.length, i + 20)).join('\n');
        const setCount = (block.match(/setState\s*\(|set[A-Z]\w+\s*\(/g) || [])
          .filter((m) => !notStateSetter.test(m)).length;
        if (setCount >= 4) {
          findings.push({ ruleId: 'PERF-139', category: 'performance', severity: 'medium', title: `${setCount} setState calls in handler — causes ${setCount} re-renders`, description: 'Multiple setState calls in React 17- cause multiple re-renders. Batch them into a single setState or use useReducer for complex state updates.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
3040
+
3041
// PERF-140: Missing gzip for static file serving.
// File-level heuristic: uses express.static with no compression-related
// keyword anywhere in the file.
rules.push({
  id: 'PERF-140', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Express serving large static files without compression',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      if (!/express\.static\s*\(/.test(source)) continue;
      if (/compression|gzip|brotli|zlib/i.test(source)) continue;
      results.push({ ruleId: 'PERF-140', category: 'performance', severity: 'medium', title: 'express.static without compression middleware — uncompressed static assets', description: 'Add compression middleware before express.static: app.use(require("compression")()); Gzip reduces text asset sizes by 60-80%.', file, fix: null });
    }
    return results;
  },
});
3055
+
3056
// PERF-141: Using Array.find then Array.indexOf.
// Flags a .find( line followed within 3 lines by .indexOf( / .findIndex(.
rules.push({
  id: 'PERF-141', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Array.find() followed by Array.indexOf() — two passes over same array',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      const lines = source.split('\n');
      for (let idx = 0; idx < lines.length - 3; idx++) {
        if (!/\.find\s*\(/.test(lines[idx])) continue;
        const following = lines.slice(idx + 1, Math.min(lines.length, idx + 4)).join('\n');
        if (!/\.indexOf\s*\(|\.findIndex\s*\(/.test(following)) continue;
        results.push({ ruleId: 'PERF-141', category: 'performance', severity: 'low', title: '.find() then .indexOf()/.findIndex() — use findIndex() for both lookup and index', description: 'Use findIndex() to get both the match result and its index in a single pass.', file, line: idx + 1, fix: null });
      }
    }
    return results;
  },
});
3076
+
3077
// PERF-142: Missing HTTP/2 support.
// File-level heuristic: creates an HTTPS server with no HTTP/2-related
// keyword anywhere in the file.
// NOTE(review): a `spdy.createServer` match also satisfies the `spdy`
// exclusion below, so spdy servers are never flagged; and the bare `h2`
// token can match unrelated substrings. Behavior preserved — confirm intent.
rules.push({
  id: 'PERF-142', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'HTTPS server not using HTTP/2',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      if (!/https\.createServer\s*\(|spdy\.createServer/i.test(source)) continue;
      if (/http2|http\/2|spdy|h2/i.test(source)) continue;
      results.push({ ruleId: 'PERF-142', category: 'performance', severity: 'low', title: 'HTTPS server without HTTP/2 — missing multiplexing and header compression', description: 'HTTP/2 provides multiplexing (no head-of-line blocking), header compression, and server push. Use http2.createSecureServer() or a reverse proxy like nginx with HTTP/2.', file, fix: null });
    }
    return results;
  },
});
3091
+
3092
// PERF-143: Logging inside tight loop.
// Tracks an approximate loop-nesting depth and flags console/logger calls
// seen inside a loop body (or on the loop header itself).
rules.push({
  id: 'PERF-143', category: 'performance', severity: 'medium', confidence: 'likely', title: 'console.log() inside loop — excessive I/O in hot path',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      let loopDepth = 0;
      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        if (/^\s*(\/\/|\/\*|\*)/.test(line)) continue;
        const loopHead = /\b(?:for|while)\s*\(/.test(line);
        if (loopHead) {
          // Only count the body when the opening brace is not balanced on the
          // same line; a brace-less or single-line loop previously incremented
          // loopDepth with no matching decrement, leaving the counter stuck
          // and flagging every later log call in the file.
          const opens = (line.match(/\{/g) || []).length;
          const closes = (line.match(/\}/g) || []).length;
          if (opens > closes) loopDepth++;
        }
        if (/^\s*\}/.test(line) && loopDepth > 0) loopDepth--;
        if ((loopDepth > 0 || loopHead) && /console\.\w+\s*\(|logger\.\w+\s*\(/.test(line)) {
          findings.push({ ruleId: 'PERF-143', category: 'performance', severity: 'medium', title: 'Logging inside loop — synchronous I/O on every iteration', description: 'Console/logger calls inside loops create excessive I/O. Collect items and log once after the loop, or use a sampling strategy.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
3113
+
3114
// PERF-144: Blocking DNS lookup in request handler.
// NOTE(review): Node core's dns module exposes no `lookupSync`; this pattern
// would only match a userland/wrapper API of that name — confirm whether the
// intended target was a different blocking pattern. Behavior preserved.
rules.push({
  id: 'PERF-144', category: 'performance', severity: 'high', confidence: 'likely', title: 'Synchronous DNS lookup in request handler',
  check({ files }) {
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      source.split('\n').forEach((line, idx) => {
        if (!/dns\.lookupSync\s*\(/.test(line)) return;
        results.push({ ruleId: 'PERF-144', category: 'performance', severity: 'high', title: 'Synchronous DNS lookup blocks the event loop', description: 'Use dns.lookup() with a callback or dns.promises.lookup() for non-blocking DNS resolution.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
3129
+
3130
// PERF-145: Missing database connection pooling (pg).
// Only runs when the project depends on `pg`; flags files constructing a
// Client but never a Pool.
rules.push({
  id: 'PERF-145', category: 'performance', severity: 'high', confidence: 'likely', title: 'pg.Client used instead of pg.Pool — no connection pooling',
  check({ files, stack }) {
    if (!stack.dependencies?.['pg']) return [];
    const results = [];
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      if (!/new\s+Client\s*\(/.test(source)) continue;
      if (/new\s+Pool\s*\(/.test(source)) continue;
      results.push({ ruleId: 'PERF-145', category: 'performance', severity: 'high', title: 'Using pg.Client without Pool — creates new DB connection per query', description: 'Use pg.Pool to reuse database connections and avoid connection overhead on every request.', file, fix: null });
    }
    return results;
  },
});
3143
+
3144
// PERF-146: Large JSON payload without streaming.
// Flags `res.json(await X.find())` — an unbounded query sent straight to the
// response.
rules.push({
  id: 'PERF-146', category: 'performance', severity: 'medium', confidence: 'likely', title: 'JSON.stringify on potentially large object without streaming',
  check({ files }) {
    const results = [];
    const unboundedJson = /res\.json\s*\(\s*await\s+\w+\.find\s*\(\s*\)/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      source.split('\n').forEach((line, idx) => {
        if (!unboundedJson.test(line)) return;
        results.push({ ruleId: 'PERF-146', category: 'performance', severity: 'medium', title: 'Sending unbounded find() result as JSON — may return large payload', description: 'Add .limit() to database queries before sending as JSON response to prevent memory issues.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
3159
+
3160
// PERF-147: Unnecessary full document update ($set vs full replace).
// Flags Mongo update calls whose second argument looks like a raw object
// (req./body./data./obj.) with no $set operator on the line.
rules.push({
  id: 'PERF-147', category: 'performance', severity: 'medium', confidence: 'likely', title: 'MongoDB full document replacement instead of $set update',
  check({ files }) {
    const results = [];
    const fullReplace = /\.(?:update|updateOne|updateMany)\s*\([^,]+,\s*(?:req\.|body\.|data\.|obj\.)/;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      source.split('\n').forEach((line, idx) => {
        if (!fullReplace.test(line) || /\$set/.test(line)) return;
        results.push({ ruleId: 'PERF-147', category: 'performance', severity: 'medium', title: 'MongoDB update without $set — replaces entire document', description: 'Use { $set: { field: value } } for partial updates to avoid replacing the full document.', file, line: idx + 1, fix: null });
      });
    }
    return results;
  },
});
3175
+
3176
// PERF-148: Unused middleware registered on all routes.
// File-level heuristic: resource-heavy middleware mounted via a global
// app.use().
rules.push({
  id: 'PERF-148', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Heavy middleware applied to all routes globally',
  check({ files }) {
    const results = [];
    const globalHeavy = /app\.use\s*\(\s*(?:multer|sharp|imagemin|pdf)/i;
    for (const [file, source] of files) {
      if (!isSourceFile(file)) continue;
      if (!globalHeavy.test(source)) continue;
      results.push({ ruleId: 'PERF-148', category: 'performance', severity: 'low', title: 'Heavy processing middleware applied globally via app.use()', description: 'Apply resource-intensive middleware only to specific routes that require it.', file, fix: null });
    }
    return results;
  },
});
3188
+
3189
// PERF-149: Synchronous file stat in hot path
rules.push({
  id: 'PERF-149', category: 'performance', severity: 'high', confidence: 'likely', title: 'fs.statSync/existsSync in request handler — blocks event loop',
  check({ files }) {
    // Any synchronous stat-family call blocks the event loop for the duration
    // of the disk access; flag each occurrence per line.
    const syncFsCall = /fs\.(?:statSync|existsSync|accessSync|lstatSync)\s*\(/;
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      content.split('\n').forEach((line, idx) => {
        if (syncFsCall.test(line)) {
          results.push({ ruleId: 'PERF-149', category: 'performance', severity: 'high', title: 'Synchronous filesystem operation blocks the event loop', description: 'Use fs.promises.stat(), fs.promises.access() or the callback-based equivalents.', file: filePath, line: idx + 1, fix: null });
        }
      });
    }
    return results;
  },
});
3204
+
3205
// PERF-150: String split + join in loop instead of replace
rules.push({
  id: 'PERF-150', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'String.split().join() in loop — use String.replace() with /g flag',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (!/\.split\s*\([^)]+\)\.join\s*\(/.test(lines[i])) continue;
        // Look for a loop construct anywhere in the current line or the three
        // lines above it. (The previous implementation only checked lines[i-3]
        // and lines[i], silently skipping i-2 and i-1.)
        const context = lines.slice(Math.max(0, i - 3), i + 1).join('\n');
        if (/for\s*\(|\.map\s*\(|\.forEach\s*\(/.test(context)) {
          findings.push({ ruleId: 'PERF-150', category: 'performance', severity: 'low', title: 'split().join() pattern in loop — use replace(/pattern/g) instead', description: 'String.replace() with a global flag is more efficient than split().join() for string replacement.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
3220
+
3221
// PERF-151: Awaiting in forEach callback
rules.push({
  id: 'PERF-151', category: 'performance', severity: 'high', confidence: 'likely', title: 'await inside Array.forEach — async not awaited properly',
  check({ files }) {
    // forEach never awaits its callback's returned promise, so async callbacks
    // run fire-and-forget; flag every `.forEach(async ...)` occurrence.
    const asyncForEach = /\.forEach\s*\(\s*async\s+/;
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      content.split('\n').forEach((line, idx) => {
        if (asyncForEach.test(line)) {
          results.push({ ruleId: 'PERF-151', category: 'performance', severity: 'high', title: 'async callback in forEach — async operations not awaited', description: 'Use for...of with await or Promise.all(array.map(async () => {...})) to properly await async operations.', file: filePath, line: idx + 1, fix: null });
        }
      });
    }
    return results;
  },
});
3236
+
3237
// PERF-152: Recursive function without memoization
rules.push({
  id: 'PERF-152', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Recursive function called without memoization',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        const m = lines[i].match(/^(?:function|const)\s+(\w+)\s*(?:=\s*(?:function|\(.*\)\s*=>)|\()/);
        if (!m) continue;
        const fname = m[1];
        // Search for a self-call strictly BELOW the declaration line. The old
        // `body.substring(fname.length + 5)` heuristic did not reliably skip
        // the declaration's own name (e.g. for `function foo(` it left `foo(`
        // in the searched text), flagging every plain declaration as recursive.
        const callSites = lines.slice(i + 1, Math.min(lines.length, i + 30)).join('\n');
        // Memoization markers are still checked across the full window,
        // including the declaration line itself.
        const window = lines.slice(i, Math.min(lines.length, i + 30)).join('\n');
        if (new RegExp(`\\b${fname}\\s*\\(`).test(callSites) && !/memo|cache|memoize|Map|Set/.test(window)) {
          findings.push({ ruleId: 'PERF-152', category: 'performance', severity: 'medium', title: `Recursive function '${fname}' without memoization — may cause redundant computation`, description: 'Add memoization for recursive functions with overlapping subproblems to avoid exponential complexity.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
3259
+
3260
// PERF-153: Mongoose lean() not used for read-only queries
rules.push({
  id: 'PERF-153', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Mongoose query without .lean() for read-only operation',
  check({ files, stack }) {
    if (!stack.dependencies?.['mongoose']) return [];
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (!/\.find\s*\([^)]*\)\s*$/.test(lines[i]) || /\.lean\s*\(/.test(lines[i])) continue;
        // Join with '\n', not '': joining with the empty string can fuse
        // tokens across line boundaries (e.g. a line ending in `s` followed
        // by one starting with `ave` forms `save`), corrupting this check.
        const following = lines.slice(i, i + 5).join('\n');
        if (!/save|update|remove/.test(following)) {
          findings.push({ ruleId: 'PERF-153', category: 'performance', severity: 'medium', title: 'Mongoose .find() without .lean() — returns full Mongoose documents', description: 'Add .lean() to read-only Mongoose queries to return plain JS objects and improve query performance.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
3278
+
3279
// PERF-154: Creating Buffer with new Buffer() (deprecated/slow)
rules.push({
  id: 'PERF-154', category: 'performance', severity: 'medium', confidence: 'likely', title: 'new Buffer() deprecated — use Buffer.from/alloc/allocUnsafe',
  check({ files }) {
    // `new Buffer()` has been deprecated since Node 6 because its behavior
    // depends on the argument type; flag each occurrence per line.
    const deprecatedCtor = /new\s+Buffer\s*\(/;
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      content.split('\n').forEach((line, idx) => {
        if (deprecatedCtor.test(line)) {
          results.push({ ruleId: 'PERF-154', category: 'performance', severity: 'medium', title: 'new Buffer() is deprecated and insecure — use Buffer.from() or Buffer.alloc()', description: 'Replace new Buffer() with Buffer.from(data), Buffer.alloc(size), or Buffer.allocUnsafe(size).', file: filePath, line: idx + 1, fix: null });
        }
      });
    }
    return results;
  },
});
3294
+
3295
// PERF-155: Unindexed text search in MongoDB
rules.push({
  id: 'PERF-155', category: 'performance', severity: 'high', confidence: 'likely', title: 'MongoDB $regex query without text index — full collection scan',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (!/\$regex/.test(lines[i])) continue;
        // Check the current line plus the two lines above for the enclosing
        // find/findOne call. (The previous implementation concatenated only
        // lines[i-2] and lines[i], silently skipping line i-1.)
        const context = lines.slice(Math.max(0, i - 2), i + 1).join('\n');
        if (/find\s*\(|findOne\s*\(/.test(context)) {
          findings.push({ ruleId: 'PERF-155', category: 'performance', severity: 'high', title: 'MongoDB $regex without text index causes full collection scan', description: 'Create a text index and use $text search for text searching, or add an index on the field.', file: fp, line: i + 1, fix: null });
        }
      }
    }
    return findings;
  },
});
3310
+
3311
// PERF-156: Express response not ended causing memory leak
rules.push({
  id: 'PERF-156', category: 'performance', severity: 'high', confidence: 'likely', title: 'Express route handler may not send response in all code paths',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      if (!isSourceFile(fp)) continue;
      const lines = c.split('\n');
      for (let i = 0; i < lines.length; i++) {
        if (/(?:router|app)\.(?:get|post|put|delete)\s*\([^,]+,\s*(?:async\s+)?\(?(?:req|request)\s*,\s*(?:res|response)\)?/.test(lines[i])) {
          const body = lines.slice(i, Math.min(lines.length, i + 30)).join('\n');
          // A handler that delegates via next() is also fine — the description
          // below already tells users next() is acceptable, so the regex must
          // accept it too (otherwise middleware-style handlers are false
          // positives).
          if (!/res\.(?:send|json|end|redirect|render|status|download|sendFile)|\bnext\s*\(/.test(body)) {
            findings.push({ ruleId: 'PERF-156', category: 'performance', severity: 'high', title: 'Route handler may not send a response — request will hang', description: 'Ensure all code paths in route handlers call res.send(), res.json(), res.end(), or next().', file: fp, line: i + 1, fix: null });
          }
        }
      }
    }
    return findings;
  },
});
3331
+
3332
// PERF-157: useCallback/useMemo missing dependency array
rules.push({
  id: 'PERF-157', category: 'performance', severity: 'medium', confidence: 'likely', title: 'useCallback or useMemo without dependency array',
  check({ files }) {
    const results = [];
    for (const [filePath, content] of files) {
      // Only JS/TS(X) files can contain React hooks.
      if (!isSourceFile(filePath) || !filePath.match(/\.[jt]sx?$/)) continue;
      const sourceLines = content.split('\n');
      sourceLines.forEach((line, idx) => {
        if (!/(?:useCallback|useMemo)\s*\(\s*(?:async\s+)?\(/.test(line)) return;
        // Scan up to 8 lines for a closing `[deps])` — its absence means the
        // hook has no dependency array and re-creates its value every render.
        const hookWindow = sourceLines.slice(idx, Math.min(sourceLines.length, idx + 8)).join('\n');
        if (!/\[\s*(?:\w|,|\s)*\]\s*\)/.test(hookWindow)) {
          results.push({ ruleId: 'PERF-157', category: 'performance', severity: 'medium', title: 'useCallback/useMemo without dependency array — recreated every render', description: 'Add a dependency array as the second argument to useCallback/useMemo.', file: filePath, line: idx + 1, fix: null });
        }
      });
    }
    return results;
  },
});
3350
+
3351
// PERF-158: Large bundle — importing entire lodash
rules.push({
  id: 'PERF-158', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Importing entire lodash library — increases bundle size',
  check({ files }) {
    const fullLodashImport = /require\s*\(\s*['"]lodash['"]\s*\)|from\s+['"]lodash['"]/;
    const perFunctionImport = /from\s+['"]lodash\//;
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      // Only flag files that import the whole package and never use the
      // tree-shakeable per-function form.
      if (fullLodashImport.test(content) && !perFunctionImport.test(content)) {
        results.push({ ruleId: 'PERF-158', category: 'performance', severity: 'medium', title: 'Importing all of lodash — use lodash/{function} for tree shaking', description: 'Import individual lodash functions: import debounce from "lodash/debounce" to reduce bundle size.', file: filePath, fix: null });
      }
    }
    return results;
  },
});
3363
+
3364
// PERF-159: Too many watchers in Mongoose schema
rules.push({
  id: 'PERF-159', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Multiple Mongoose pre/post hooks on same operation',
  check({ files, stack }) {
    // Only meaningful in projects that actually depend on mongoose.
    if (!stack.dependencies?.['mongoose']) return [];
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      const hookCount = (content.match(/\.pre\s*\(\s*['"]save['"]/g) || []).length;
      // More than three pre('save') hooks in one file suggests save latency
      // is accumulating across hooks.
      if (hookCount > 3) {
        results.push({ ruleId: 'PERF-159', category: 'performance', severity: 'medium', title: `${hookCount} Mongoose pre('save') hooks — consolidate for performance`, description: 'Multiple pre-save hooks increase save latency. Consolidate into a single hook where possible.', file: filePath, fix: null });
      }
    }
    return results;
  },
});
3378
+
3379
// PERF-160: Missing HTTP response caching headers
rules.push({
  id: 'PERF-160', category: 'performance', severity: 'low', confidence: 'suggestion', title: 'Static API endpoint without Cache-Control header',
  check({ files }) {
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      const sourceLines = content.split('\n');
      sourceLines.forEach((line, idx) => {
        // Heuristic: GET routes whose path looks like a static listing or
        // configuration endpoint are good caching candidates.
        if (!/(?:router|app)\.get\s*\(\s*['"`][^'"`]*(?:\/list|\/all|\/config|\/settings)['"` ]/.test(line)) return;
        const handlerWindow = sourceLines.slice(idx, Math.min(sourceLines.length, idx + 15)).join('\n');
        if (!/Cache-Control|cache-control|setHeader.*cache|etag/i.test(handlerWindow)) {
          results.push({ ruleId: 'PERF-160', category: 'performance', severity: 'low', title: 'GET endpoint without Cache-Control header — responses not cached', description: 'Add Cache-Control headers to static/list endpoints to enable browser and proxy caching.', file: filePath, line: idx + 1, fix: null });
        }
      });
    }
    return results;
  },
});
3397
+
3398
// PERF-161: Multiple render cycles from state updates in React
rules.push({
  id: 'PERF-161', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Sequential useState calls causing multiple re-renders',
  check({ files }) {
    const setterCall = /\bset([A-Z]\w+)\s*\(/;
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath) || !filePath.match(/\.[jt]sx?$/)) continue;
      const sourceLines = content.split('\n');
      // Flag adjacent lines that each call a DIFFERENT setXxx state setter —
      // two setters for the same state variable on consecutive lines are not
      // reported.
      for (let idx = 0; idx + 1 < sourceLines.length; idx++) {
        const current = sourceLines[idx].match(setterCall);
        if (!current) continue;
        const following = sourceLines[idx + 1].match(setterCall);
        if (following && current[1] !== following[1]) {
          results.push({ ruleId: 'PERF-161', category: 'performance', severity: 'medium', title: 'Consecutive setState calls — consider combining into a single useReducer', description: 'Multiple sequential state updates cause multiple re-renders. Use useReducer or batch updates.', file: filePath, line: idx + 1, fix: null });
        }
      }
    }
    return results;
  },
});
3419
+
3420
// PERF-162: Heavy computation in component render without worker
rules.push({
  id: 'PERF-162', category: 'performance', severity: 'medium', confidence: 'likely', title: 'Cryptographic or heavy computation in React render — blocks UI',
  check({ files }) {
    const findings = [];
    for (const [fp, c] of files) {
      // Only JS/TS(X) files can be React components.
      if (!isSourceFile(fp) || !fp.match(/\.[jt]sx?$/)) continue;
      // Heuristic: file both performs CPU-heavy crypto work and renders
      // (JSX-style `return (` or explicit React.createElement).
      if (/(?:crypto\.subtle|pbkdf2|scrypt|argon2|bcrypt)/.test(c) && /return\s*\(|React\.createElement/.test(c)) {
        // Finding title fixed: "moves it" -> "move it" (imperative recommendation).
        findings.push({ ruleId: 'PERF-162', category: 'performance', severity: 'medium', title: 'Heavy cryptographic computation in React component — move it to a Web Worker', description: 'Offload CPU-intensive operations to a Web Worker to prevent blocking the main thread.', file: fp, fix: null });
      }
    }
    return findings;
  },
});
3434
+
3435
// PERF-163: Prisma N+1 without include
rules.push({
  id: 'PERF-163', category: 'performance', severity: 'high', confidence: 'definite', title: 'Prisma query inside loop without include — N+1 query pattern',
  check({ files, stack }) {
    // Only meaningful when the project uses the Prisma client.
    if (!stack.dependencies?.['@prisma/client']) return [];
    const results = [];
    for (const [filePath, content] of files) {
      if (!isSourceFile(filePath)) continue;
      const sourceLines = content.split('\n');
      // Crude loop tracking: entering any for/map/forEach sets the flag; a
      // line consisting of a leading `}` clears it.
      let insideLoop = false;
      sourceLines.forEach((line, idx) => {
        if (/for\s*\(|\.map\s*\(|\.forEach\s*\(/.test(line)) insideLoop = true;
        if (insideLoop && /prisma\.\w+\.findUnique\s*\(|prisma\.\w+\.findFirst\s*\(/.test(line)) {
          results.push({ ruleId: 'PERF-163', category: 'performance', severity: 'high', title: 'Prisma findUnique/findFirst inside loop — N+1 queries', description: 'Use prisma.findMany with where: { id: { in: ids } } or include related data with include option.', file: filePath, line: idx + 1, fix: null });
        }
        if (/^\s*\}/.test(line)) insideLoop = false;
      });
    }
    return results;
  },
});