@vibecodeqa/cli 0.16.0 → 0.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +73 -63
- package/dist/check-meta.d.ts +1 -0
- package/dist/check-meta.js +58 -6
- package/dist/cli.js +48 -10
- package/dist/detect.js +24 -2
- package/dist/fs-utils.d.ts +4 -0
- package/dist/fs-utils.js +12 -6
- package/dist/report/html.d.ts +18 -9
- package/dist/report/html.js +108 -68
- package/dist/report/pages.d.ts +4 -4
- package/dist/report/pages.js +165 -115
- package/dist/report/sarif.d.ts +3 -0
- package/dist/report/sarif.js +67 -0
- package/dist/report/styles.d.ts +1 -1
- package/dist/report/styles.js +105 -33
- package/dist/report/svg.d.ts +17 -0
- package/dist/report/svg.js +99 -0
- package/dist/runners/accessibility.d.ts +3 -0
- package/dist/runners/accessibility.js +85 -0
- package/dist/runners/architecture.d.ts +2 -0
- package/dist/runners/architecture.js +232 -20
- package/dist/runners/code-coherence.d.ts +17 -0
- package/dist/runners/code-coherence.js +39 -0
- package/dist/runners/complexity.js +7 -37
- package/dist/runners/confusion.js +3 -31
- package/dist/runners/context.js +9 -40
- package/dist/runners/dependencies.js +28 -0
- package/dist/runners/doc-coherence.d.ts +14 -0
- package/dist/runners/doc-coherence.js +48 -0
- package/dist/runners/docs.js +7 -32
- package/dist/runners/duplication.js +9 -37
- package/dist/runners/lint.js +17 -0
- package/dist/runners/performance.d.ts +10 -0
- package/dist/runners/performance.js +174 -0
- package/dist/runners/react.d.ts +3 -0
- package/dist/runners/react.js +86 -0
- package/dist/runners/secrets.js +8 -29
- package/dist/runners/security.js +15 -38
- package/dist/runners/standards.js +3 -36
- package/dist/runners/structure.js +35 -55
- package/dist/runners/testing.js +2 -36
- package/dist/runners/type-safety.d.ts +1 -1
- package/dist/runners/type-safety.js +19 -37
- package/dist/runners/types-check.d.ts +1 -1
- package/dist/runners/types-check.js +38 -20
- package/dist/types.d.ts +5 -5
- package/package.json +11 -10
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
/** Doc Coherence — detects contradictions between documentation and code.
|
|
2
|
+
*
|
|
3
|
+
* Premium feature (powered by LLM). Scans README, CLAUDE.md, JSDoc, and inline
|
|
4
|
+
* comments for claims that contradict the actual code:
|
|
5
|
+
* - README says "supports X" but feature X was removed
|
|
6
|
+
* - JSDoc says "@param required" but param has a default
|
|
7
|
+
* - Comment says "never throws" but function has throw statements
|
|
8
|
+
* - CHANGELOG references files that no longer exist
|
|
9
|
+
* - API docs describe endpoints/functions that were renamed or deleted
|
|
10
|
+
*
|
|
11
|
+
* Currently returns a "coming soon" placeholder.
|
|
12
|
+
*/
|
|
13
|
+
import { existsSync } from "node:fs";
|
|
14
|
+
import { join } from "node:path";
|
|
15
|
+
import { getProductionFiles } from "../fs-utils.js";
|
|
16
|
+
/**
 * Doc-coherence runner (premium placeholder).
 *
 * Reports which well-known documentation files exist and whether any
 * production source file contains a JSDoc block, then returns a fixed
 * "coming soon" result (score 0 / grade "F") until the LLM-powered
 * analysis ships.
 *
 * @param {string} cwd - Project root directory to inspect.
 * @returns {object} CheckResult-shaped object with placeholder details.
 */
export function runDocCoherence(cwd) {
    const startedAt = Date.now();
    // Even in placeholder mode it is useful to report which docs exist.
    const candidateDocs = ["README.md", "CLAUDE.md", "ARCHITECTURE.md", "CONTRIBUTING.md", "CHANGELOG.md", "API.md", "docs/README.md"];
    const docFiles = candidateDocs.filter((name) => existsSync(join(cwd, name)));
    let hasJSDoc = false;
    try {
        hasJSDoc = getProductionFiles(cwd).some((file) => /\/\*\*/.test(file.content));
    }
    catch {
        /* no source files */
    }
    return {
        name: "doc-coherence",
        score: 0,
        grade: "F",
        details: {
            premium: true,
            comingSoon: true,
            reason: "LLM-powered analysis — coming soon",
            docFiles,
            hasJSDoc,
            description: "Detects contradictions between documentation and code. Finds stale README claims, incorrect JSDoc, outdated API docs, and misleading comments.",
        },
        issues: [],
        duration: Date.now() - startedAt,
    };
}
|
package/dist/runners/docs.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
/** Documentation check — README, JSDoc, code comments. */
|
|
2
|
-
import { existsSync,
|
|
3
|
-
import {
|
|
2
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { getProductionFiles } from "../fs-utils.js";
|
|
4
5
|
import { gradeFromScore } from "../types.js";
|
|
5
6
|
export function runDocs(cwd) {
|
|
6
7
|
const start = Date.now();
|
|
@@ -34,21 +35,11 @@ export function runDocs(cwd) {
|
|
|
34
35
|
issues.push({ severity: "warning", message: "README has very little content", rule: "readme-sparse" });
|
|
35
36
|
}
|
|
36
37
|
// Check exported function documentation
|
|
37
|
-
const
|
|
38
|
-
const dirs = ["src", "web/src"];
|
|
39
|
-
for (const dir of dirs) {
|
|
40
|
-
try {
|
|
41
|
-
collectFiles(join(cwd, dir), files);
|
|
42
|
-
}
|
|
43
|
-
catch {
|
|
44
|
-
/* dir doesn't exist */
|
|
45
|
-
}
|
|
46
|
-
}
|
|
38
|
+
const sourceFiles = getProductionFiles(cwd);
|
|
47
39
|
let totalExports = 0;
|
|
48
40
|
let documentedExports = 0;
|
|
49
|
-
for (const
|
|
50
|
-
const
|
|
51
|
-
const lines = content.split("\n");
|
|
41
|
+
for (const sf of sourceFiles) {
|
|
42
|
+
const lines = sf.content.split("\n");
|
|
52
43
|
for (let i = 0; i < lines.length; i++) {
|
|
53
44
|
const line = lines[i].trim();
|
|
54
45
|
if (line.startsWith("export function ") ||
|
|
@@ -84,7 +75,7 @@ export function runDocs(cwd) {
|
|
|
84
75
|
score,
|
|
85
76
|
grade: gradeFromScore(score),
|
|
86
77
|
details: {
|
|
87
|
-
readmeLines: existsSync(
|
|
78
|
+
readmeLines: existsSync(join(cwd, "README.md")) ? readFileSync(join(cwd, "README.md"), "utf-8").split("\n").length : 0,
|
|
88
79
|
totalExports,
|
|
89
80
|
documentedExports,
|
|
90
81
|
documentedPct: totalExports > 0 ? `${Math.round((documentedExports / totalExports) * 100)}%` : "n/a",
|
|
@@ -93,19 +84,3 @@ export function runDocs(cwd) {
|
|
|
93
84
|
duration: Date.now() - start,
|
|
94
85
|
};
|
|
95
86
|
}
|
|
96
|
-
function collectFiles(dir, out) {
|
|
97
|
-
for (const entry of readdirSync(dir)) {
|
|
98
|
-
if (entry === "node_modules" || entry === "dist")
|
|
99
|
-
continue;
|
|
100
|
-
const full = join(dir, entry);
|
|
101
|
-
if (statSync(full).isDirectory()) {
|
|
102
|
-
collectFiles(full, out);
|
|
103
|
-
}
|
|
104
|
-
else {
|
|
105
|
-
const ext = extname(entry);
|
|
106
|
-
if ((ext === ".ts" || ext === ".tsx") && !entry.includes(".test.") && !entry.includes(".spec.")) {
|
|
107
|
-
out.push(full);
|
|
108
|
-
}
|
|
109
|
-
}
|
|
110
|
-
}
|
|
111
|
-
}
|
|
@@ -1,28 +1,18 @@
|
|
|
1
1
|
/** Code duplication detection — finds copy-pasted blocks. */
|
|
2
|
-
import {
|
|
3
|
-
import { extname, join } from "node:path";
|
|
2
|
+
import { getProductionFiles } from "../fs-utils.js";
|
|
4
3
|
import { gradeFromScore } from "../types.js";
|
|
5
4
|
const MIN_LINES = 6; // minimum duplicate block size
|
|
6
5
|
const MIN_TOKENS = 50; // minimum token count for a duplicate
|
|
7
6
|
export function runDuplication(cwd) {
|
|
8
7
|
const start = Date.now();
|
|
9
8
|
const issues = [];
|
|
10
|
-
const
|
|
11
|
-
|
|
12
|
-
for (const dir of dirs) {
|
|
13
|
-
try {
|
|
14
|
-
collectFiles(join(cwd, dir), files);
|
|
15
|
-
}
|
|
16
|
-
catch {
|
|
17
|
-
/* dir doesn't exist */
|
|
18
|
-
}
|
|
19
|
-
}
|
|
20
|
-
if (files.length < 2) {
|
|
9
|
+
const sourceFiles = getProductionFiles(cwd);
|
|
10
|
+
if (sourceFiles.length < 2) {
|
|
21
11
|
return {
|
|
22
12
|
name: "duplication",
|
|
23
13
|
score: 100,
|
|
24
14
|
grade: "A",
|
|
25
|
-
details: { filesScanned:
|
|
15
|
+
details: { filesScanned: sourceFiles.length, duplicates: 0 },
|
|
26
16
|
issues: [],
|
|
27
17
|
duration: Date.now() - start,
|
|
28
18
|
};
|
|
@@ -31,23 +21,21 @@ export function runDuplication(cwd) {
|
|
|
31
21
|
// Build a map of normalized line hashes → locations
|
|
32
22
|
const lineMap = new Map();
|
|
33
23
|
let totalSourceLines = 0;
|
|
34
|
-
for (const
|
|
35
|
-
const
|
|
36
|
-
const relPath = file.replace(`${cwd}/`, "");
|
|
37
|
-
const lines = content.split("\n");
|
|
24
|
+
for (const sf of sourceFiles) {
|
|
25
|
+
const lines = sf.content.split("\n");
|
|
38
26
|
totalSourceLines += lines.length;
|
|
39
27
|
for (let i = 0; i <= lines.length - MIN_LINES; i++) {
|
|
40
28
|
const block = lines
|
|
41
29
|
.slice(i, i + MIN_LINES)
|
|
42
30
|
.map((l) => l.trim())
|
|
43
|
-
.filter((l) => l.length > 0 && !l.startsWith("//") && !l.startsWith("*") && l !== "{" && l !== "}" && l !== "");
|
|
31
|
+
.filter((l) => l.length > 0 && !l.startsWith("//") && !l.startsWith("*") && !l.startsWith("import ") && !l.startsWith("export {") && l !== "{" && l !== "}" && l !== "");
|
|
44
32
|
if (block.length < MIN_LINES - 2)
|
|
45
33
|
continue; // too many empty/trivial lines
|
|
46
34
|
const key = block.join("\n");
|
|
47
35
|
if (key.length < MIN_TOKENS)
|
|
48
36
|
continue;
|
|
49
37
|
const locs = lineMap.get(key) || [];
|
|
50
|
-
locs.push({ file:
|
|
38
|
+
locs.push({ file: sf.path, line: i + 1 });
|
|
51
39
|
lineMap.set(key, locs);
|
|
52
40
|
}
|
|
53
41
|
}
|
|
@@ -86,24 +74,8 @@ export function runDuplication(cwd) {
|
|
|
86
74
|
name: "duplication",
|
|
87
75
|
score,
|
|
88
76
|
grade: gradeFromScore(score),
|
|
89
|
-
details: { filesScanned:
|
|
77
|
+
details: { filesScanned: sourceFiles.length, totalSourceLines, duplicateBlocks: duplicates.length, duplicationPct: `${dupPct}%` },
|
|
90
78
|
issues,
|
|
91
79
|
duration: Date.now() - start,
|
|
92
80
|
};
|
|
93
81
|
}
|
|
94
|
-
function collectFiles(dir, out) {
|
|
95
|
-
for (const entry of readdirSync(dir)) {
|
|
96
|
-
if (entry === "node_modules" || entry === "dist" || entry === ".git")
|
|
97
|
-
continue;
|
|
98
|
-
const full = join(dir, entry);
|
|
99
|
-
if (statSync(full).isDirectory()) {
|
|
100
|
-
collectFiles(full, out);
|
|
101
|
-
}
|
|
102
|
-
else {
|
|
103
|
-
const ext = extname(entry);
|
|
104
|
-
if ([".ts", ".tsx", ".js", ".jsx"].includes(ext) && !entry.includes(".test.") && !entry.includes(".spec.")) {
|
|
105
|
-
out.push(full);
|
|
106
|
-
}
|
|
107
|
-
}
|
|
108
|
-
}
|
|
109
|
-
}
|
package/dist/runners/lint.js
CHANGED
|
@@ -49,6 +49,23 @@ export function runLint(cwd, stack) {
|
|
|
49
49
|
/* eslint output parse failed */
|
|
50
50
|
}
|
|
51
51
|
}
|
|
52
|
+
else if (stack.linter === "dart_analyze") {
|
|
53
|
+
const { stdout } = run("dart analyze --format=machine 2>/dev/null || true", cwd);
|
|
54
|
+
// machine format: SEVERITY|TYPE|CODE|PATH|LINE|COL|LEN|MESSAGE
|
|
55
|
+
for (const line of stdout.split("\n")) {
|
|
56
|
+
const parts = line.split("|");
|
|
57
|
+
if (parts.length < 8)
|
|
58
|
+
continue;
|
|
59
|
+
const severity = parts[0] === "ERROR" ? "error" : parts[0] === "WARNING" ? "warning" : "info";
|
|
60
|
+
issues.push({
|
|
61
|
+
severity,
|
|
62
|
+
message: parts[7],
|
|
63
|
+
file: parts[3],
|
|
64
|
+
line: parseInt(parts[4], 10) || undefined,
|
|
65
|
+
rule: parts[2],
|
|
66
|
+
});
|
|
67
|
+
}
|
|
68
|
+
}
|
|
52
69
|
else {
|
|
53
70
|
return {
|
|
54
71
|
name: "lint",
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/** Performance check — barrel imports, dynamic import opportunities, large bundles.
|
|
2
|
+
*
|
|
3
|
+
* Sub-checks:
|
|
4
|
+
* 1. Barrel import smell — index.ts re-exports that defeat tree-shaking
|
|
5
|
+
* 2. Heavy dependencies — bundled packages known to bloat output
|
|
6
|
+
* 3. Dynamic import opportunities — large imports that could be lazy-loaded
|
|
7
|
+
* 4. CSS-in-JS overhead — detects runtime CSS solutions vs zero-runtime alternatives
|
|
8
|
+
*/
|
|
9
|
+
import type { CheckResult } from "../types.js";
|
|
10
|
+
export declare function runPerformance(cwd: string): CheckResult;
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
/** Performance check — barrel imports, dynamic import opportunities, large bundles.
|
|
2
|
+
*
|
|
3
|
+
* Sub-checks:
|
|
4
|
+
* 1. Barrel import smell — index.ts re-exports that defeat tree-shaking
|
|
5
|
+
* 2. Heavy dependencies — bundled packages known to bloat output
|
|
6
|
+
* 3. Dynamic import opportunities — large imports that could be lazy-loaded
|
|
7
|
+
* 4. CSS-in-JS overhead — detects runtime CSS solutions vs zero-runtime alternatives
|
|
8
|
+
*/
|
|
9
|
+
import { existsSync, readdirSync, statSync } from "node:fs";
|
|
10
|
+
import { join } from "node:path";
|
|
11
|
+
import { getProductionFiles, readDeps } from "../fs-utils.js";
|
|
12
|
+
import { gradeFromScore } from "../types.js";
|
|
13
|
+
// Packages known to be heavy (bundled KB, approx)
// Each entry maps a package name to its approximate bundled size (`kb`) and a
// lighter alternative (`alt`); both fields are interpolated into the
// "heavy-dependency" issue message emitted by runPerformance.
const HEAVY_DEPS = {
    moment: { kb: 300, alt: "date-fns or dayjs (2-7KB)" },
    lodash: { kb: 70, alt: "lodash-es or native methods" },
    // NOTE(review): "lodash.js" is an unusual package name — confirm it is
    // intentional (possibly meant as a legacy alias) rather than a typo.
    "lodash.js": { kb: 70, alt: "lodash-es or native methods" },
    rxjs: { kb: 50, alt: "only import operators you use" },
    "@fortawesome/fontawesome-svg-core": { kb: 60, alt: "lucide-react or heroicons (tree-shakeable)" },
    "@material-ui/core": { kb: 300, alt: "@mui/material with tree-shaking imports" },
    "chart.js": { kb: 200, alt: "lightweight-charts or uPlot" },
    firebase: { kb: 200, alt: "firebase/app + only needed modules" },
    "aws-sdk": { kb: 400, alt: "@aws-sdk/client-* (v3 modular)" },
    underscore: { kb: 25, alt: "native ES methods" },
};
|
|
26
|
+
/**
 * Scans production source files and declared dependencies for bundle-size
 * hazards (barrel files, heavy packages, lazy-load candidates, runtime
 * CSS-in-JS) and returns a scored CheckResult.
 *
 * Score starts at 100 and is reduced by a weighted penalty per finding
 * (see the penalty computation at the bottom); it is clamped to [0, 100].
 *
 * @param {string} cwd - Project root directory to analyze.
 * @returns {object} { name, score, grade, details, issues, duration }.
 */
export function runPerformance(cwd) {
    const start = Date.now();
    const issues = [];
    const sourceFiles = getProductionFiles(cwd);
    // No source files: return a perfect, explicitly-skipped result.
    if (sourceFiles.length === 0) {
        return {
            name: "performance",
            score: 100,
            grade: "A",
            details: { skipped: true, reason: "no source files" },
            issues: [],
            duration: Date.now() - start,
        };
    }
    let barrelImports = 0;
    let heavyDeps = 0;
    let dynamicOpportunities = 0;
    let cssInJsRuntime = 0;
    // ── 1. Barrel import detection ──
    // Find index.ts files that just re-export
    for (const f of sourceFiles) {
        // NOTE(review): assumes fs-utils sets `base` to the extension-less
        // filename (so "index.ts" → "index") — confirm against fs-utils.js.
        if (f.base !== "index")
            continue;
        const lines = f.content.split("\n").filter((l) => l.trim().length > 0);
        const exportLines = lines.filter((l) => /^export\s/.test(l.trim()));
        // If >80% of non-empty lines are re-exports, it's a barrel
        // (requires at least 3 export lines to avoid flagging tiny files).
        if (lines.length > 0 && exportLines.length / lines.length > 0.8 && exportLines.length >= 3) {
            barrelImports++;
            issues.push({
                severity: "warning",
                message: `Barrel file with ${exportLines.length} re-exports — defeats tree-shaking in many bundlers`,
                file: f.path,
                rule: "barrel-import",
            });
        }
    }
    // Check for imports from barrel files (importing from directory index)
    for (const f of sourceFiles) {
        const lines = f.content.split("\n");
        for (let i = 0; i < lines.length; i++) {
            // Flags relative imports whose destructured specifier list is 50+
            // characters — a heuristic for "many names pulled from one module".
            const match = lines[i].match(/import\s+\{[^}]{50,}\}\s+from\s+['"](\.[^'"]+)['"]/);
            if (match) {
                issues.push({
                    severity: "info",
                    message: "Large destructured import — if from a barrel, only imported items should be bundled",
                    file: f.path,
                    line: i + 1,
                    rule: "large-import",
                });
            }
        }
    }
    // ── 2. Heavy dependency detection ──
    // readDeps presumably merges dependencies/devDependencies from
    // package.json — verify against fs-utils.js.
    const deps = readDeps(cwd);
    for (const [name, info] of Object.entries(HEAVY_DEPS)) {
        if (deps[name]) {
            heavyDeps++;
            issues.push({
                severity: "warning",
                message: `${name} (~${info.kb}KB) — consider ${info.alt}`,
                rule: "heavy-dependency",
            });
        }
    }
    // lodash without lodash-es (non-tree-shakeable)
    if (deps.lodash && !deps["lodash-es"]) {
        issues.push({
            severity: "warning",
            message: "lodash (not lodash-es) — CommonJS build defeats tree-shaking",
            rule: "non-esm-dep",
        });
    }
    // ── 3. Dynamic import opportunities ──
    // Large conditional imports that could be lazy-loaded
    for (const f of sourceFiles) {
        const lines = f.content.split("\n");
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i].trim();
            // Static import of known-heavy visualization/editor libraries
            // NOTE(review): the \b before "@react-three" never matches ("@" is
            // a non-word char after a quote), but the plain "three"
            // alternative still catches @react-three/* imports.
            if (/^import\s/.test(line) && /\b(monaco|codemirror|ace-builds|chart\.js|three|@react-three|recharts|d3)\b/.test(line)) {
                dynamicOpportunities++;
                issues.push({
                    severity: "info",
                    message: "Consider dynamic import() for heavy library — reduces initial bundle",
                    file: f.path,
                    line: i + 1,
                    rule: "dynamic-import-opportunity",
                });
            }
        }
    }
    // ── 4. CSS-in-JS runtime overhead ──
    const runtimeCss = ["styled-components", "@emotion/styled", "@emotion/react"];
    for (const pkg of runtimeCss) {
        if (deps[pkg]) {
            cssInJsRuntime++;
            issues.push({
                severity: "info",
                message: `${pkg} adds runtime CSS overhead — consider Tailwind, CSS Modules, or vanilla-extract`,
                rule: "runtime-css",
            });
        }
    }
    // ── 5. Bundle size check (if dist/ exists) ──
    // Only the first existing output dir is measured (break after match).
    let bundleSizeKB = 0;
    const distDirs = ["dist", "build", ".next", "out"];
    for (const d of distDirs) {
        const distPath = join(cwd, d);
        if (existsSync(distPath)) {
            try {
                bundleSizeKB = Math.round(dirSizeKB(distPath));
            }
            catch { /* can't read dist */ }
            break;
        }
    }
    // Score
    // Weights: barrel 3, heavy dep 8, dynamic-import candidate 2, runtime CSS 2.
    // Note: the "large-import" and "non-esm-dep" issues are informational only
    // and do not affect the score.
    const penalty = barrelImports * 3 + heavyDeps * 8 + dynamicOpportunities * 2 + cssInJsRuntime * 2;
    const score = Math.max(0, Math.min(100, 100 - penalty));
    return {
        name: "performance",
        score,
        grade: gradeFromScore(score),
        details: {
            filesScanned: sourceFiles.length,
            barrelImports,
            heavyDeps,
            dynamicOpportunities,
            cssInJsRuntime,
            // bundleSizeKB is only reported when an output dir was measured.
            ...(bundleSizeKB > 0 ? { bundleSizeKB } : {}),
        },
        issues,
        duration: Date.now() - start,
    };
}
|
|
161
|
+
/**
 * Recursively sums the sizes of all files under `dir`.
 *
 * Directories contribute only their contents (their own entry size is not
 * counted); symlinks are followed via statSync, matching the original.
 *
 * @param {string} dir - Directory to measure.
 * @returns {number} Total size in kilobytes (bytes / 1024, not rounded).
 */
function dirSizeKB(dir) {
    const totalBytes = readdirSync(dir).reduce((sum, name) => {
        const entryPath = join(dir, name);
        const info = statSync(entryPath);
        return sum + (info.isDirectory() ? dirSizeKB(entryPath) * 1024 : info.size);
    }, 0);
    return totalBytes / 1024;
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
/** React-specific checks — hooks rules, conditional hooks, missing keys, prop spreading. */
|
|
2
|
+
import { gradeFromScore } from "../types.js";
|
|
3
|
+
import { getProductionFiles } from "../fs-utils.js";
|
|
4
|
+
/**
 * Heuristic React lint pass over .tsx/.jsx production files.
 *
 * Line-by-line regex checks (no AST): conditional hook calls, JSX in .map()
 * without a key, index-as-key, prop spreading onto lowercase (DOM) elements,
 * and inline arrow handlers. Score = 100 - errors*8 - warnings*3, clamped
 * to [0, 100].
 *
 * @param {string} cwd - Project root directory.
 * @param {object} stack - Detected stack info; only `framework` is read here.
 * @returns {object} { name, score, grade, details, issues, duration }.
 */
export function runReact(cwd, stack) {
    const start = Date.now();
    if (stack.framework !== "react") {
        return { name: "react", score: 100, grade: "A", details: { skipped: true, reason: "not a React project" }, issues: [], duration: Date.now() - start };
    }
    const files = getProductionFiles(cwd).filter((f) => f.ext === ".tsx" || f.ext === ".jsx");
    if (files.length === 0) {
        return { name: "react", score: 100, grade: "A", details: { skipped: true, reason: "no JSX/TSX files" }, issues: [], duration: Date.now() - start };
    }
    const issues = [];
    let conditionalHooks = 0;
    let missingKeys = 0;
    let propSpreading = 0;
    let inlineHandlers = 0;
    let indexKeys = 0;
    for (const f of files) {
        const lines = f.content.split("\n");
        // Track brace depth inside conditional blocks
        let condBraceDepth = 0; // > 0 means we're inside a conditional's body
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i];
            const trimmed = line.trim();
            // Skip comments
            if (trimmed.startsWith("//") || trimmed.startsWith("*"))
                continue;
            // Count braces on this line
            const opens = (trimmed.match(/\{/g) || []).length;
            const closes = (trimmed.match(/\}/g) || []).length;
            // Enter conditional: set depth to 1 on the opening brace
            // NOTE(review): depth is reset (not incremented) on a new
            // conditional, so nested conditionals are tracked approximately;
            // JSX braces on the same line also count toward depth.
            if (/\b(if|else|switch)\s*[\s(]/.test(trimmed) && opens > 0) {
                condBraceDepth = 1;
            }
            else if (condBraceDepth > 0) {
                condBraceDepth += opens - closes;
                if (condBraceDepth < 0)
                    condBraceDepth = 0;
            }
            // 1. Hooks called inside conditionals
            // The trailing test skips lines where "use..." appears after a
            // trailing "//" comment marker.
            if (condBraceDepth > 0 && /\buse[A-Z]\w*\s*\(/.test(trimmed) && !/\/\//.test(trimmed.split("use")[0])) {
                conditionalHooks++;
                issues.push({ severity: "error", message: "Hook called inside conditional — violates Rules of Hooks", file: f.path, line: i + 1, rule: "conditional-hook" });
            }
            // 2. Missing key in .map() returning JSX
            if (/\.map\s*\(/.test(trimmed)) {
                // Look ahead for JSX return without key
                // (scans up to 10 lines from the .map() call).
                const mapBlock = lines.slice(i, Math.min(i + 10, lines.length)).join("\n");
                if (/<\w/.test(mapBlock) && !mapBlock.includes("key=") && !mapBlock.includes("key:")) {
                    missingKeys++;
                    issues.push({ severity: "error", message: "JSX in .map() without key prop", file: f.path, line: i + 1, rule: "missing-key" });
                }
            }
            // 3. index as key
            if (/key=\{(?:i|idx|index)\}/.test(trimmed) || /key=\{.*(?:, *(?:i|idx|index)\))/.test(trimmed)) {
                indexKeys++;
                issues.push({ severity: "warning", message: "Using index as key — can cause rendering bugs with reorderable lists", file: f.path, line: i + 1, rule: "index-key" });
            }
            // 4. Prop spreading ({...props} on DOM elements)
            // Lowercase tag (<div ...) on the same line distinguishes DOM
            // elements from components.
            if (/\{\.\.\.(?!children)\w+\}/.test(trimmed) && /<[a-z]/.test(trimmed)) {
                propSpreading++;
                issues.push({ severity: "warning", message: "Spreading props onto DOM element — can pass unexpected attributes", file: f.path, line: i + 1, rule: "prop-spreading" });
            }
            // 5. Inline arrow functions in JSX event handlers (performance)
            // Counted only; reported below when the total is large.
            if (/on[A-Z]\w*=\{(?:\(\) =>|function)/.test(trimmed)) {
                inlineHandlers++;
            }
        }
    }
    // Only warn about inline handlers if there are many
    if (inlineHandlers > 15) {
        issues.push({ severity: "warning", message: `${inlineHandlers} inline arrow functions in JSX handlers — extract to named functions for readability`, rule: "inline-handlers" });
    }
    const errors = issues.filter((i) => i.severity === "error").length;
    const warnings = issues.filter((i) => i.severity === "warning").length;
    const score = Math.max(0, Math.min(100, 100 - errors * 8 - warnings * 3));
    return {
        name: "react",
        score,
        grade: gradeFromScore(score),
        details: { jsxFiles: files.length, conditionalHooks, missingKeys, indexKeys, propSpreading, inlineHandlers },
        issues,
        duration: Date.now() - start,
    };
}
|
package/dist/runners/secrets.js
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
/** Secret detection — scans for hardcoded keys/tokens in source files. */
|
|
2
|
-
import {
|
|
3
|
-
import { extname, join } from "node:path";
|
|
2
|
+
import { collectAllFiles } from "../fs-utils.js";
|
|
4
3
|
import { gradeFromScore } from "../types.js";
|
|
5
4
|
const SECRET_PATTERNS = [
|
|
6
5
|
{ name: "AWS Access Key", pattern: /AKIA[0-9A-Z]{16}/ },
|
|
@@ -35,26 +34,23 @@ const SECRET_PATTERNS = [
|
|
|
35
34
|
export function runSecrets(cwd) {
|
|
36
35
|
const start = Date.now();
|
|
37
36
|
const issues = [];
|
|
38
|
-
const
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
const lines = content.split("\n");
|
|
37
|
+
const sourceFiles = collectAllFiles(cwd, { extraExts: true });
|
|
38
|
+
for (const sf of sourceFiles) {
|
|
39
|
+
// Skip test files and mock data
|
|
40
|
+
if (sf.isTest || sf.path.includes("__mock"))
|
|
41
|
+
continue;
|
|
42
|
+
const lines = sf.content.split("\n");
|
|
44
43
|
for (let i = 0; i < lines.length; i++) {
|
|
45
44
|
const line = lines[i];
|
|
46
45
|
// Skip comments
|
|
47
46
|
if (line.trim().startsWith("//") || line.trim().startsWith("*"))
|
|
48
47
|
continue;
|
|
49
|
-
// Skip test files and mock data
|
|
50
|
-
if (relPath.includes(".test.") || relPath.includes("__mock"))
|
|
51
|
-
continue;
|
|
52
48
|
for (const { name, pattern } of SECRET_PATTERNS) {
|
|
53
49
|
if (pattern.test(line)) {
|
|
54
50
|
issues.push({
|
|
55
51
|
severity: "error",
|
|
56
52
|
message: `Possible ${name}`,
|
|
57
|
-
file:
|
|
53
|
+
file: sf.path,
|
|
58
54
|
line: i + 1,
|
|
59
55
|
rule: "secret-detected",
|
|
60
56
|
});
|
|
@@ -72,20 +68,3 @@ export function runSecrets(cwd) {
|
|
|
72
68
|
duration: Date.now() - start,
|
|
73
69
|
};
|
|
74
70
|
}
|
|
75
|
-
function collectFiles(dir, out) {
|
|
76
|
-
for (const entry of readdirSync(dir)) {
|
|
77
|
-
if (["node_modules", "dist", ".git", ".vibe-check", "coverage", "test-results"].includes(entry))
|
|
78
|
-
continue;
|
|
79
|
-
const full = join(dir, entry);
|
|
80
|
-
const stat = statSync(full);
|
|
81
|
-
if (stat.isDirectory()) {
|
|
82
|
-
collectFiles(full, out);
|
|
83
|
-
}
|
|
84
|
-
else {
|
|
85
|
-
const ext = extname(entry);
|
|
86
|
-
if ([".ts", ".tsx", ".js", ".jsx", ".json", ".env", ".yaml", ".yml", ".toml"].includes(ext)) {
|
|
87
|
-
out.push(full);
|
|
88
|
-
}
|
|
89
|
-
}
|
|
90
|
-
}
|
|
91
|
-
}
|
package/dist/runners/security.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
/** Security analysis — beyond secrets, checks for vulnerable code patterns. */
|
|
2
|
-
import { existsSync,
|
|
3
|
-
import {
|
|
2
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { getProductionFiles } from "../fs-utils.js";
|
|
4
5
|
import { gradeFromScore } from "../types.js";
|
|
5
6
|
const PATTERNS = [
|
|
6
7
|
// XSS
|
|
@@ -44,7 +45,7 @@ const PATTERNS = [
|
|
|
44
45
|
},
|
|
45
46
|
{
|
|
46
47
|
name: "child_process.exec",
|
|
47
|
-
pattern: /\
|
|
48
|
+
pattern: /\b(?:child_process|cp)\.exec(?:Sync)?\s*\(|(?:^|\s)execSync\s*\(/,
|
|
48
49
|
severity: "warning",
|
|
49
50
|
message: "Command injection risk: prefer execFile with argument array",
|
|
50
51
|
cwe: "CWE-78",
|
|
@@ -105,7 +106,7 @@ const PATTERNS = [
|
|
|
105
106
|
// Sensitive data
|
|
106
107
|
{
|
|
107
108
|
name: "password in URL",
|
|
108
|
-
pattern: /(?:password|secret|token
|
|
109
|
+
pattern: /(?:password|secret|api_?token)=[^&\s'"]{8,}/i,
|
|
109
110
|
severity: "warning",
|
|
110
111
|
message: "Sensitive data in URL query string",
|
|
111
112
|
cwe: "CWE-598",
|
|
@@ -122,17 +123,8 @@ const PATTERNS = [
|
|
|
122
123
|
export function runSecurity(cwd) {
|
|
123
124
|
const start = Date.now();
|
|
124
125
|
const issues = [];
|
|
125
|
-
const
|
|
126
|
-
|
|
127
|
-
for (const dir of dirs) {
|
|
128
|
-
try {
|
|
129
|
-
collectFiles(join(cwd, dir), files);
|
|
130
|
-
}
|
|
131
|
-
catch {
|
|
132
|
-
/* dir doesn't exist */
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
|
-
if (files.length === 0) {
|
|
126
|
+
const sourceFiles = getProductionFiles(cwd);
|
|
127
|
+
if (sourceFiles.length === 0) {
|
|
136
128
|
return {
|
|
137
129
|
name: "security",
|
|
138
130
|
score: 100,
|
|
@@ -143,24 +135,25 @@ export function runSecurity(cwd) {
|
|
|
143
135
|
};
|
|
144
136
|
}
|
|
145
137
|
const cwePrefixes = new Set();
|
|
146
|
-
for (const
|
|
147
|
-
const
|
|
148
|
-
const relPath = file.replace(`${cwd}/`, "");
|
|
149
|
-
const lines = content.split("\n");
|
|
138
|
+
for (const sf of sourceFiles) {
|
|
139
|
+
const lines = sf.content.split("\n");
|
|
150
140
|
for (let i = 0; i < lines.length; i++) {
|
|
151
141
|
const line = lines[i];
|
|
152
142
|
const trimmed = line.trim();
|
|
153
143
|
if (trimmed.startsWith("//") || trimmed.startsWith("*"))
|
|
154
144
|
continue;
|
|
155
|
-
// Skip pattern/config definition lines (prevents false positives on own code)
|
|
145
|
+
// Skip pattern/config definition lines and string-heavy metadata (prevents false positives on own code)
|
|
156
146
|
if (/\bpattern\s*:|name:\s*["']|message:\s*["']|description:\s*["']|risk:\s*["']|recommendation:\s*["']/.test(trimmed))
|
|
157
147
|
continue;
|
|
148
|
+
// Skip lines that are primarily string content (check-meta descriptions, etc.)
|
|
149
|
+
if (/^\s*["'`].*["'`][,;]?\s*$/.test(line))
|
|
150
|
+
continue;
|
|
158
151
|
for (const p of PATTERNS) {
|
|
159
152
|
if (p.pattern.test(line)) {
|
|
160
153
|
issues.push({
|
|
161
154
|
severity: p.severity,
|
|
162
155
|
message: p.message,
|
|
163
|
-
file:
|
|
156
|
+
file: sf.path,
|
|
164
157
|
line: i + 1,
|
|
165
158
|
rule: p.cwe || p.name,
|
|
166
159
|
});
|
|
@@ -200,24 +193,8 @@ export function runSecurity(cwd) {
|
|
|
200
193
|
name: "security",
|
|
201
194
|
score,
|
|
202
195
|
grade: gradeFromScore(score),
|
|
203
|
-
details: { filesScanned:
|
|
196
|
+
details: { filesScanned: sourceFiles.length, patterns: issues.length, cweCategories: cwePrefixes.size, errors, warnings },
|
|
204
197
|
issues,
|
|
205
198
|
duration: Date.now() - start,
|
|
206
199
|
};
|
|
207
200
|
}
|
|
208
|
-
function collectFiles(dir, out) {
|
|
209
|
-
for (const entry of readdirSync(dir)) {
|
|
210
|
-
if (["node_modules", "dist", ".git", ".vibe-check", "coverage", "test-results"].includes(entry))
|
|
211
|
-
continue;
|
|
212
|
-
const full = join(dir, entry);
|
|
213
|
-
if (statSync(full).isDirectory()) {
|
|
214
|
-
collectFiles(full, out);
|
|
215
|
-
}
|
|
216
|
-
else {
|
|
217
|
-
const ext = extname(entry);
|
|
218
|
-
if ([".ts", ".tsx", ".js", ".jsx"].includes(ext) && !entry.includes(".test.") && !entry.includes(".spec.")) {
|
|
219
|
-
out.push(full);
|
|
220
|
-
}
|
|
221
|
-
}
|
|
222
|
-
}
|
|
223
|
-
}
|