@vibecodeqa/cli 0.17.0 → 0.18.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +73 -63
- package/dist/check-meta.d.ts +1 -0
- package/dist/check-meta.js +34 -2
- package/dist/cli.js +35 -10
- package/dist/detect.js +24 -2
- package/dist/fs-utils.d.ts +4 -0
- package/dist/fs-utils.js +12 -6
- package/dist/report/html.d.ts +17 -10
- package/dist/report/html.js +106 -73
- package/dist/report/pages.d.ts +2 -1
- package/dist/report/pages.js +88 -82
- package/dist/report/sarif.d.ts +3 -0
- package/dist/report/sarif.js +67 -0
- package/dist/report/styles.d.ts +1 -1
- package/dist/report/styles.js +82 -36
- package/dist/runners/architecture.d.ts +2 -0
- package/dist/runners/architecture.js +232 -20
- package/dist/runners/code-coherence.d.ts +17 -0
- package/dist/runners/code-coherence.js +39 -0
- package/dist/runners/complexity.js +7 -37
- package/dist/runners/confusion.js +3 -31
- package/dist/runners/context.js +9 -40
- package/dist/runners/dependencies.js +28 -0
- package/dist/runners/doc-coherence.d.ts +14 -0
- package/dist/runners/doc-coherence.js +48 -0
- package/dist/runners/docs.js +7 -32
- package/dist/runners/duplication.js +9 -37
- package/dist/runners/lint.js +17 -0
- package/dist/runners/performance.d.ts +10 -0
- package/dist/runners/performance.js +174 -0
- package/dist/runners/react.js +15 -10
- package/dist/runners/secrets.js +8 -29
- package/dist/runners/security.js +15 -38
- package/dist/runners/standards.js +3 -36
- package/dist/runners/structure.js +35 -55
- package/dist/runners/testing.js +2 -36
- package/dist/runners/type-safety.d.ts +1 -1
- package/dist/runners/type-safety.js +19 -37
- package/dist/runners/types-check.d.ts +1 -1
- package/dist/runners/types-check.js +38 -20
- package/dist/types.d.ts +5 -5
- package/package.json +11 -10
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
/** Performance check — barrel imports, dynamic import opportunities, large bundles.
|
|
2
|
+
*
|
|
3
|
+
* Sub-checks:
|
|
4
|
+
* 1. Barrel import smell — index.ts re-exports that defeat tree-shaking
|
|
5
|
+
* 2. Heavy dependencies — bundled packages known to bloat output
|
|
6
|
+
* 3. Dynamic import opportunities — large imports that could be lazy-loaded
|
|
7
|
+
* 4. CSS-in-JS overhead — detects runtime CSS solutions vs zero-runtime alternatives
|
|
8
|
+
*/
|
|
9
|
+
import { existsSync, readdirSync, statSync } from "node:fs";
|
|
10
|
+
import { join } from "node:path";
|
|
11
|
+
import { getProductionFiles, readDeps } from "../fs-utils.js";
|
|
12
|
+
import { gradeFromScore } from "../types.js";
|
|
13
|
+
// Known bundle-bloating packages: approximate bundled size in KB plus a
// lighter alternative to suggest in the generated issue message.
const HEAVY_DEPS = {
    "moment": { kb: 300, alt: "date-fns or dayjs (2-7KB)" },
    "lodash": { kb: 70, alt: "lodash-es or native methods" },
    "lodash.js": { kb: 70, alt: "lodash-es or native methods" },
    "rxjs": { kb: 50, alt: "only import operators you use" },
    "@fortawesome/fontawesome-svg-core": { kb: 60, alt: "lucide-react or heroicons (tree-shakeable)" },
    "@material-ui/core": { kb: 300, alt: "@mui/material with tree-shaking imports" },
    "chart.js": { kb: 200, alt: "lightweight-charts or uPlot" },
    "firebase": { kb: 200, alt: "firebase/app + only needed modules" },
    "aws-sdk": { kb: 400, alt: "@aws-sdk/client-* (v3 modular)" },
    "underscore": { kb: 25, alt: "native ES methods" },
};
|
|
26
|
+
/**
 * Performance check: scans production source files for bundle-size smells
 * and returns a penalty-scored check result.
 *
 * Sub-checks (in order): barrel files, large single-line destructured
 * imports, heavy dependencies from package.json, static imports of known
 * heavyweight libraries, runtime CSS-in-JS packages, and — when a build
 * output directory exists — its measured size in KB.
 *
 * @param {string} cwd - project root to analyze
 * @returns {object} check result: { name, score, grade, details, issues, duration }
 */
export function runPerformance(cwd) {
    const startedAt = Date.now();
    const issues = [];
    const sourceFiles = getProductionFiles(cwd);
    // Nothing to scan — report a perfect, explicitly-skipped result.
    if (!sourceFiles.length) {
        return {
            name: "performance",
            score: 100,
            grade: "A",
            details: { skipped: true, reason: "no source files" },
            issues: [],
            duration: Date.now() - startedAt,
        };
    }
    const counts = { barrel: 0, heavy: 0, dynamic: 0, cssRuntime: 0 };
    // ── 1. Barrel files: index modules where >80% of non-empty lines are
    // re-exports (and at least 3 of them) ──
    for (const file of sourceFiles) {
        if (file.base !== "index")
            continue;
        const nonEmpty = file.content.split("\n").filter((ln) => ln.trim().length > 0);
        const reExports = nonEmpty.filter((ln) => /^export\s/.test(ln.trim()));
        const mostlyExports = nonEmpty.length > 0 && reExports.length / nonEmpty.length > 0.8;
        if (mostlyExports && reExports.length >= 3) {
            counts.barrel += 1;
            issues.push({
                severity: "warning",
                message: `Barrel file with ${reExports.length} re-exports — defeats tree-shaking in many bundlers`,
                file: file.path,
                rule: "barrel-import",
            });
        }
    }
    // Flag very large (50+ chars of names) destructured imports from relative paths.
    const bigImportRe = /import\s+\{[^}]{50,}\}\s+from\s+['"](\.[^'"]+)['"]/;
    for (const file of sourceFiles) {
        file.content.split("\n").forEach((raw, idx) => {
            if (bigImportRe.test(raw)) {
                issues.push({
                    severity: "info",
                    message: "Large destructured import — if from a barrel, only imported items should be bundled",
                    file: file.path,
                    line: idx + 1,
                    rule: "large-import",
                });
            }
        });
    }
    // ── 2. Heavy dependencies declared in package.json ──
    const deps = readDeps(cwd);
    for (const [depName, meta] of Object.entries(HEAVY_DEPS)) {
        if (!deps[depName])
            continue;
        counts.heavy += 1;
        issues.push({
            severity: "warning",
            message: `${depName} (~${meta.kb}KB) — consider ${meta.alt}`,
            rule: "heavy-dependency",
        });
    }
    // Plain lodash ships CommonJS; lodash-es is the tree-shakeable build.
    if (deps.lodash && !deps["lodash-es"]) {
        issues.push({
            severity: "warning",
            message: "lodash (not lodash-es) — CommonJS build defeats tree-shaking",
            rule: "non-esm-dep",
        });
    }
    // ── 3. Static imports of heavyweight visualization/editor libraries
    // that are good candidates for lazy dynamic import() ──
    const heavyLibRe = /\b(monaco|codemirror|ace-builds|chart\.js|three|@react-three|recharts|d3)\b/;
    for (const file of sourceFiles) {
        file.content.split("\n").forEach((raw, idx) => {
            const stripped = raw.trim();
            if (/^import\s/.test(stripped) && heavyLibRe.test(stripped)) {
                counts.dynamic += 1;
                issues.push({
                    severity: "info",
                    message: "Consider dynamic import() for heavy library — reduces initial bundle",
                    file: file.path,
                    line: idx + 1,
                    rule: "dynamic-import-opportunity",
                });
            }
        });
    }
    // ── 4. Runtime CSS-in-JS packages (add per-render style computation) ──
    for (const pkg of ["styled-components", "@emotion/styled", "@emotion/react"]) {
        if (!deps[pkg])
            continue;
        counts.cssRuntime += 1;
        issues.push({
            severity: "info",
            message: `${pkg} adds runtime CSS overhead — consider Tailwind, CSS Modules, or vanilla-extract`,
            rule: "runtime-css",
        });
    }
    // ── 5. Measure the first build-output directory that exists ──
    let bundleSizeKB = 0;
    const outDir = ["dist", "build", ".next", "out"]
        .map((d) => join(cwd, d))
        .find((p) => existsSync(p));
    if (outDir) {
        try {
            bundleSizeKB = Math.round(dirSizeKB(outDir));
        }
        catch { /* can't read dist */ }
    }
    // Weighted penalty; score clamped to [0, 100].
    const penalty = counts.barrel * 3 + counts.heavy * 8 + counts.dynamic * 2 + counts.cssRuntime * 2;
    const score = Math.max(0, Math.min(100, 100 - penalty));
    return {
        name: "performance",
        score,
        grade: gradeFromScore(score),
        details: {
            filesScanned: sourceFiles.length,
            barrelImports: counts.barrel,
            heavyDeps: counts.heavy,
            dynamicOpportunities: counts.dynamic,
            cssInJsRuntime: counts.cssRuntime,
            ...(bundleSizeKB > 0 ? { bundleSizeKB } : {}),
        },
        issues,
        duration: Date.now() - startedAt,
    };
}
|
|
161
|
+
/**
 * Recursively computes the total size of all regular files under `dir`,
 * in kilobytes.
 *
 * Bug fix: the previous version added `dirSizeKB(subdir)` — a value
 * already divided by 1024 — into an accumulator of raw byte sizes, then
 * divided the mixed total by 1024 again on return. Every level of
 * nesting was therefore under-counted by a factor of 1024. Bytes are now
 * accumulated throughout and converted to KB exactly once.
 *
 * @param {string} dir - directory to measure (must exist and be readable;
 *   callers wrap this in try/catch for unreadable trees)
 * @returns {number} sum of file sizes under dir, divided by 1024
 */
function dirSizeKB(dir) {
    return dirSizeBytes(dir) / 1024;
}
// Sum of file sizes in bytes under dir; directory entries themselves
// contribute nothing beyond their contents (same as the original intent).
function dirSizeBytes(dir) {
    let total = 0;
    for (const entry of readdirSync(dir)) {
        const full = join(dir, entry);
        const stat = statSync(full);
        total += stat.isDirectory() ? dirSizeBytes(full) : stat.size;
    }
    return total;
}
|
package/dist/runners/react.js
CHANGED
|
@@ -18,23 +18,28 @@ export function runReact(cwd, stack) {
|
|
|
18
18
|
let indexKeys = 0;
|
|
19
19
|
for (const f of files) {
|
|
20
20
|
const lines = f.content.split("\n");
|
|
21
|
-
// Track
|
|
22
|
-
let
|
|
21
|
+
// Track brace depth inside conditional blocks
|
|
22
|
+
let condBraceDepth = 0; // > 0 means we're inside a conditional's body
|
|
23
23
|
for (let i = 0; i < lines.length; i++) {
|
|
24
24
|
const line = lines[i];
|
|
25
25
|
const trimmed = line.trim();
|
|
26
26
|
// Skip comments
|
|
27
27
|
if (trimmed.startsWith("//") || trimmed.startsWith("*"))
|
|
28
28
|
continue;
|
|
29
|
-
//
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
29
|
+
// Count braces on this line
|
|
30
|
+
const opens = (trimmed.match(/\{/g) || []).length;
|
|
31
|
+
const closes = (trimmed.match(/\}/g) || []).length;
|
|
32
|
+
// Enter conditional: set depth to 1 on the opening brace
|
|
33
|
+
if (/\b(if|else|switch)\s*[\s(]/.test(trimmed) && opens > 0) {
|
|
34
|
+
condBraceDepth = 1;
|
|
35
|
+
}
|
|
36
|
+
else if (condBraceDepth > 0) {
|
|
37
|
+
condBraceDepth += opens - closes;
|
|
38
|
+
if (condBraceDepth < 0)
|
|
39
|
+
condBraceDepth = 0;
|
|
40
|
+
}
|
|
36
41
|
// 1. Hooks called inside conditionals
|
|
37
|
-
if (
|
|
42
|
+
if (condBraceDepth > 0 && /\buse[A-Z]\w*\s*\(/.test(trimmed) && !/\/\//.test(trimmed.split("use")[0])) {
|
|
38
43
|
conditionalHooks++;
|
|
39
44
|
issues.push({ severity: "error", message: "Hook called inside conditional — violates Rules of Hooks", file: f.path, line: i + 1, rule: "conditional-hook" });
|
|
40
45
|
}
|
package/dist/runners/secrets.js
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
/** Secret detection — scans for hardcoded keys/tokens in source files. */
|
|
2
|
-
import {
|
|
3
|
-
import { extname, join } from "node:path";
|
|
2
|
+
import { collectAllFiles } from "../fs-utils.js";
|
|
4
3
|
import { gradeFromScore } from "../types.js";
|
|
5
4
|
const SECRET_PATTERNS = [
|
|
6
5
|
{ name: "AWS Access Key", pattern: /AKIA[0-9A-Z]{16}/ },
|
|
@@ -35,26 +34,23 @@ const SECRET_PATTERNS = [
|
|
|
35
34
|
export function runSecrets(cwd) {
|
|
36
35
|
const start = Date.now();
|
|
37
36
|
const issues = [];
|
|
38
|
-
const
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
const lines = content.split("\n");
|
|
37
|
+
const sourceFiles = collectAllFiles(cwd, { extraExts: true });
|
|
38
|
+
for (const sf of sourceFiles) {
|
|
39
|
+
// Skip test files and mock data
|
|
40
|
+
if (sf.isTest || sf.path.includes("__mock"))
|
|
41
|
+
continue;
|
|
42
|
+
const lines = sf.content.split("\n");
|
|
44
43
|
for (let i = 0; i < lines.length; i++) {
|
|
45
44
|
const line = lines[i];
|
|
46
45
|
// Skip comments
|
|
47
46
|
if (line.trim().startsWith("//") || line.trim().startsWith("*"))
|
|
48
47
|
continue;
|
|
49
|
-
// Skip test files and mock data
|
|
50
|
-
if (relPath.includes(".test.") || relPath.includes("__mock"))
|
|
51
|
-
continue;
|
|
52
48
|
for (const { name, pattern } of SECRET_PATTERNS) {
|
|
53
49
|
if (pattern.test(line)) {
|
|
54
50
|
issues.push({
|
|
55
51
|
severity: "error",
|
|
56
52
|
message: `Possible ${name}`,
|
|
57
|
-
file:
|
|
53
|
+
file: sf.path,
|
|
58
54
|
line: i + 1,
|
|
59
55
|
rule: "secret-detected",
|
|
60
56
|
});
|
|
@@ -72,20 +68,3 @@ export function runSecrets(cwd) {
|
|
|
72
68
|
duration: Date.now() - start,
|
|
73
69
|
};
|
|
74
70
|
}
|
|
75
|
-
function collectFiles(dir, out) {
|
|
76
|
-
for (const entry of readdirSync(dir)) {
|
|
77
|
-
if (["node_modules", "dist", ".git", ".vibe-check", "coverage", "test-results"].includes(entry))
|
|
78
|
-
continue;
|
|
79
|
-
const full = join(dir, entry);
|
|
80
|
-
const stat = statSync(full);
|
|
81
|
-
if (stat.isDirectory()) {
|
|
82
|
-
collectFiles(full, out);
|
|
83
|
-
}
|
|
84
|
-
else {
|
|
85
|
-
const ext = extname(entry);
|
|
86
|
-
if ([".ts", ".tsx", ".js", ".jsx", ".json", ".env", ".yaml", ".yml", ".toml"].includes(ext)) {
|
|
87
|
-
out.push(full);
|
|
88
|
-
}
|
|
89
|
-
}
|
|
90
|
-
}
|
|
91
|
-
}
|
package/dist/runners/security.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
/** Security analysis — beyond secrets, checks for vulnerable code patterns. */
|
|
2
|
-
import { existsSync,
|
|
3
|
-
import {
|
|
2
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { getProductionFiles } from "../fs-utils.js";
|
|
4
5
|
import { gradeFromScore } from "../types.js";
|
|
5
6
|
const PATTERNS = [
|
|
6
7
|
// XSS
|
|
@@ -44,7 +45,7 @@ const PATTERNS = [
|
|
|
44
45
|
},
|
|
45
46
|
{
|
|
46
47
|
name: "child_process.exec",
|
|
47
|
-
pattern: /\
|
|
48
|
+
pattern: /\b(?:child_process|cp)\.exec(?:Sync)?\s*\(|(?:^|\s)execSync\s*\(/,
|
|
48
49
|
severity: "warning",
|
|
49
50
|
message: "Command injection risk: prefer execFile with argument array",
|
|
50
51
|
cwe: "CWE-78",
|
|
@@ -105,7 +106,7 @@ const PATTERNS = [
|
|
|
105
106
|
// Sensitive data
|
|
106
107
|
{
|
|
107
108
|
name: "password in URL",
|
|
108
|
-
pattern: /(?:password|secret|token
|
|
109
|
+
pattern: /(?:password|secret|api_?token)=[^&\s'"]{8,}/i,
|
|
109
110
|
severity: "warning",
|
|
110
111
|
message: "Sensitive data in URL query string",
|
|
111
112
|
cwe: "CWE-598",
|
|
@@ -122,17 +123,8 @@ const PATTERNS = [
|
|
|
122
123
|
export function runSecurity(cwd) {
|
|
123
124
|
const start = Date.now();
|
|
124
125
|
const issues = [];
|
|
125
|
-
const
|
|
126
|
-
|
|
127
|
-
for (const dir of dirs) {
|
|
128
|
-
try {
|
|
129
|
-
collectFiles(join(cwd, dir), files);
|
|
130
|
-
}
|
|
131
|
-
catch {
|
|
132
|
-
/* dir doesn't exist */
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
|
-
if (files.length === 0) {
|
|
126
|
+
const sourceFiles = getProductionFiles(cwd);
|
|
127
|
+
if (sourceFiles.length === 0) {
|
|
136
128
|
return {
|
|
137
129
|
name: "security",
|
|
138
130
|
score: 100,
|
|
@@ -143,24 +135,25 @@ export function runSecurity(cwd) {
|
|
|
143
135
|
};
|
|
144
136
|
}
|
|
145
137
|
const cwePrefixes = new Set();
|
|
146
|
-
for (const
|
|
147
|
-
const
|
|
148
|
-
const relPath = file.replace(`${cwd}/`, "");
|
|
149
|
-
const lines = content.split("\n");
|
|
138
|
+
for (const sf of sourceFiles) {
|
|
139
|
+
const lines = sf.content.split("\n");
|
|
150
140
|
for (let i = 0; i < lines.length; i++) {
|
|
151
141
|
const line = lines[i];
|
|
152
142
|
const trimmed = line.trim();
|
|
153
143
|
if (trimmed.startsWith("//") || trimmed.startsWith("*"))
|
|
154
144
|
continue;
|
|
155
|
-
// Skip pattern/config definition lines (prevents false positives on own code)
|
|
145
|
+
// Skip pattern/config definition lines and string-heavy metadata (prevents false positives on own code)
|
|
156
146
|
if (/\bpattern\s*:|name:\s*["']|message:\s*["']|description:\s*["']|risk:\s*["']|recommendation:\s*["']/.test(trimmed))
|
|
157
147
|
continue;
|
|
148
|
+
// Skip lines that are primarily string content (check-meta descriptions, etc.)
|
|
149
|
+
if (/^\s*["'`].*["'`][,;]?\s*$/.test(line))
|
|
150
|
+
continue;
|
|
158
151
|
for (const p of PATTERNS) {
|
|
159
152
|
if (p.pattern.test(line)) {
|
|
160
153
|
issues.push({
|
|
161
154
|
severity: p.severity,
|
|
162
155
|
message: p.message,
|
|
163
|
-
file:
|
|
156
|
+
file: sf.path,
|
|
164
157
|
line: i + 1,
|
|
165
158
|
rule: p.cwe || p.name,
|
|
166
159
|
});
|
|
@@ -200,24 +193,8 @@ export function runSecurity(cwd) {
|
|
|
200
193
|
name: "security",
|
|
201
194
|
score,
|
|
202
195
|
grade: gradeFromScore(score),
|
|
203
|
-
details: { filesScanned:
|
|
196
|
+
details: { filesScanned: sourceFiles.length, patterns: issues.length, cweCategories: cwePrefixes.size, errors, warnings },
|
|
204
197
|
issues,
|
|
205
198
|
duration: Date.now() - start,
|
|
206
199
|
};
|
|
207
200
|
}
|
|
208
|
-
function collectFiles(dir, out) {
|
|
209
|
-
for (const entry of readdirSync(dir)) {
|
|
210
|
-
if (["node_modules", "dist", ".git", ".vibe-check", "coverage", "test-results"].includes(entry))
|
|
211
|
-
continue;
|
|
212
|
-
const full = join(dir, entry);
|
|
213
|
-
if (statSync(full).isDirectory()) {
|
|
214
|
-
collectFiles(full, out);
|
|
215
|
-
}
|
|
216
|
-
else {
|
|
217
|
-
const ext = extname(entry);
|
|
218
|
-
if ([".ts", ".tsx", ".js", ".jsx"].includes(ext) && !entry.includes(".test.") && !entry.includes(".spec.")) {
|
|
219
|
-
out.push(full);
|
|
220
|
-
}
|
|
221
|
-
}
|
|
222
|
-
}
|
|
223
|
-
}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
/** Code standards check — naming conventions, anti-patterns, config hygiene. */
|
|
2
|
-
import {
|
|
2
|
+
import { readFileSync } from "node:fs";
|
|
3
3
|
import { basename, extname, join } from "node:path";
|
|
4
|
+
import { getProductionFiles, readDeps } from "../fs-utils.js";
|
|
4
5
|
import { gradeFromScore } from "../types.js";
|
|
5
6
|
const CODE_SMELLS = [
|
|
6
7
|
{
|
|
@@ -40,16 +41,7 @@ export function runStandards(cwd, stack) {
|
|
|
40
41
|
const start = Date.now();
|
|
41
42
|
const issues = [];
|
|
42
43
|
// Collect source files
|
|
43
|
-
const files =
|
|
44
|
-
const dirs = ["src", "web/src"];
|
|
45
|
-
for (const dir of dirs) {
|
|
46
|
-
try {
|
|
47
|
-
collectFiles(join(cwd, dir), cwd, files);
|
|
48
|
-
}
|
|
49
|
-
catch {
|
|
50
|
-
/* dir doesn't exist */
|
|
51
|
-
}
|
|
52
|
-
}
|
|
44
|
+
const files = getProductionFiles(cwd);
|
|
53
45
|
// ── File naming conventions ──
|
|
54
46
|
let namingViolations = 0;
|
|
55
47
|
for (const f of files) {
|
|
@@ -170,28 +162,3 @@ export function runStandards(cwd, stack) {
|
|
|
170
162
|
duration: Date.now() - start,
|
|
171
163
|
};
|
|
172
164
|
}
|
|
173
|
-
function collectFiles(dir, cwd, out) {
|
|
174
|
-
for (const entry of readdirSync(dir)) {
|
|
175
|
-
if (entry === "node_modules" || entry === "dist" || entry === ".git")
|
|
176
|
-
continue;
|
|
177
|
-
const full = join(dir, entry);
|
|
178
|
-
if (statSync(full).isDirectory()) {
|
|
179
|
-
collectFiles(full, cwd, out);
|
|
180
|
-
}
|
|
181
|
-
else {
|
|
182
|
-
const ext = extname(entry);
|
|
183
|
-
if ([".ts", ".tsx", ".js", ".jsx"].includes(ext) && !entry.includes(".test.") && !entry.includes(".spec.")) {
|
|
184
|
-
out.push({ path: full.replace(`${cwd}/`, ""), content: readFileSync(full, "utf-8") });
|
|
185
|
-
}
|
|
186
|
-
}
|
|
187
|
-
}
|
|
188
|
-
}
|
|
189
|
-
function readDeps(cwd) {
|
|
190
|
-
try {
|
|
191
|
-
const pkg = JSON.parse(readFileSync(join(cwd, "package.json"), "utf-8"));
|
|
192
|
-
return { ...pkg.dependencies, ...pkg.devDependencies };
|
|
193
|
-
}
|
|
194
|
-
catch {
|
|
195
|
-
return {};
|
|
196
|
-
}
|
|
197
|
-
}
|
|
@@ -1,19 +1,29 @@
|
|
|
1
1
|
/** Project structure check — does the repo have standard files and conventions? */
|
|
2
|
-
import { existsSync,
|
|
3
|
-
import {
|
|
2
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { collectSourceFiles } from "../fs-utils.js";
|
|
4
5
|
import { gradeFromScore } from "../types.js";
|
|
5
|
-
const
|
|
6
|
+
const NODE_FILES = [
|
|
6
7
|
{ name: "package.json", path: "package.json", required: true, description: "Package manifest" },
|
|
7
8
|
{ name: "tsconfig.json", path: "tsconfig.json", required: false, description: "TypeScript configuration" },
|
|
8
9
|
{ name: "LICENSE", path: "LICENSE", required: true, description: "Open source license" },
|
|
9
10
|
{ name: ".gitignore", path: ".gitignore", required: true, description: "Git ignore rules" },
|
|
10
11
|
{ name: "README.md", path: "README.md", required: false, description: "Project documentation" },
|
|
11
12
|
];
|
|
13
|
+
const DART_FILES = [
|
|
14
|
+
{ name: "pubspec.yaml", path: "pubspec.yaml", required: true, description: "Dart package manifest" },
|
|
15
|
+
{ name: "analysis_options.yaml", path: "analysis_options.yaml", required: true, description: "Dart analysis options" },
|
|
16
|
+
{ name: "LICENSE", path: "LICENSE", required: true, description: "Open source license" },
|
|
17
|
+
{ name: ".gitignore", path: ".gitignore", required: true, description: "Git ignore rules" },
|
|
18
|
+
{ name: "README.md", path: "README.md", required: false, description: "Project documentation" },
|
|
19
|
+
];
|
|
12
20
|
export function runStructure(cwd, stack) {
|
|
13
21
|
const start = Date.now();
|
|
14
22
|
const issues = [];
|
|
15
23
|
const found = [];
|
|
16
24
|
const missing = [];
|
|
25
|
+
const isDart = stack.language === "dart";
|
|
26
|
+
const EXPECTED_FILES = isDart ? DART_FILES : NODE_FILES;
|
|
17
27
|
// Check standard files
|
|
18
28
|
for (const fc of EXPECTED_FILES) {
|
|
19
29
|
// tsconfig is required only for TS projects
|
|
@@ -31,24 +41,24 @@ export function runStructure(cwd, stack) {
|
|
|
31
41
|
}
|
|
32
42
|
}
|
|
33
43
|
// Check for lockfile
|
|
34
|
-
const
|
|
44
|
+
const lockfiles = isDart ? ["pubspec.lock"] : ["pnpm-lock.yaml", "package-lock.json", "yarn.lock", "bun.lockb"];
|
|
45
|
+
const hasLock = lockfiles.some((f) => existsSync(join(cwd, f)));
|
|
35
46
|
if (hasLock) {
|
|
36
47
|
found.push("lockfile");
|
|
37
48
|
}
|
|
38
49
|
else {
|
|
39
50
|
issues.push({ severity: "warning", message: "No lockfile found — builds may not be reproducible", rule: "missing-lockfile" });
|
|
40
51
|
}
|
|
41
|
-
// Check for
|
|
42
|
-
const
|
|
52
|
+
// Check for source directory
|
|
53
|
+
const srcDirs = isDart ? ["lib"] : ["src", "web/src"];
|
|
54
|
+
const hasSrc = srcDirs.some((d) => existsSync(join(cwd, d)));
|
|
43
55
|
if (!hasSrc) {
|
|
44
|
-
issues.push({ severity: "error", message:
|
|
56
|
+
issues.push({ severity: "error", message: `No ${srcDirs[0]}/ directory found`, rule: "no-src" });
|
|
45
57
|
}
|
|
46
58
|
// Count source vs test files
|
|
47
|
-
const
|
|
48
|
-
const
|
|
49
|
-
|
|
50
|
-
const srcCount = srcFiles.length;
|
|
51
|
-
const testCount = testFiles.length;
|
|
59
|
+
const allFiles = collectSourceFiles(cwd, { includeTests: true });
|
|
60
|
+
const srcCount = allFiles.filter((f) => !f.isTest).length;
|
|
61
|
+
const testCount = allFiles.filter((f) => f.isTest).length;
|
|
52
62
|
const testRatio = srcCount > 0 ? testCount / srcCount : 0;
|
|
53
63
|
if (testCount === 0 && srcCount > 0) {
|
|
54
64
|
issues.push({ severity: "error", message: `No test files found (${srcCount} source files with zero tests)`, rule: "no-tests" });
|
|
@@ -60,17 +70,19 @@ export function runStructure(cwd, stack) {
|
|
|
60
70
|
rule: "low-test-ratio",
|
|
61
71
|
});
|
|
62
72
|
}
|
|
63
|
-
// Check
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
73
|
+
// Check manifest has essential config
|
|
74
|
+
if (!isDart) {
|
|
75
|
+
try {
|
|
76
|
+
const pkg = JSON.parse(readFileSync(join(cwd, "package.json"), "utf-8"));
|
|
77
|
+
const scripts = pkg.scripts || {};
|
|
78
|
+
if (!scripts.test)
|
|
79
|
+
issues.push({ severity: "warning", message: "No 'test' script in package.json", rule: "no-test-script" });
|
|
80
|
+
if (!scripts.build && !scripts.dev)
|
|
81
|
+
issues.push({ severity: "info", message: "No 'build' or 'dev' script in package.json", rule: "no-build-script" });
|
|
82
|
+
}
|
|
83
|
+
catch {
|
|
84
|
+
/* no package.json or parse error */
|
|
85
|
+
}
|
|
74
86
|
}
|
|
75
87
|
const errors = issues.filter((i) => i.severity === "error").length;
|
|
76
88
|
const warnings = issues.filter((i) => i.severity === "warning").length;
|
|
@@ -84,35 +96,3 @@ export function runStructure(cwd, stack) {
|
|
|
84
96
|
duration: Date.now() - start,
|
|
85
97
|
};
|
|
86
98
|
}
|
|
87
|
-
function collectAll(cwd, src, test) {
|
|
88
|
-
const dirs = ["src", "web/src"];
|
|
89
|
-
for (const dir of dirs) {
|
|
90
|
-
try {
|
|
91
|
-
walk(join(cwd, dir), src, test);
|
|
92
|
-
}
|
|
93
|
-
catch {
|
|
94
|
-
/* dir doesn't exist */
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
}
|
|
98
|
-
function walk(dir, src, test) {
|
|
99
|
-
for (const entry of readdirSync(dir)) {
|
|
100
|
-
if (entry === "node_modules" || entry === "dist")
|
|
101
|
-
continue;
|
|
102
|
-
const full = join(dir, entry);
|
|
103
|
-
if (statSync(full).isDirectory()) {
|
|
104
|
-
walk(full, src, test);
|
|
105
|
-
}
|
|
106
|
-
else {
|
|
107
|
-
const ext = extname(entry);
|
|
108
|
-
if ([".ts", ".tsx", ".js", ".jsx"].includes(ext)) {
|
|
109
|
-
if (entry.includes(".test.") || entry.includes(".spec.")) {
|
|
110
|
-
test.push(full);
|
|
111
|
-
}
|
|
112
|
-
else {
|
|
113
|
-
src.push(full);
|
|
114
|
-
}
|
|
115
|
-
}
|
|
116
|
-
}
|
|
117
|
-
}
|
|
118
|
-
}
|
package/dist/runners/testing.js
CHANGED
|
@@ -10,6 +10,7 @@
|
|
|
10
10
|
*/
|
|
11
11
|
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
|
|
12
12
|
import { basename, extname, join } from "node:path";
|
|
13
|
+
import { getProductionFiles, readDeps } from "../fs-utils.js";
|
|
13
14
|
import { gradeFromScore } from "../types.js";
|
|
14
15
|
import { run } from "./exec.js";
|
|
15
16
|
// ── Classification rules ──
|
|
@@ -88,33 +89,7 @@ function walkTests(dir, cwd, out) {
|
|
|
88
89
|
}
|
|
89
90
|
}
|
|
90
91
|
function findSourceFiles(cwd) {
|
|
91
|
-
|
|
92
|
-
const dirs = ["src", "web/src"];
|
|
93
|
-
for (const dir of dirs) {
|
|
94
|
-
try {
|
|
95
|
-
walkSource(join(cwd, dir), cwd, files);
|
|
96
|
-
}
|
|
97
|
-
catch {
|
|
98
|
-
/* dir doesn't exist */
|
|
99
|
-
}
|
|
100
|
-
}
|
|
101
|
-
return files;
|
|
102
|
-
}
|
|
103
|
-
function walkSource(dir, cwd, out) {
|
|
104
|
-
for (const entry of readdirSync(dir)) {
|
|
105
|
-
if (entry === "node_modules" || entry === "dist")
|
|
106
|
-
continue;
|
|
107
|
-
const full = join(dir, entry);
|
|
108
|
-
if (statSync(full).isDirectory()) {
|
|
109
|
-
walkSource(full, cwd, out);
|
|
110
|
-
}
|
|
111
|
-
else {
|
|
112
|
-
const ext = extname(entry);
|
|
113
|
-
if ([".ts", ".tsx", ".js", ".jsx"].includes(ext) && !entry.includes(".test.") && !entry.includes(".spec.")) {
|
|
114
|
-
out.push(full.replace(`${cwd}/`, ""));
|
|
115
|
-
}
|
|
116
|
-
}
|
|
117
|
-
}
|
|
92
|
+
return getProductionFiles(cwd).map((f) => f.path);
|
|
118
93
|
}
|
|
119
94
|
// ── Pairing analysis ──
|
|
120
95
|
function computePairing(srcFiles, testFiles) {
|
|
@@ -151,15 +126,6 @@ function detectE2E(cwd) {
|
|
|
151
126
|
}
|
|
152
127
|
return { tool: "none", configured: false };
|
|
153
128
|
}
|
|
154
|
-
function readDeps(cwd) {
|
|
155
|
-
try {
|
|
156
|
-
const pkg = JSON.parse(readFileSync(join(cwd, "package.json"), "utf-8"));
|
|
157
|
-
return { ...pkg.dependencies, ...pkg.devDependencies };
|
|
158
|
-
}
|
|
159
|
-
catch {
|
|
160
|
-
return {};
|
|
161
|
-
}
|
|
162
|
-
}
|
|
163
129
|
// ── Coverage collection ──
|
|
164
130
|
function collectCoverage(cwd, stack) {
|
|
165
131
|
if (stack.testRunner === "none")
|
|
@@ -1,3 +1,3 @@
|
|
|
1
1
|
/** Type safety check — count unsafe patterns: `as any`, explicit `any`, non-null assertions. */
|
|
2
2
|
import type { CheckResult } from "../types.js";
|
|
3
|
-
export declare function runTypeSafety(cwd: string): CheckResult;
|
|
3
|
+
export declare function runTypeSafety(cwd: string, isDart?: boolean): CheckResult;
|