popeye-cli 1.8.0 → 1.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -3
- package/cheatsheet.md +33 -0
- package/dist/cli/commands/index.d.ts +1 -0
- package/dist/cli/commands/index.d.ts.map +1 -1
- package/dist/cli/commands/index.js +1 -0
- package/dist/cli/commands/index.js.map +1 -1
- package/dist/cli/commands/review.d.ts +31 -0
- package/dist/cli/commands/review.d.ts.map +1 -0
- package/dist/cli/commands/review.js +156 -0
- package/dist/cli/commands/review.js.map +1 -0
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/index.js +2 -1
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/interactive.d.ts.map +1 -1
- package/dist/cli/interactive.js +122 -61
- package/dist/cli/interactive.js.map +1 -1
- package/dist/types/audit.d.ts +623 -0
- package/dist/types/audit.d.ts.map +1 -0
- package/dist/types/audit.js +240 -0
- package/dist/types/audit.js.map +1 -0
- package/dist/types/workflow.d.ts +15 -0
- package/dist/types/workflow.d.ts.map +1 -1
- package/dist/types/workflow.js +5 -0
- package/dist/types/workflow.js.map +1 -1
- package/dist/workflow/audit-analyzer.d.ts +58 -0
- package/dist/workflow/audit-analyzer.d.ts.map +1 -0
- package/dist/workflow/audit-analyzer.js +438 -0
- package/dist/workflow/audit-analyzer.js.map +1 -0
- package/dist/workflow/audit-mode.d.ts +28 -0
- package/dist/workflow/audit-mode.d.ts.map +1 -0
- package/dist/workflow/audit-mode.js +169 -0
- package/dist/workflow/audit-mode.js.map +1 -0
- package/dist/workflow/audit-recovery.d.ts +61 -0
- package/dist/workflow/audit-recovery.d.ts.map +1 -0
- package/dist/workflow/audit-recovery.js +242 -0
- package/dist/workflow/audit-recovery.js.map +1 -0
- package/dist/workflow/audit-reporter.d.ts +65 -0
- package/dist/workflow/audit-reporter.d.ts.map +1 -0
- package/dist/workflow/audit-reporter.js +301 -0
- package/dist/workflow/audit-reporter.js.map +1 -0
- package/dist/workflow/audit-scanner.d.ts +87 -0
- package/dist/workflow/audit-scanner.d.ts.map +1 -0
- package/dist/workflow/audit-scanner.js +768 -0
- package/dist/workflow/audit-scanner.js.map +1 -0
- package/dist/workflow/index.d.ts +5 -0
- package/dist/workflow/index.d.ts.map +1 -1
- package/dist/workflow/index.js +5 -0
- package/dist/workflow/index.js.map +1 -1
- package/package.json +1 -1
- package/src/cli/commands/index.ts +1 -0
- package/src/cli/commands/review.ts +187 -0
- package/src/cli/index.ts +2 -0
- package/src/cli/interactive.ts +72 -4
- package/src/types/audit.ts +294 -0
- package/src/types/workflow.ts +15 -0
- package/src/workflow/audit-analyzer.ts +510 -0
- package/src/workflow/audit-mode.ts +240 -0
- package/src/workflow/audit-recovery.ts +284 -0
- package/src/workflow/audit-reporter.ts +370 -0
- package/src/workflow/audit-scanner.ts +873 -0
- package/src/workflow/index.ts +5 -0
- package/tests/cli/commands/review.test.ts +52 -0
- package/tests/types/audit.test.ts +250 -0
- package/tests/workflow/audit-analyzer.test.ts +281 -0
- package/tests/workflow/audit-mode.test.ts +114 -0
- package/tests/workflow/audit-recovery.test.ts +237 -0
- package/tests/workflow/audit-reporter.test.ts +254 -0
- package/tests/workflow/audit-scanner.test.ts +270 -0
|
@@ -0,0 +1,768 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deterministic project scanner for the audit system.
|
|
3
|
+
*
|
|
4
|
+
* Scans the filesystem to detect workspace composition, per-component structure,
|
|
5
|
+
* dependency manifests, route files, LOC, and FE<->BE wiring mismatches.
|
|
6
|
+
* Reads docs in priority order: CLAUDE.md -> README.md -> other docs.
|
|
7
|
+
*/
|
|
8
|
+
import { promises as fs } from 'node:fs';
|
|
9
|
+
import path from 'node:path';
|
|
10
|
+
import { isWorkspace } from '../types/project.js';
|
|
11
|
+
// ---------------------------------------------------------------------------
// Constants (mirrors project-structure.ts patterns)
// ---------------------------------------------------------------------------
// Directory names that are never descended into while walking the tree.
const SKIP_DIRS = new Set([
    'node_modules', 'dist', 'build', '.git', '__pycache__', '.venv', 'venv',
    '.next', '.turbo', '.cache', 'coverage', 'out', '.vercel', '.popeye',
]);
// File extensions treated as scannable source code (JS/TS family plus Python).
const SOURCE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx', '.py']);
// Patterns that classify a relative path as a test file rather than source.
// NOTE(review): the directory-based patterns assume '/' separators — paths
// produced with '\\' on Windows will not match them.
const TEST_PATTERNS = [
    /\.test\.[jt]sx?$/,
    /\.spec\.[jt]sx?$/,
    /test_.*\.py$/,
    /.*_test\.py$/,
    /tests?\/.*\.[jt]sx?$/,
    /tests?\/.*\.py$/,
];
// Well-known configuration file names recognized during project scans.
const CONFIG_FILES = new Set([
    'package.json', 'tsconfig.json', 'vite.config.ts', 'vite.config.js',
    'next.config.js', 'next.config.mjs', 'next.config.ts',
    'tailwind.config.js', 'tailwind.config.ts', 'postcss.config.js',
    'pyproject.toml', 'setup.py', 'setup.cfg', 'requirements.txt',
    'docker-compose.yml', 'docker-compose.yaml', 'Dockerfile',
    '.env.example', '.env.local.example', '.eslintrc.json', '.eslintrc.js',
    'jest.config.ts', 'jest.config.js', 'vitest.config.ts',
]);
// Env-var names a frontend typically uses to point at its backend API base.
const FE_API_ENV_PATTERNS = [
    /^VITE_API_URL$/i,
    /^NEXT_PUBLIC_API_URL$/i,
    /^REACT_APP_API_URL$/i,
    /^VITE_API_BASE_URL$/i,
    /^NEXT_PUBLIC_API_BASE_URL$/i,
    /^VITE_BACKEND_URL$/i,
    /^NEXT_PUBLIC_BACKEND_URL$/i,
];
// Cap on entries rendered by the tree builder before the list is truncated.
const MAX_TREE_ENTRIES = 50;
// Max characters kept when excerpting an individual doc file.
const MAX_FILE_EXCERPT = 2000;
// Max characters read from a priority documentation file.
const MAX_DOC_READ = 8000;
|
|
48
|
+
// ---------------------------------------------------------------------------
|
|
49
|
+
// File utilities
|
|
50
|
+
// ---------------------------------------------------------------------------
|
|
51
|
+
/**
 * Check whether a filesystem path exists (is accessible).
 *
 * @param p - Path to check.
 * @returns True if fs.access succeeds, false on any error.
 */
async function pathExists(p) {
    // fs.access rejects when the path is missing or unreadable; map both
    // outcomes onto a boolean instead of letting the rejection propagate.
    return fs.access(p).then(
        () => true,
        () => false,
    );
}
|
|
66
|
+
/**
 * Read a UTF-8 file, truncating the content to maxLen characters.
 *
 * @param filePath - Absolute file path.
 * @param maxLen - Maximum characters to return before truncation.
 * @returns File content (possibly with a "... (truncated)" suffix), or
 *   undefined when the file cannot be read.
 */
async function safeRead(filePath, maxLen = MAX_DOC_READ) {
    let text;
    try {
        text = await fs.readFile(filePath, 'utf-8');
    } catch {
        // Missing or unreadable file — callers treat undefined as "no content".
        return undefined;
    }
    if (text.length <= maxLen) {
        return text;
    }
    return `${text.slice(0, maxLen)}\n... (truncated)`;
}
|
|
82
|
+
/**
 * Count the number of lines in a file.
 *
 * An empty file counts as one line (same convention as split('\n').length).
 *
 * @param filePath - Absolute file path.
 * @returns Line count, or 0 when the file cannot be read.
 */
async function countFileLines(filePath) {
    try {
        const text = await fs.readFile(filePath, 'utf-8');
        // Count newline characters directly instead of materializing an array.
        let lines = 1;
        for (const ch of text) {
            if (ch === '\n') {
                lines += 1;
            }
        }
        return lines;
    } catch {
        // Unreadable file contributes nothing to LOC totals.
        return 0;
    }
}
|
|
97
|
+
/**
 * Check whether a relative path matches known test-file naming patterns.
 *
 * Note: the directory-based checks expect '/' separators in relPath.
 *
 * @param relPath - Relative file path.
 * @returns True if the path looks like a test file.
 */
function isTestFile(relPath) {
    return (
        /\.test\.[jt]sx?$/.test(relPath)
        || /\.spec\.[jt]sx?$/.test(relPath)
        || /test_.*\.py$/.test(relPath)
        || /.*_test\.py$/.test(relPath)
        || /tests?\/.*\.[jt]sx?$/.test(relPath)
        || /tests?\/.*\.py$/.test(relPath)
    );
}
|
|
106
|
+
/**
 * Recursively walk a directory, collecting files and subdirectories.
 *
 * Entries are returned in depth-first pre-order (each directory entry is
 * emitted before its contents). Directories listed in SKIP_DIRS are pruned.
 * Unreadable directories are silently skipped.
 *
 * @param rootDir - Root directory to walk.
 * @param maxDepth - Maximum recursion depth.
 * @returns Array of { relativePath, absolutePath, isDir } entries.
 */
async function walkDir(rootDir, maxDepth = 8) {
    const found = [];
    const visit = async (dir, depth) => {
        if (depth > maxDepth) {
            return;
        }
        let dirents;
        try {
            dirents = await fs.readdir(dir, { withFileTypes: true });
        } catch {
            // Permission or race errors: treat the directory as empty.
            return;
        }
        for (const dirent of dirents) {
            if (SKIP_DIRS.has(dirent.name)) {
                continue;
            }
            const absolutePath = path.join(dir, dirent.name);
            const isDir = dirent.isDirectory();
            found.push({
                relativePath: path.relative(rootDir, absolutePath),
                absolutePath,
                isDir,
            });
            if (isDir) {
                await visit(absolutePath, depth + 1);
            }
        }
    };
    await visit(rootDir, 0);
    return found;
}
|
|
139
|
+
// ---------------------------------------------------------------------------
|
|
140
|
+
// Workspace composition detection
|
|
141
|
+
// ---------------------------------------------------------------------------
|
|
142
|
+
/**
 * Detect workspace composition from the filesystem.
 *
 * Examines directory structure for signals of frontend, backend, website,
 * shared, and infra components. Does NOT trust state.language — derives
 * composition purely from filesystem evidence.
 *
 * @param projectDir - Project root directory.
 * @returns Array of detected ComponentKind values.
 */
export async function detectWorkspaceComposition(projectDir) {
    // Small helper so every probe is expressed relative to the project root.
    const exists = (...segments) => pathExists(path.join(projectDir, ...segments));
    const kinds = [];
    if (await exists('apps')) {
        // Workspace layout: probe the conventional apps/ subdirectories.
        for (const dir of ['apps/frontend', 'apps/web', 'apps/client']) {
            if (await exists(dir, 'package.json')) {
                kinds.push('frontend');
                break;
            }
        }
        for (const dir of ['apps/backend', 'apps/api', 'apps/server']) {
            const pythonManifest = (await exists(dir, 'requirements.txt'))
                || (await exists(dir, 'pyproject.toml'));
            if (pythonManifest || (await exists(dir, 'package.json'))) {
                kinds.push('backend');
                break;
            }
        }
        for (const dir of ['apps/website', 'apps/landing']) {
            if (await exists(dir)) {
                kinds.push('website');
                break;
            }
        }
    } else {
        // Single-component project: classify by root-level manifests.
        const hasNodeManifest = await exists('package.json');
        const hasPythonManifest = (await exists('requirements.txt'))
            || (await exists('pyproject.toml'));
        if (hasNodeManifest) {
            kinds.push('frontend');
        }
        if (hasPythonManifest) {
            kinds.push('backend');
        }
    }
    // Shared-code directory signals.
    for (const dir of ['packages', 'libs', 'shared']) {
        if (await exists(dir)) {
            kinds.push('shared');
            break;
        }
    }
    // Infrastructure signals (dedicated dir or Docker artifacts at the root).
    for (const marker of ['infra', 'docker-compose.yml', 'docker-compose.yaml', 'Dockerfile']) {
        if (await exists(marker)) {
            kinds.push('infra');
            break;
        }
    }
    return kinds;
}
|
|
216
|
+
// ---------------------------------------------------------------------------
|
|
217
|
+
// Per-component scanning
|
|
218
|
+
// ---------------------------------------------------------------------------
|
|
219
|
+
/**
 * Detect the likely framework from package.json deps or requirements.txt.
 *
 * Node frameworks are checked first (in priority order: meta-frameworks such
 * as Next before their underlying library), then Python frameworks.
 *
 * @param componentDir - Component root directory.
 * @returns Framework name or undefined when nothing is recognized.
 */
async function detectFramework(componentDir) {
    // Ordered dependency-name -> framework-name table; first hit wins.
    const nodeFrameworks = [
        ['next', 'next'],
        ['@remix-run/react', 'remix'],
        ['react', 'react'],
        ['vue', 'vue'],
        ['svelte', 'svelte'],
        ['express', 'express'],
        ['fastify', 'fastify'],
        ['hono', 'hono'],
    ];
    try {
        const raw = await fs.readFile(path.join(componentDir, 'package.json'), 'utf-8');
        const pkg = JSON.parse(raw);
        const merged = { ...pkg.dependencies, ...pkg.devDependencies };
        for (const [dep, framework] of nodeFrameworks) {
            if (merged[dep]) {
                return framework;
            }
        }
    } catch {
        // No package.json or unparseable JSON — fall through to Python checks.
    }
    const pythonFrameworks = [
        [/fastapi/i, 'fastapi'],
        [/django/i, 'django'],
        [/flask/i, 'flask'],
    ];
    try {
        const reqs = await fs.readFile(path.join(componentDir, 'requirements.txt'), 'utf-8');
        for (const [pattern, framework] of pythonFrameworks) {
            if (pattern.test(reqs)) {
                return framework;
            }
        }
    } catch {
        // No requirements.txt.
    }
    return undefined;
}
|
|
267
|
+
/**
 * Scan a single component directory for files, routes, entry points, and deps.
 *
 * @param componentDir - Absolute path to component root.
 * @param kind - Component kind.
 * @param language - Language hint (e.g., 'typescript', 'python').
 * @param projectDir - Optional project root; when provided, rootDir is
 *   computed relative to it (e.g. "apps/frontend" instead of "frontend").
 * @returns ComponentScan result.
 */
export async function scanComponent(componentDir, kind, language, projectDir) {
    const entries = await walkDir(componentDir);
    const sourceFiles = [];
    const testFiles = [];
    const entryPoints = [];
    const routeFiles = [];
    const depManifests = [];
    const entryBasenames = new Set([
        'main.ts', 'main.tsx', 'index.ts', 'index.tsx', 'app.ts', 'app.tsx',
        'main.py', 'app.py', 'server.ts', 'server.js',
    ]);
    for (const entry of entries) {
        if (entry.isDir)
            continue;
        // Fix: path.relative() yields backslash separators on Windows, which
        // breaks the '/routes/' and '/api/' substring checks and the
        // slash-based test-file patterns. Normalize to '/' for matching only;
        // the paths stored in the result keep their native form.
        const rel = entry.relativePath.split(path.sep).join('/');
        const base = path.basename(entry.relativePath);
        const ext = path.extname(entry.relativePath);
        if (SOURCE_EXTENSIONS.has(ext)) {
            const fileEntry = { path: entry.relativePath, extension: ext };
            if (isTestFile(rel)) {
                testFiles.push(fileEntry);
            }
            else {
                sourceFiles.push(fileEntry);
            }
            // Entry point detection
            if (entryBasenames.has(base)) {
                entryPoints.push(entry.relativePath);
            }
            // Route file detection
            if (/route[rs]?\.[jt]sx?$/i.test(base)
                || /router\.[jt]sx?$/i.test(base)
                || rel.includes('/routes/')
                || rel.includes('/api/')
                || /urls\.py$/.test(base)
                || /routes\.py$/.test(base)
                || /router\.py$/.test(base)) {
                routeFiles.push(entry.relativePath);
            }
        }
        // Dependency manifests
        if (base === 'package.json' && !rel.includes('node_modules')) {
            depManifests.push(await parseDependencyFile(entry.absolutePath, 'package.json'));
        }
        else if (base === 'requirements.txt') {
            depManifests.push(await parseDependencyFile(entry.absolutePath, 'requirements.txt'));
        }
        else if (base === 'pyproject.toml') {
            depManifests.push(await parseDependencyFile(entry.absolutePath, 'pyproject.toml'));
        }
    }
    // Reason: rootDir must be relative to the project root for correct LOC path
    // resolution. Using only the parent dir gives "frontend" instead of
    // "apps/frontend".
    const rootDir = projectDir
        ? (path.relative(projectDir, componentDir) || '.')
        : (path.relative(path.dirname(componentDir), componentDir) || '.');
    const framework = await detectFramework(componentDir);
    return {
        kind,
        rootDir,
        language,
        framework,
        entryPoints,
        routeFiles,
        testFiles,
        sourceFiles,
        dependencyManifests: depManifests,
    };
}
|
|
341
|
+
// ---------------------------------------------------------------------------
|
|
342
|
+
// Dependency parsing
|
|
343
|
+
// ---------------------------------------------------------------------------
|
|
344
|
+
/**
 * Parse a single dependency manifest file.
 *
 * @param filePath - Absolute path to the file.
 * @param type - Manifest type ('package.json' | 'requirements.txt' | 'pyproject.toml').
 * @returns DependencyManifest with parsed dependencies. On read/parse error
 *   a bare { file, type } record is returned.
 */
async function parseDependencyFile(filePath, type) {
    const relPath = path.basename(filePath);
    try {
        const content = await fs.readFile(filePath, 'utf-8');
        if (type === 'package.json') {
            const pkg = JSON.parse(content);
            return {
                file: relPath,
                type,
                dependencies: pkg.dependencies ?? {},
                devDependencies: pkg.devDependencies ?? {},
            };
        }
        if (type === 'requirements.txt') {
            const deps = {};
            for (const rawLine of content.split('\n')) {
                // Strip trailing inline comments ("pkg==1.0  # why") before matching.
                const line = rawLine.replace(/\s+#.*$/, '').trim();
                // Skip blanks, full-line comments, and pip options (-r, -e, --hash, ...).
                if (!line || line.startsWith('#') || line.startsWith('-'))
                    continue;
                // Fix: the previous pattern rejected dotted names (zope.interface)
                // and extras (uvicorn[standard]). Extras are ignored for the key;
                // the version specifier is kept verbatim, '*' when absent.
                const match = line.match(/^([A-Za-z0-9._-]+)(?:\[[^\]]*\])?\s*([=<>!~].+)?$/);
                if (match) {
                    deps[match[1]] = match[2] ?? '*';
                }
            }
            return { file: relPath, type, dependencies: deps };
        }
        // pyproject.toml — simplified parsing (no dependency extraction yet).
        return { file: relPath, type: 'pyproject.toml' };
    }
    catch {
        return { file: relPath, type };
    }
}
|
|
384
|
+
/**
 * Parse all dependency manifests found in the project (depth <= 3).
 *
 * @param projectDir - Project root directory.
 * @param _language - Project language (currently unused).
 * @returns Array of dependency manifests in walk order.
 */
export async function parseDependencies(projectDir, _language) {
    // Basename -> manifest type lookup; anything else is ignored.
    const manifestTypes = new Map([
        ['package.json', 'package.json'],
        ['requirements.txt', 'requirements.txt'],
        ['pyproject.toml', 'pyproject.toml'],
    ]);
    const manifests = [];
    for (const entry of await walkDir(projectDir, 3)) {
        if (entry.isDir) {
            continue;
        }
        const base = path.basename(entry.relativePath);
        const manifestType = manifestTypes.get(base);
        if (!manifestType) {
            continue;
        }
        // Defensive guard: package.json files under node_modules are vendored.
        if (manifestType === 'package.json' && entry.relativePath.includes('node_modules')) {
            continue;
        }
        manifests.push(await parseDependencyFile(entry.absolutePath, manifestType));
    }
    return manifests;
}
|
|
410
|
+
// ---------------------------------------------------------------------------
|
|
411
|
+
// Route file detection
|
|
412
|
+
// ---------------------------------------------------------------------------
|
|
413
|
+
/**
 * Find all route-like files in the project.
 *
 * A file is route-like when its basename matches common router naming
 * (route.ts, routes.py, urls.py, ...) or it lives under a /routes/ or /api/
 * directory.
 *
 * @param projectDir - Project root directory.
 * @param _language - Project language (currently unused).
 * @returns Array of relative paths to route files.
 */
export async function findRouteFiles(projectDir, _language) {
    const routes = [];
    const entries = await walkDir(projectDir);
    for (const entry of entries) {
        if (entry.isDir)
            continue;
        const base = path.basename(entry.relativePath);
        // Fix: path.relative() produces '\\' separators on Windows, so the
        // '/routes/' and '/api/' substring checks never matched there.
        // Normalize for matching; the returned paths keep their native form.
        const rel = entry.relativePath.split(path.sep).join('/');
        if (/route[rs]?\.[jt]sx?$/i.test(base)
            || /router\.[jt]sx?$/i.test(base)
            || rel.includes('/routes/')
            || rel.includes('/api/')
            || /urls\.py$/.test(base)
            || /routes\.py$/.test(base)
            || /router\.py$/.test(base)) {
            routes.push(entry.relativePath);
        }
    }
    return routes;
}
|
|
440
|
+
// ---------------------------------------------------------------------------
|
|
441
|
+
// Priority doc reads
|
|
442
|
+
// ---------------------------------------------------------------------------
|
|
443
|
+
/**
 * Read project documentation in priority order: CLAUDE.md -> README.md ->
 * other root-level .md files -> docs/ directory (depth <= 3).
 *
 * @param projectDir - Project root directory.
 * @returns { claudeMd, readme, docsIndex, keyFiles } — full text of the two
 *   priority docs plus excerpts of the remaining markdown files.
 */
export async function readPriorityDocs(projectDir) {
    const docsIndex = [];
    const keyFiles = [];
    // 1. CLAUDE.md (highest priority)
    const claudeMd = await safeRead(path.join(projectDir, 'CLAUDE.md'));
    if (claudeMd) {
        docsIndex.push('CLAUDE.md');
    }
    // 2. README.md
    const readme = await safeRead(path.join(projectDir, 'README.md'));
    if (readme) {
        docsIndex.push('README.md');
    }
    // 3. Other root-level .md files (excluding the two above)
    try {
        const rootEntries = await fs.readdir(projectDir, { withFileTypes: true });
        const extraDocs = rootEntries.filter((e) => e.isFile()
            && e.name.endsWith('.md')
            && e.name !== 'README.md'
            && e.name !== 'CLAUDE.md');
        for (const doc of extraDocs) {
            docsIndex.push(doc.name);
            const excerpt = await safeRead(path.join(projectDir, doc.name), MAX_FILE_EXCERPT);
            if (excerpt) {
                keyFiles.push({ path: doc.name, content: excerpt });
            }
        }
    } catch {
        // Project root could not be listed; skip root-level extras.
    }
    // 4. docs/ directory
    const docsDir = path.join(projectDir, 'docs');
    if (await pathExists(docsDir)) {
        for (const entry of await walkDir(docsDir, 3)) {
            if (entry.isDir || !entry.relativePath.endsWith('.md')) {
                continue;
            }
            const relFromRoot = path.join('docs', entry.relativePath);
            docsIndex.push(relFromRoot);
            const excerpt = await safeRead(entry.absolutePath, MAX_FILE_EXCERPT);
            if (excerpt) {
                keyFiles.push({ path: relFromRoot, content: excerpt });
            }
        }
    }
    return { claudeMd, readme, docsIndex, keyFiles };
}
|
|
498
|
+
// ---------------------------------------------------------------------------
|
|
499
|
+
// Wiring matrix (deterministic FE<->BE check)
|
|
500
|
+
// ---------------------------------------------------------------------------
|
|
501
|
+
/**
 * Build a wiring matrix from the project, checking FE<->BE env keys,
 * CORS origins, and API prefixes for mismatches.
 *
 * Heuristic, regex-based extraction: env files are scanned for known
 * frontend API-base keys, then backend source files are scanned for CORS
 * origin arrays and API prefix assignments.
 *
 * NOTE(review): when several backend files match, the LAST file in walk
 * order overwrites earlier cors/prefix matches — confirm that is intended.
 *
 * @param projectDir - Project root directory.
 * @param components - Already-scanned component list.
 * @returns WiringMatrix with detected mismatches.
 */
export async function buildWiringMatrix(projectDir, components) {
    const feApiKeys = [];
    let feApiResolved;
    let beCorsOrigins;
    let beApiPrefix;
    const mismatches = [];
    // Scan .env.example for FE API env keys
    const envFiles = ['.env.example', '.env.local.example', '.env'];
    for (const envFile of envFiles) {
        const envPath = path.join(projectDir, envFile);
        const content = await safeRead(envPath, 4000);
        if (!content)
            continue;
        for (const line of content.split('\n')) {
            // NOTE(review): [A-Z_]+ excludes digits, so keys such as
            // VITE_API_URL_V2 are silently skipped — confirm intended.
            const match = line.match(/^([A-Z_]+)=(.*)$/);
            if (!match)
                continue;
            const [, key, value] = match;
            if (FE_API_ENV_PATTERNS.some((p) => p.test(key))) {
                feApiKeys.push(key);
                // First non-empty value wins; quotes are stripped.
                if (value && !feApiResolved) {
                    feApiResolved = value.replace(/["']/g, '').trim();
                }
            }
        }
    }
    // Scan backend component files for CORS origins
    const beComponent = components.find((c) => c.kind === 'backend');
    if (beComponent) {
        const beDir = path.join(projectDir, beComponent.rootDir === '.' ? '' : beComponent.rootDir);
        const beEntries = await walkDir(beDir, 4);
        for (const entry of beEntries) {
            if (entry.isDir)
                continue;
            const ext = path.extname(entry.relativePath);
            if (!['.ts', '.js', '.py'].includes(ext))
                continue;
            // Only the first 6000 chars of each file are inspected.
            const content = await safeRead(entry.absolutePath, 6000);
            if (!content)
                continue;
            // CORS origins extraction
            const corsMatch = content.match(/cors.*origins?\s*[=:]\s*\[([^\]]+)\]/is);
            if (corsMatch) {
                beCorsOrigins = corsMatch[1]
                    .split(',')
                    .map((s) => s.replace(/["'`\s]/g, ''))
                    .filter(Boolean);
            }
            // API prefix extraction
            const prefixMatch = content.match(/(?:prefix|api_prefix|apiPrefix)\s*[=:]\s*["'`]([^"'`]+)["'`]/i);
            if (prefixMatch) {
                beApiPrefix = prefixMatch[1];
            }
        }
    }
    // Detect mismatches
    if (feApiResolved && beCorsOrigins && beCorsOrigins.length > 0) {
        try {
            const feUrl = new URL(feApiResolved);
            const feOrigin = feUrl.origin;
            // Reason: Check if the frontend's expected API origin is in the backend's CORS list
            // NOTE(review): this compares the backend API origin against the CORS
            // allow-list; CORS lists normally contain the *frontend's* origin, so
            // this check may be intentionally heuristic — confirm semantics.
            const corsHasFe = beCorsOrigins.some((o) => o === '*' || o === feOrigin);
            if (!corsHasFe) {
                mismatches.push({
                    type: 'cors-origin-mismatch',
                    details: `Frontend expects API at ${feApiResolved} but backend CORS does not include origin ${feOrigin}`,
                    evidence: [
                        { file: '.env.example', snippet: `${feApiKeys[0]}=${feApiResolved}` },
                    ],
                });
            }
        }
        catch {
            // Invalid URL in env — not a wiring mismatch
        }
    }
    return {
        frontendApiBaseEnvKeys: feApiKeys,
        frontendApiBaseResolved: feApiResolved,
        backendCorsOrigins: beCorsOrigins,
        backendApiPrefix: beApiPrefix,
        potentialMismatches: mismatches,
    };
}
|
|
593
|
+
// ---------------------------------------------------------------------------
|
|
594
|
+
// LOC counting
|
|
595
|
+
// ---------------------------------------------------------------------------
|
|
596
|
+
/**
 * Count lines of code and test code in the given file lists.
 *
 * File paths are resolved against projectDir; unreadable files count as 0.
 *
 * @param sourceFiles - Array of source file entries ({ path }).
 * @param testFiles - Array of test file entries ({ path }).
 * @param projectDir - Project root for resolving paths.
 * @returns { code, tests } line totals.
 */
export async function countLines(sourceFiles, testFiles, projectDir) {
    const sumLines = async (entries) => {
        let total = 0;
        for (const entry of entries) {
            total += await countFileLines(path.join(projectDir, entry.path));
        }
        return total;
    };
    return {
        code: await sumLines(sourceFiles),
        tests: await sumLines(testFiles),
    };
}
|
|
618
|
+
// ---------------------------------------------------------------------------
|
|
619
|
+
// Tree builder
|
|
620
|
+
// ---------------------------------------------------------------------------
|
|
621
|
+
/**
 * Build a truncated, indented tree string from the project directory.
 *
 * At most MAX_TREE_ENTRIES entries are shown (walk depth 3); a final
 * "... (+N more entries)" line summarizes the remainder.
 *
 * @param projectDir - Project root directory.
 * @returns Indented tree string, one entry per line, directories suffixed '/'.
 */
async function buildTree(projectDir) {
    const entries = await walkDir(projectDir, 3);
    const lines = entries.slice(0, MAX_TREE_ENTRIES).map((entry) => {
        const depth = entry.relativePath.split(path.sep).length - 1;
        const indent = ' '.repeat(depth);
        const suffix = entry.isDir ? '/' : '';
        return `${indent}${path.basename(entry.relativePath)}${suffix}`;
    });
    if (entries.length > MAX_TREE_ENTRIES) {
        lines.push(`... (+${entries.length - MAX_TREE_ENTRIES} more entries)`);
    }
    return lines.join('\n');
}
|
|
642
|
+
// ---------------------------------------------------------------------------
|
|
643
|
+
// Main scanner
|
|
644
|
+
// ---------------------------------------------------------------------------
|
|
645
|
+
/**
|
|
646
|
+
* Scan the entire project and produce a structured ProjectScanResult.
|
|
647
|
+
*
|
|
648
|
+
* @param projectDir - Project root directory.
|
|
649
|
+
* @param language - Language from state.json.
|
|
650
|
+
* @param onProgress - Optional progress callback.
|
|
651
|
+
* @returns ProjectScanResult with all scan data.
|
|
652
|
+
*/
|
|
653
|
+
export async function scanProject(projectDir, language, onProgress) {
|
|
654
|
+
onProgress?.('Detecting workspace composition...');
|
|
655
|
+
const detectedComposition = await detectWorkspaceComposition(projectDir);
|
|
656
|
+
// Determine if state.language and detected composition agree
|
|
657
|
+
const isWs = isWorkspace(language);
|
|
658
|
+
const hasMultipleComponents = detectedComposition.filter((k) => k !== 'shared' && k !== 'infra').length > 1;
|
|
659
|
+
const compositionMismatch = isWs !== hasMultipleComponents;
|
|
660
|
+
onProgress?.('Reading priority documentation...');
|
|
661
|
+
const docs = await readPriorityDocs(projectDir);
|
|
662
|
+
onProgress?.('Scanning components...');
|
|
663
|
+
const components = [];
|
|
664
|
+
// Determine component language hint
|
|
665
|
+
const langHint = (kind) => {
|
|
666
|
+
if (kind === 'backend' && ['python', 'fullstack', 'all'].includes(language))
|
|
667
|
+
return 'python';
|
|
668
|
+
if (kind === 'frontend' || kind === 'website')
|
|
669
|
+
return 'typescript';
|
|
670
|
+
return 'mixed';
|
|
671
|
+
};
|
|
672
|
+
if (isWs) {
|
|
673
|
+
// Workspace: scan each apps/ subdirectory
|
|
674
|
+
const appDirMap = {
|
|
675
|
+
frontend: ['apps/frontend', 'apps/web', 'apps/client'],
|
|
676
|
+
backend: ['apps/backend', 'apps/api', 'apps/server'],
|
|
677
|
+
website: ['apps/website', 'apps/landing'],
|
|
678
|
+
};
|
|
679
|
+
for (const kind of detectedComposition) {
|
|
680
|
+
if (kind === 'shared' || kind === 'infra')
|
|
681
|
+
continue;
|
|
682
|
+
const candidates = appDirMap[kind] ?? [];
|
|
683
|
+
for (const candidate of candidates) {
|
|
684
|
+
const absCandidate = path.join(projectDir, candidate);
|
|
685
|
+
if (await pathExists(absCandidate)) {
|
|
686
|
+
components.push(await scanComponent(absCandidate, kind, langHint(kind), projectDir));
|
|
687
|
+
break;
|
|
688
|
+
}
|
|
689
|
+
}
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
else {
|
|
693
|
+
// Single-component project: scan root
|
|
694
|
+
const kind = detectedComposition[0] ?? 'frontend';
|
|
695
|
+
components.push(await scanComponent(projectDir, kind, langHint(kind), projectDir));
|
|
696
|
+
}
|
|
697
|
+
// Aggregate files from all components
|
|
698
|
+
const allSource = [];
|
|
699
|
+
const allTest = [];
|
|
700
|
+
const allEntryPoints = [];
|
|
701
|
+
const allRouteFiles = [];
|
|
702
|
+
const allDeps = [];
|
|
703
|
+
for (const comp of components) {
|
|
704
|
+
// Reason: Component file paths are relative to the component dir.
|
|
705
|
+
// For LOC counting, we need them relative to the project root.
|
|
706
|
+
const prefix = comp.rootDir === '.' ? '' : comp.rootDir;
|
|
707
|
+
const prefixPath = (p) => prefix ? path.join(prefix, p) : p;
|
|
708
|
+
allSource.push(...comp.sourceFiles.map((f) => ({ ...f, path: prefixPath(f.path) })));
|
|
709
|
+
allTest.push(...comp.testFiles.map((f) => ({ ...f, path: prefixPath(f.path) })));
|
|
710
|
+
allEntryPoints.push(...comp.entryPoints.map(prefixPath));
|
|
711
|
+
allRouteFiles.push(...comp.routeFiles.map(prefixPath));
|
|
712
|
+
allDeps.push(...comp.dependencyManifests);
|
|
713
|
+
}
|
|
714
|
+
// Config files detection
|
|
715
|
+
onProgress?.('Scanning config files...');
|
|
716
|
+
const configFiles = [];
|
|
717
|
+
try {
|
|
718
|
+
const rootEntries = await fs.readdir(projectDir, { withFileTypes: true });
|
|
719
|
+
for (const entry of rootEntries) {
|
|
720
|
+
if (entry.isFile() && CONFIG_FILES.has(entry.name)) {
|
|
721
|
+
configFiles.push(entry.name);
|
|
722
|
+
}
|
|
723
|
+
}
|
|
724
|
+
}
|
|
725
|
+
catch {
|
|
726
|
+
// Root dir error
|
|
727
|
+
}
|
|
728
|
+
// LOC counting
|
|
729
|
+
onProgress?.('Counting lines of code...');
|
|
730
|
+
const locResult = await countLines(allSource, allTest, projectDir);
|
|
731
|
+
// Tree
|
|
732
|
+
const tree = await buildTree(projectDir);
|
|
733
|
+
// Config content reads
|
|
734
|
+
const envExampleContent = await safeRead(path.join(projectDir, '.env.example'), 4000);
|
|
735
|
+
const dockerComposeContent = await safeRead(path.join(projectDir, 'docker-compose.yml'), 4000) ?? await safeRead(path.join(projectDir, 'docker-compose.yaml'), 4000);
|
|
736
|
+
// Wiring matrix
|
|
737
|
+
onProgress?.('Building wiring matrix...');
|
|
738
|
+
const hasFe = components.some((c) => c.kind === 'frontend');
|
|
739
|
+
const hasBe = components.some((c) => c.kind === 'backend');
|
|
740
|
+
const wiring = (hasFe && hasBe) ? await buildWiringMatrix(projectDir, components) : undefined;
|
|
741
|
+
onProgress?.(`Scan complete: ${allSource.length} source files, ${locResult.code} LOC`);
|
|
742
|
+
return {
|
|
743
|
+
tree,
|
|
744
|
+
components,
|
|
745
|
+
detectedComposition,
|
|
746
|
+
stateLanguage: language,
|
|
747
|
+
compositionMismatch,
|
|
748
|
+
sourceFiles: allSource,
|
|
749
|
+
testFiles: allTest,
|
|
750
|
+
configFiles,
|
|
751
|
+
entryPoints: allEntryPoints,
|
|
752
|
+
routeFiles: allRouteFiles,
|
|
753
|
+
dependencies: allDeps,
|
|
754
|
+
totalSourceFiles: allSource.length,
|
|
755
|
+
totalTestFiles: allTest.length,
|
|
756
|
+
totalLinesOfCode: locResult.code,
|
|
757
|
+
totalLinesOfTests: locResult.tests,
|
|
758
|
+
language,
|
|
759
|
+
claudeMdContent: docs.claudeMd,
|
|
760
|
+
readmeContent: docs.readme,
|
|
761
|
+
docsIndex: docs.docsIndex,
|
|
762
|
+
keyFileSnippets: docs.keyFiles,
|
|
763
|
+
wiring,
|
|
764
|
+
envExampleContent,
|
|
765
|
+
dockerComposeContent,
|
|
766
|
+
};
|
|
767
|
+
}
|
|
768
|
+
//# sourceMappingURL=audit-scanner.js.map
|