devbonzai 2.2.300 → 2.2.302
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.js +0 -4
- package/package.json +3 -7
- package/templates/handlers/index.js +1 -10
- package/templates/receiver.js +0 -20
- package/templates/handlers/analyze_prompt.js +0 -118
- package/templates/handlers/git-churn.js +0 -44
- package/templates/handlers/move.js +0 -35
- package/templates/handlers/prompt_agent.js +0 -181
- package/templates/handlers/prompt_agent_stream.js +0 -306
- package/templates/handlers/revert_job.js +0 -31
- package/templates/handlers/scan_code_quality.js +0 -174
- package/templates/handlers/scan_standards.js +0 -178
- package/templates/handlers/write.js +0 -19
- package/templates/handlers/write_dir.js +0 -20

package/cli.js
CHANGED
@@ -108,11 +108,7 @@ async function main() {
   }
   packageJson.dependencies.express = "^4.18.2";
   packageJson.dependencies.cors = "^2.8.5";
-  packageJson.dependencies["body-parser"] = "^1.20.2";
-  packageJson.dependencies["raw-body"] = "^2.5.2";
   packageJson.dependencies["@babel/parser"] = "^7.23.0";
-  packageJson.dependencies.eslint = "^8.57.0";
-  packageJson.dependencies.madge = "^6.1.0";

   // Add script to run receiver
   if (!packageJson.scripts) {
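Taken together, the removals above mean the CLI now injects only three runtime dependencies into the target repository's package.json. A minimal sketch of the resulting dependency block (illustrative only; it assumes no other dependency assignments elsewhere in cli.js):

// Sketch, not package code: summarizes the surviving assignments in the hunk above.
const injectedDependencies = {
  "express": "^4.18.2",
  "cors": "^2.8.5",
  "@babel/parser": "^7.23.0"
};
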
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "devbonzai",
-  "version": "2.2.300",
+  "version": "2.2.302",
   "description": "Quickly set up a local file server in any repository for browser-based file access",
   "main": "cli.js",
   "bin": {
@@ -24,10 +24,6 @@
   "dependencies": {
     "express": "^4.18.2",
     "cors": "^2.8.5",
-    "
-    "raw-body": "^2.5.2",
-    "glob": "^10.0.0",
-    "eslint": "^8.57.0",
-    "madge": "^6.1.0"
+    "@babel/parser": "^7.23.0"
   }
-}
+}

package/templates/handlers/index.js
CHANGED
@@ -5,18 +5,9 @@ function indexHandler(req, res) {
     endpoints: {
       'GET /list': 'List all files in the directory',
       'GET /read?path=<filepath>': 'Read file content',
-      'GET /git-churn?path=<filepath>&commits=30': 'Get git commit churn for a file',
-      'POST /write': 'Write file content (body: {path, content})',
-      'POST /write_dir': 'Create directory (body: {path})',
       'POST /delete': 'Delete file or directory (body: {path})',
-      'POST /move': 'Move file or folder (body: {source, destination})',
       'POST /open-cursor': 'Open Cursor (body: {path, line?})',
-      'POST /
-      'POST /prompt_agent': 'Execute cursor-agent command (body: {prompt})',
-      'POST /prompt_agent_stream': 'Execute cursor-agent with SSE streaming (body: {prompt})',
-      'POST /revert_job': 'Revert to a previous commit (body: {beforeCommit})',
-      'POST /shutdown': 'Gracefully shutdown the server',
-      'POST /scan_standards': 'Analyze codebase against architectural standards (body: {projectPath, standards, apiKey?})'
+      'POST /shutdown': 'Gracefully shutdown the server'
     },
     example: 'Try: /list or /read?path=README.md'
   });

package/templates/receiver.js
CHANGED
@@ -7,19 +7,9 @@ const cors = require('./node_modules/cors');
 const indexHandler = require('./handlers/index');
 const listHandler = require('./handlers/list');
 const readHandler = require('./handlers/read');
-const gitChurnHandler = require('./handlers/git-churn');
-const writeHandler = require('./handlers/write');
-const writeDirHandler = require('./handlers/write_dir');
 const deleteHandler = require('./handlers/delete');
-const moveHandler = require('./handlers/move');
 const openCursorHandler = require('./handlers/open-cursor');
-const analyzePromptHandler = require('./handlers/analyze_prompt');
-const promptAgentHandler = require('./handlers/prompt_agent');
-const promptAgentStreamHandler = require('./handlers/prompt_agent_stream');
-const revertJobHandler = require('./handlers/revert_job');
 const shutdownHandler = require('./handlers/shutdown');
-const scanCodeQualityHandler = require('./handlers/scan_code_quality');
-const scanStandardsHandler = require('./handlers/scan_standards');

 const app = express();

@@ -30,19 +20,9 @@ app.use(express.json());
 app.get('/', indexHandler);
 app.get('/list', listHandler);
 app.get('/read', readHandler);
-app.get('/git-churn', gitChurnHandler);
-app.post('/write', writeHandler);
-app.post('/write_dir', writeDirHandler);
 app.post('/delete', deleteHandler);
-app.post('/move', moveHandler);
 app.post('/open-cursor', openCursorHandler);
-app.post('/analyze_prompt', analyzePromptHandler);
-app.post('/prompt_agent', promptAgentHandler);
-app.post('/prompt_agent_stream', promptAgentStreamHandler);
-app.post('/revert_job', revertJobHandler);
 app.post('/shutdown', shutdownHandler);
-app.post('/scan_code_quality', scanCodeQualityHandler);
-app.post('/scan_standards', scanStandardsHandler);

 const port = 3001;
 app.listen(port, () => {
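For orientation, the routes that survive this release are the basic file-server ones registered above. A minimal usage sketch in Node 18+ (global fetch), assuming a receiver started from these templates is listening on the default port 3001 shown above; the file paths are placeholders and the response handling is illustrative:

// Sketch only: run inside an async context (ES module or async function).
const base = 'http://localhost:3001';

// GET endpoints kept in this release.
const files = await (await fetch(`${base}/list`)).json();   // assumes JSON; the list handler is not shown in this diff
const readRes = await fetch(`${base}/read?path=README.md`); // response format depends on the read handler

// Remaining POST endpoints take a JSON body, per the descriptions in handlers/index.js above.
await fetch(`${base}/delete`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ path: 'tmp/scratch.txt' })         // placeholder path
});
await fetch(`${base}/shutdown`, { method: 'POST' });
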
package/templates/handlers/analyze_prompt.js
REMOVED
@@ -1,118 +0,0 @@
-const { spawn } = require('child_process');
-const { ROOT } = require('../config');
-
-function analyzePromptHandler(req, res) {
-  console.log('🔵 [analyze_prompt] Endpoint hit');
-  const { prompt } = req.body;
-  console.log('🔵 [analyze_prompt] Received prompt:', prompt ? `${prompt.substring(0, 50)}...` : 'none');
-
-  if (!prompt || typeof prompt !== 'string') {
-    console.log('❌ [analyze_prompt] Error: prompt required');
-    return res.status(400).json({ error: 'prompt required' });
-  }
-
-  // Configurable timeout (default 2 minutes for analysis)
-  const timeoutMs = parseInt(req.body.timeout) || 2 * 60 * 1000;
-  let timeoutId = null;
-  let responseSent = false;
-
-  // Build analysis prompt - ask agent to list files without making changes
-  const analysisPrompt = `You are analyzing a coding task. Do NOT make any changes to any files. Only analyze and list the files you would need to modify to complete this task.
-
-Respond ONLY with valid JSON in this exact format (no other text):
-{"files": [{"path": "path/to/file.ext", "reason": "brief reason for modification"}]}
-
-If no files need modification, respond with: {"files": []}
-
-Task to analyze: ${prompt}`;
-
-  const args = ['--print', '--force', '--workspace', '.', analysisPrompt];
-
-  console.log('🔵 [analyze_prompt] Spawning cursor-agent process...');
-  const proc = spawn(
-    'cursor-agent',
-    args,
-    {
-      cwd: ROOT,
-      env: process.env,
-      stdio: ['ignore', 'pipe', 'pipe']
-    }
-  );
-
-  console.log('🔵 [analyze_prompt] Process spawned, PID:', proc.pid);
-
-  let stdout = '';
-  let stderr = '';
-
-  timeoutId = setTimeout(() => {
-    if (!responseSent && proc && !proc.killed) {
-      console.log('⏱️ [analyze_prompt] Timeout reached, killing process...');
-      proc.kill('SIGTERM');
-      setTimeout(() => {
-        if (!proc.killed) proc.kill('SIGKILL');
-      }, 5000);
-
-      if (!responseSent) {
-        responseSent = true;
-        res.status(500).json({
-          error: 'Process timeout',
-          message: `Analysis exceeded timeout of ${timeoutMs / 1000} seconds`
-        });
-      }
-    }
-  }, timeoutMs);
-
-  proc.stdout.on('data', (d) => {
-    stdout += d.toString();
-  });
-
-  proc.stderr.on('data', (d) => {
-    stderr += d.toString();
-  });
-
-  proc.on('error', (error) => {
-    console.log('❌ [analyze_prompt] Process error:', error.message);
-    if (timeoutId) clearTimeout(timeoutId);
-    if (!responseSent) {
-      responseSent = true;
-      return res.status(500).json({ error: error.message });
-    }
-  });
-
-  proc.on('close', (code, signal) => {
-    console.log('🔵 [analyze_prompt] Process closed with code:', code);
-    if (timeoutId) clearTimeout(timeoutId);
-
-    if (!responseSent) {
-      responseSent = true;
-
-      // Try to parse JSON from the output
-      try {
-        // Look for JSON in the output - it might be wrapped in other text
-        const jsonMatch = stdout.match(/\{[\s\S]*"files"[\s\S]*\}/);
-        if (jsonMatch) {
-          const parsed = JSON.parse(jsonMatch[0]);
-          console.log('✅ [analyze_prompt] Parsed files:', parsed.files);
-          res.json({ files: parsed.files || [] });
-        } else {
-          console.log('⚠️ [analyze_prompt] No JSON found in output, returning raw');
-          res.json({
-            files: [],
-            raw: stdout,
-            warning: 'Could not parse structured response'
-          });
-        }
-      } catch (parseError) {
-        console.log('⚠️ [analyze_prompt] JSON parse error:', parseError.message);
-        res.json({
-          files: [],
-          raw: stdout,
-          warning: 'Could not parse JSON: ' + parseError.message
-        });
-      }
-    }
-  });
-}
-
-module.exports = analyzePromptHandler;
-

package/templates/handlers/git-churn.js
REMOVED
@@ -1,44 +0,0 @@
-const path = require('path');
-const { exec } = require('child_process');
-const { ROOT } = require('../config');
-
-function gitChurnHandler(req, res) {
-  try {
-    const filePath = path.join(ROOT, req.query.path || '');
-    if (!filePath.startsWith(ROOT)) {
-      return res.status(400).json({ error: 'Invalid path' });
-    }
-
-    // Get commits parameter, default to 30
-    const commits = parseInt(req.query.commits) || 30;
-
-    // Get relative path from ROOT for git command
-    const relativePath = path.relative(ROOT, filePath);
-
-    // Build git log command with relative path
-    const gitCommand = `git log --oneline --all -${commits} -- "${relativePath}"`;
-
-    exec(gitCommand, { cwd: ROOT }, (error, stdout, stderr) => {
-      // If git command fails (no repo, file not tracked, etc.), return 0 churn
-      if (error) {
-        // Check if it's because file is not in git or no git repo
-        if (error.code === 128 || stderr.includes('not a git repository')) {
-          return res.json({ churn: 0 });
-        }
-        // For other errors, still return 0 churn gracefully
-        return res.json({ churn: 0 });
-      }
-
-      // Count non-empty lines (each line is a commit)
-      const commitCount = stdout.trim().split('\n').filter(line => line.trim().length > 0).length;
-
-      res.json({ churn: commitCount });
-    });
-  } catch (e) {
-    // Handle any other errors gracefully
-    res.status(500).json({ error: e.message, churn: 0 });
-  }
-}
-
-module.exports = gitChurnHandler;
-

package/templates/handlers/move.js
REMOVED
@@ -1,35 +0,0 @@
-const fs = require('fs');
-const path = require('path');
-const { ROOT } = require('../config');
-
-function moveHandler(req, res) {
-  try {
-    const sourcePath = path.join(ROOT, req.body.source || '');
-    const destinationPath = path.join(ROOT, req.body.destination || '');
-
-    // Validate both paths are within ROOT directory
-    if (!sourcePath.startsWith(ROOT) || !destinationPath.startsWith(ROOT)) {
-      return res.status(400).send('Invalid path');
-    }
-
-    // Check if source exists
-    if (!fs.existsSync(sourcePath)) {
-      return res.status(400).send('Source path does not exist');
-    }
-
-    // Ensure destination directory exists
-    const destinationDir = path.dirname(destinationPath);
-    if (!fs.existsSync(destinationDir)) {
-      fs.mkdirSync(destinationDir, { recursive: true });
-    }
-
-    // Move the file or folder
-    fs.renameSync(sourcePath, destinationPath);
-    res.json({ status: 'ok' });
-  } catch (e) {
-    res.status(500).send(e.message);
-  }
-}
-
-module.exports = moveHandler;
-

package/templates/handlers/prompt_agent.js
REMOVED
@@ -1,181 +0,0 @@
-const { spawn, execSync } = require('child_process');
-const { ROOT } = require('../config');
-
-function promptAgentHandler(req, res) {
-  console.log('🔵 [prompt_agent] Endpoint hit');
-  const { prompt } = req.body;
-  console.log('🔵 [prompt_agent] Received prompt:', prompt ? `${prompt.substring(0, 50)}...` : 'none');
-
-  if (!prompt || typeof prompt !== 'string') {
-    console.log('❌ [prompt_agent] Error: prompt required');
-    return res.status(400).json({ error: 'prompt required' });
-  }
-
-  // Capture beforeCommit
-  let beforeCommit = '';
-  try {
-    beforeCommit = execSync('git rev-parse HEAD', { cwd: ROOT }).toString().trim();
-    console.log('🔵 [prompt_agent] beforeCommit:', beforeCommit);
-  } catch (e) {
-    console.log('⚠️ [prompt_agent] Could not get beforeCommit:', e.message);
-  }
-
-  // Capture initial state of modified files (files already dirty before job starts)
-  const initiallyModifiedFiles = new Set();
-  try {
-    const initialStatus = execSync('git status --short', { cwd: ROOT }).toString();
-    initialStatus.split('\n').filter(Boolean).forEach(line => {
-      const filePath = line.substring(3).trim();
-      if (filePath) initiallyModifiedFiles.add(filePath);
-    });
-    console.log('🔵 [prompt_agent] Initially modified files:', Array.from(initiallyModifiedFiles));
-  } catch (e) {
-    console.log('⚠️ [prompt_agent] Could not get initial status:', e.message);
-  }
-
-  // Set up file change tracking - only track NEW changes during job
-  const changedFiles = new Set();
-  const pollInterval = setInterval(() => {
-    try {
-      const status = execSync('git status --short', { cwd: ROOT }).toString();
-      status.split('\n').filter(Boolean).forEach(line => {
-        const filePath = line.substring(3).trim(); // Remove status prefix (XY + space)
-        // Only add if this file was NOT already modified before the job started
-        if (filePath && !initiallyModifiedFiles.has(filePath)) {
-          const wasNew = !changedFiles.has(filePath);
-          changedFiles.add(filePath);
-          if (wasNew) {
-            console.log('📁 [prompt_agent] New file changed:', filePath);
-          }
-        }
-      });
-    } catch (e) {
-      // Ignore git status errors
-    }
-  }, 500);
-
-  // Configurable timeout (default 5 minutes)
-  const timeoutMs = parseInt(req.body.timeout) || 5 * 60 * 1000;
-  let timeoutId = null;
-  let responseSent = false;
-
-  // Build command arguments
-  const args = ['--print', '--force', '--workspace', '.', prompt];
-
-  console.log('🔵 [prompt_agent] Spawning cursor-agent process...');
-  const proc = spawn(
-    'cursor-agent',
-    args,
-    {
-      cwd: ROOT,
-      env: process.env,
-      stdio: ['ignore', 'pipe', 'pipe'] // Ignore stdin, pipe stdout/stderr
-    }
-  );
-
-  console.log('🔵 [prompt_agent] Process spawned, PID:', proc.pid);
-
-  let stdout = '';
-  let stderr = '';
-
-  // Set up timeout to kill process if it takes too long
-  timeoutId = setTimeout(() => {
-    if (!responseSent && proc && !proc.killed) {
-      console.log('⏱️ [prompt_agent] Timeout reached, killing process...');
-      clearInterval(pollInterval);
-      proc.kill('SIGTERM');
-
-      // Force kill after a short grace period if SIGTERM doesn't work
-      setTimeout(() => {
-        if (!proc.killed) {
-          console.log('💀 [prompt_agent] Force killing process...');
-          proc.kill('SIGKILL');
-        }
-      }, 5000);
-
-      if (!responseSent) {
-        responseSent = true;
-        res.status(500).json({
-          error: 'Process timeout',
-          message: `cursor-agent exceeded timeout of ${timeoutMs / 1000} seconds`,
-          code: -1,
-          stdout,
-          stderr,
-          changedFiles: Array.from(changedFiles),
-          beforeCommit,
-          afterCommit: ''
-        });
-      }
-    }
-  }, timeoutMs);
-
-  proc.stdout.on('data', (d) => {
-    const data = d.toString();
-    console.log('📤 [prompt_agent] stdout data received:', data.length, 'bytes');
-    stdout += data;
-  });
-
-  proc.stderr.on('data', (d) => {
-    const data = d.toString();
-    console.log('⚠️ [prompt_agent] stderr data received:', data.length, 'bytes');
-    stderr += data;
-  });
-
-  proc.on('error', (error) => {
-    console.log('❌ [prompt_agent] Process error:', error.message);
-    clearInterval(pollInterval);
-    if (timeoutId) clearTimeout(timeoutId);
-    if (!responseSent) {
-      responseSent = true;
-      return res.status(500).json({ error: error.message });
-    }
-  });
-
-  proc.on('close', (code, signal) => {
-    console.log('🔵 [prompt_agent] Process closed with code:', code, 'signal:', signal);
-    console.log('🔵 [prompt_agent] stdout length:', stdout.length);
-    console.log('🔵 [prompt_agent] stderr length:', stderr.length);
-
-    // Stop polling for file changes
-    clearInterval(pollInterval);
-    if (timeoutId) clearTimeout(timeoutId);
-
-    // Capture afterCommit
-    let afterCommit = '';
-    try {
-      afterCommit = execSync('git rev-parse HEAD', { cwd: ROOT }).toString().trim();
-      console.log('🔵 [prompt_agent] afterCommit:', afterCommit);
-    } catch (e) {
-      console.log('⚠️ [prompt_agent] Could not get afterCommit:', e.message);
-    }
-
-    if (!responseSent) {
-      responseSent = true;
-      // Check if process was killed due to timeout
-      if (signal === 'SIGTERM' || signal === 'SIGKILL') {
-        res.status(500).json({
-          error: 'Process terminated',
-          message: signal === 'SIGTERM' ? 'Process was terminated due to timeout' : 'Process was force killed',
-          code: code || -1,
-          stdout,
-          stderr,
-          changedFiles: Array.from(changedFiles),
-          beforeCommit,
-          afterCommit
-        });
-      } else {
-        res.json({
-          code,
-          stdout,
-          stderr,
-          changedFiles: Array.from(changedFiles),
-          beforeCommit,
-          afterCommit
-        });
-      }
-    }
-  });
-}
-
-module.exports = promptAgentHandler;
-

package/templates/handlers/prompt_agent_stream.js
REMOVED
@@ -1,306 +0,0 @@
-const { spawn, execSync } = require('child_process');
-const { ROOT } = require('../config');
-
-// Token estimation: ~4 characters per token (rough approximation for English text)
-function estimateTokens(text) {
-  if (!text) return 0;
-  return Math.ceil(text.length / 4);
-}
-
-// Cost estimation based on Claude 3.5 Sonnet pricing ($/1M tokens)
-// Input: $3/1M tokens, Output: $15/1M tokens
-function estimateCost(inputTokens, outputTokens) {
-  const inputCostPerMillion = 3.0;
-  const outputCostPerMillion = 15.0;
-  const inputCost = (inputTokens / 1_000_000) * inputCostPerMillion;
-  const outputCost = (outputTokens / 1_000_000) * outputCostPerMillion;
-  return {
-    inputCost: parseFloat(inputCost.toFixed(6)),
-    outputCost: parseFloat(outputCost.toFixed(6)),
-    totalCost: parseFloat((inputCost + outputCost).toFixed(6))
-  };
-}
-
-function promptAgentStreamHandler(req, res) {
-  console.log('🔵 [prompt_agent_stream] Endpoint hit');
-  const { prompt } = req.body;
-  console.log('🔵 [prompt_agent_stream] Received prompt:', prompt ? `${prompt.substring(0, 50)}...` : 'none');
-
-  // Start execution timer
-  const startTime = Date.now();
-
-  if (!prompt || typeof prompt !== 'string') {
-    console.log('❌ [prompt_agent_stream] Error: prompt required');
-    return res.status(400).json({ error: 'prompt required' });
-  }
-
-  // Set up SSE headers
-  res.setHeader('Content-Type', 'text/event-stream');
-  res.setHeader('Cache-Control', 'no-cache');
-  res.setHeader('Connection', 'keep-alive');
-  res.setHeader('Access-Control-Allow-Origin', '*');
-  res.flushHeaders();
-
-  // Helper to send SSE events with robustness checks
-  const sendEvent = (type, data) => {
-    try {
-      // Check if response is still writable - try to send even if clientDisconnected flag is set
-      // because the response stream might still be open
-      if (res.destroyed || res.closed) {
-        console.log(`⚠️ [prompt_agent_stream] Response already closed, cannot send ${type} event`);
-        return false;
-      }
-      res.write(`data: ${JSON.stringify({ type, ...data })}\n\n`);
-      return true;
-    } catch (e) {
-      console.log(`⚠️ [prompt_agent_stream] Error sending ${type} event:`, e.message);
-      return false;
-    }
-  };
-
-  // Capture beforeCommit
-  let beforeCommit = '';
-  try {
-    beforeCommit = execSync('git rev-parse HEAD', { cwd: ROOT }).toString().trim();
-    console.log('🔵 [prompt_agent_stream] beforeCommit:', beforeCommit);
-  } catch (e) {
-    console.log('⚠️ [prompt_agent_stream] Could not get beforeCommit:', e.message);
-  }
-
-  // Capture initial state of modified files
-  const initiallyModifiedFiles = new Set();
-  try {
-    const initialStatus = execSync('git status --short', { cwd: ROOT }).toString();
-    initialStatus.split('\n').filter(Boolean).forEach(line => {
-      const filePath = line.substring(3).trim();
-      if (filePath) initiallyModifiedFiles.add(filePath);
-    });
-  } catch (e) {
-    // Ignore
-  }
-
-  // Send starting event with timestamp
-  sendEvent('start', { beforeCommit, startTimestamp: startTime });
-
-  // Set up file change tracking with real-time updates
-  const changedFiles = new Set();
-  const pollInterval = setInterval(() => {
-    try {
-      const status = execSync('git status --short', { cwd: ROOT }).toString();
-      status.split('\n').filter(Boolean).forEach(line => {
-        const filePath = line.substring(3).trim();
-        if (filePath && !initiallyModifiedFiles.has(filePath)) {
-          if (!changedFiles.has(filePath)) {
-            changedFiles.add(filePath);
-            console.log('📁 [prompt_agent_stream] File changed:', filePath);
-            // Send real-time update to client
-            sendEvent('file_changed', { path: filePath });
-          }
-        }
-      });
-    } catch (e) {
-      // Ignore git status errors
-    }
-  }, 500);
-
-  const timeoutMs = parseInt(req.body.timeout) || 5 * 60 * 1000;
-  let timeoutId = null;
-  let responseSent = false;
-
-  const args = ['--print', '--force', '--workspace', '.', prompt];
-
-  console.log('🔵 [prompt_agent_stream] Spawning cursor-agent process...');
-  const proc = spawn(
-    'cursor-agent',
-    args,
-    {
-      cwd: ROOT,
-      env: process.env,
-      stdio: ['ignore', 'pipe', 'pipe']
-    }
-  );
-
-  console.log('🔵 [prompt_agent_stream] Process spawned, PID:', proc.pid);
-
-  let stdout = '';
-  let stderr = '';
-
-  timeoutId = setTimeout(() => {
-    if (!responseSent && proc && !proc.killed) {
-      console.log('⏱️ [prompt_agent_stream] Timeout reached');
-      clearInterval(pollInterval);
-      proc.kill('SIGTERM');
-
-      setTimeout(() => {
-        if (!proc.killed) proc.kill('SIGKILL');
-      }, 5000);
-
-      // Always try to send complete event when timeout occurs
-      // Only skip if we've already sent it
-      if (!responseSent) {
-        try {
-          // Check if response is still writable - try to send even if clientDisconnected flag is set
-          // because the response stream might still be open
-          if (res.destroyed || res.closed) {
-            console.log('⚠️ [prompt_agent_stream] Response already closed, cannot send timeout events');
-          } else {
-            responseSent = true;
-
-            // Calculate metrics
-            const executionTimeMs = Date.now() - startTime;
-            const inputTokens = estimateTokens(prompt);
-            const outputTokens = estimateTokens(stdout);
-            const costEstimate = estimateCost(inputTokens, outputTokens);
-
-            sendEvent('error', {
-              error: 'Process timeout',
-              message: `cursor-agent exceeded timeout of ${timeoutMs / 1000} seconds`
-            });
-            sendEvent('complete', {
-              code: -1,
-              stdout,
-              stderr,
-              changedFiles: Array.from(changedFiles),
-              beforeCommit,
-              afterCommit: '',
-              metrics: {
-                executionTimeMs,
-                executionTimeSec: parseFloat((executionTimeMs / 1000).toFixed(2)),
-                inputTokens,
-                outputTokens,
-                totalTokens: inputTokens + outputTokens,
-                costEstimate
-              }
-            });
-            // Send stop event after complete
-            sendEvent('stop', {});
-            res.end();
-            console.log('✅ [prompt_agent_stream] Sent timeout error, complete, and stop events');
-          }
-        } catch (e) {
-          console.log('⚠️ [prompt_agent_stream] Error sending timeout events:', e.message);
-          // Don't set responseSent = true on error, in case we can retry
-          // But realistically, if there's an error, the connection is probably dead
-        }
-      } else {
-        console.log('⚠️ [prompt_agent_stream] Timeout events already sent, skipping');
-      }
-    }
-  }, timeoutMs);
-
-  proc.stdout.on('data', (d) => {
-    stdout += d.toString();
-  });
-
-  proc.stderr.on('data', (d) => {
-    stderr += d.toString();
-  });
-
-  proc.on('error', (error) => {
-    console.log('❌ [prompt_agent_stream] Process error:', error.message);
-    clearInterval(pollInterval);
-    if (timeoutId) clearTimeout(timeoutId);
-
-    // Always try to send error event when process error occurs
-    // Only skip if we've already sent it
-    if (!responseSent) {
-      try {
-        // Check if response is still writable - try to send even if clientDisconnected flag is set
-        // because the response stream might still be open
-        if (res.destroyed || res.closed) {
-          console.log('⚠️ [prompt_agent_stream] Response already closed, cannot send error event');
-        } else {
-          responseSent = true;
-          sendEvent('error', { error: error.message });
-          // Send stop event after error
-          sendEvent('stop', {});
-          res.end();
-          console.log('✅ [prompt_agent_stream] Sent error and stop events');
-        }
-      } catch (e) {
-        console.log('⚠️ [prompt_agent_stream] Error sending error event:', e.message);
-        // Don't set responseSent = true on error, in case we can retry
-        // But realistically, if there's an error, the connection is probably dead
-      }
-    } else {
-      console.log('⚠️ [prompt_agent_stream] Error event already sent, skipping');
-    }
-  });
-
-  proc.on('close', (code, signal) => {
-    console.log('🔵 [prompt_agent_stream] Process closed with code:', code);
-    clearInterval(pollInterval);
-    if (timeoutId) clearTimeout(timeoutId);
-
-    let afterCommit = '';
-    try {
-      afterCommit = execSync('git rev-parse HEAD', { cwd: ROOT }).toString().trim();
-    } catch (e) {
-      // Ignore
-    }
-
-    // Always try to send complete event when process finishes
-    // Check actual response stream state rather than relying on responseSent flag
-    // Only skip if we've already sent it (responseSent flag prevents duplicates)
-    if (!responseSent) {
-      try {
-        // Check if response is still writable - check actual stream state
-        if (res.destroyed || res.closed) {
-          console.log('⚠️ [prompt_agent_stream] Response already closed, cannot send complete event');
-        } else {
-          // Calculate metrics
-          const executionTimeMs = Date.now() - startTime;
-          const inputTokens = estimateTokens(prompt);
-          const outputTokens = estimateTokens(stdout);
-          const costEstimate = estimateCost(inputTokens, outputTokens);
-
-          console.log('📊 [prompt_agent_stream] Metrics:', {
-            executionTimeMs,
-            inputTokens,
-            outputTokens,
-            costEstimate
-          });
-
-          // Send events and only set flag after successful send
-          sendEvent('complete', {
-            code,
-            stdout,
-            stderr,
-            changedFiles: Array.from(changedFiles),
-            beforeCommit,
-            afterCommit,
-            metrics: {
-              executionTimeMs,
-              executionTimeSec: parseFloat((executionTimeMs / 1000).toFixed(2)),
-              inputTokens,
-              outputTokens,
-              totalTokens: inputTokens + outputTokens,
-              costEstimate
-            }
-          });
-          // Send stop event after complete
-          sendEvent('stop', {});
-          res.end();
-          responseSent = true;
-          console.log('✅ [prompt_agent_stream] Sent complete and stop events');
-        }
-      } catch (e) {
-        console.log('⚠️ [prompt_agent_stream] Error sending complete event:', e.message);
-        // Don't set responseSent = true on error, in case we can retry
-        // But realistically, if there's an error, the connection is probably dead
-      }
-    } else {
-      console.log('⚠️ [prompt_agent_stream] Complete event already sent, skipping');
-    }
-  });
-
-  // Handle client disconnect - DON'T kill the process, let it complete
-  req.on('close', () => {
-    console.log('🔵 [prompt_agent_stream] Client disconnected (process continues in background)');
-    // Don't kill the process - let it complete
-    // Don't set responseSent here - let proc.on('close') check actual stream state
-  });
-}
-
-module.exports = promptAgentStreamHandler;
-

package/templates/handlers/revert_job.js
REMOVED
@@ -1,31 +0,0 @@
-const { execSync } = require('child_process');
-const { ROOT } = require('../config');
-
-function revertJobHandler(req, res) {
-  console.log('🔵 [revert_job] Endpoint hit');
-  const { beforeCommit } = req.body;
-
-  if (!beforeCommit || typeof beforeCommit !== 'string') {
-    console.log('❌ [revert_job] Error: beforeCommit required');
-    return res.status(400).json({ error: 'beforeCommit required' });
-  }
-
-  // Validate commit hash format (basic sanitization to prevent command injection)
-  if (!/^[a-f0-9]{7,40}$/i.test(beforeCommit)) {
-    console.log('❌ [revert_job] Error: invalid commit hash format');
-    return res.status(400).json({ error: 'Invalid commit hash format' });
-  }
-
-  try {
-    console.log('🔵 [revert_job] Resetting to commit:', beforeCommit);
-    execSync(`git reset --hard ${beforeCommit}`, { cwd: ROOT });
-    console.log('✅ [revert_job] Successfully reverted to commit:', beforeCommit);
-    res.json({ success: true });
-  } catch (e) {
-    console.log('❌ [revert_job] Error:', e.message);
-    res.status(500).json({ error: e.message });
-  }
-}
-
-module.exports = revertJobHandler;
-

package/templates/handlers/scan_code_quality.js
REMOVED
@@ -1,174 +0,0 @@
-const fs = require('fs');
-const path = require('path');
-const glob = require('glob');
-const { ESLint } = require('eslint');
-const madge = require('madge');
-const { getIgnorePatterns, shouldIgnore } = require('../utils/ignore');
-
-module.exports = async function scanCodeQuality(req, res) {
-  try {
-    const {
-      projectPath,
-      maxFileLines = 500,
-      maxFolderFiles = 20,
-      willEnforceLines = true,
-      willEnforceFolders = true,
-      willFlagUnusedImports = true,
-      willFlagDeadCode = true,
-      willFlagCircularDeps = false
-    } = req.body;
-
-    if (!projectPath) {
-      return res.status(400).json({ error: 'projectPath required' });
-    }
-
-    const ignorePatterns = getIgnorePatterns();
-    const issues = [];
-
-    // Common build/output directories to always ignore
-    const defaultIgnoreGlobs = [
-      '**/node_modules/**',
-      '**/dist/**',
-      '**/build/**',
-      '**/static/**',
-      '**/out/**',
-      '**/.next/**',
-      '**/*.min.js',
-      '**/*.bundle.js',
-      '**/*.chunk.js'
-    ];
-
-    // Get all files (needed for multiple checks)
-    const allFiles = glob.sync('**/*.{js,ts,jsx,tsx,py,java,go}', {
-      cwd: projectPath,
-      nodir: true,
-      ignore: defaultIgnoreGlobs
-    });
-
-    const files = allFiles.filter(file => !shouldIgnore(file, ignorePatterns));
-
-    // 1. Check file sizes
-    if (willEnforceLines) {
-      files.forEach(file => {
-        const fullPath = path.join(projectPath, file);
-        const content = fs.readFileSync(fullPath, 'utf8');
-        const lines = content.split('\n').length;
-
-        if (lines > maxFileLines) {
-          issues.push({
-            type: 'large_file',
-            file: file,
-            lines: lines,
-            limit: maxFileLines
-          });
-        }
-      });
-    }
-
-    // 2. Check folder sizes
-    if (willEnforceFolders) {
-      const allFolders = glob.sync('**/', {
-        cwd: projectPath,
-        ignore: defaultIgnoreGlobs
-      });
-
-      const folders = allFolders.filter(folder => !shouldIgnore(folder, ignorePatterns));
-
-      folders.forEach(folder => {
-        const fullPath = path.join(projectPath, folder);
-        const contents = fs.readdirSync(fullPath);
-        const fileCount = contents.filter(item => {
-          const itemPath = path.join(fullPath, item);
-          return fs.statSync(itemPath).isFile();
-        }).length;
-
-        if (fileCount > maxFolderFiles) {
-          issues.push({
-            type: 'crowded_folder',
-            folder: folder,
-            count: fileCount,
-            limit: maxFolderFiles
-          });
-        }
-      });
-    }
-
-    // 3. Check unused imports and dead code with ESLint
-    if (willFlagUnusedImports || willFlagDeadCode) {
-      try {
-        const eslint = new ESLint({
-          cwd: projectPath,
-          useEslintrc: false,
-          overrideConfig: {
-            env: {
-              browser: true,
-              node: true,
-              es6: true
-            },
-            parserOptions: {
-              ecmaVersion: 2021,
-              sourceType: 'module',
-              ecmaFeatures: { jsx: true }
-            },
-            rules: {
-              'no-unused-vars': willFlagDeadCode ? 'error' : 'off'
-            }
-          }
-        });
-
-        const jsFiles = files.filter(f => /\.(js|jsx|ts|tsx)$/.test(f));
-        const results = await eslint.lintFiles(jsFiles.map(f => path.join(projectPath, f)));
-
-        results.forEach(result => {
-          const relativePath = path.relative(projectPath, result.filePath);
-
-          result.messages.forEach(msg => {
-            if (msg.ruleId === 'no-unused-vars') {
-              issues.push({
-                type: 'unused_variable',
-                file: relativePath,
-                line: msg.line,
-                variable: msg.message.match(/'(.+?)'/)?.[1] || 'unknown',
-                message: msg.message
-              });
-            }
-          });
-        });
-      } catch (eslintError) {
-        console.error('ESLint scan error:', eslintError);
-        // Don't fail entire scan if ESLint fails
-      }
-    }
-
-    // 4. Check circular dependencies
-    if (willFlagCircularDeps) {
-      try {
-        const result = await madge(projectPath, {
-          fileExtensions: ['js', 'jsx', 'ts', 'tsx'],
-          excludeRegExp: ignorePatterns.map(p => new RegExp(p))
-        });
-
-        const circular = result.circular();
-
-        if (circular.length > 0) {
-          circular.forEach(cycle => {
-            issues.push({
-              type: 'circular_dependency',
-              files: cycle,
-              message: `Circular dependency: ${cycle.join(' → ')}`
-            });
-          });
-        }
-      } catch (madgeError) {
-        console.error('Madge scan error:', madgeError);
-        // Don't fail entire scan if madge fails
-      }
-    }
-
-    res.json({ issues });
-
-  } catch (error) {
-    console.error('Scan error:', error);
-    res.status(500).json({ error: error.message });
-  }
-};

package/templates/handlers/scan_standards.js
REMOVED
@@ -1,178 +0,0 @@
-module.exports = async function scanStandards(req, res) {
-  console.log('🔵 [scan_standards] Endpoint hit');
-
-  // Return sample response immediately
-  const violations = [
-    {
-      "rule": "React Best Practices",
-      "file": "src/App.js",
-      "description": "Component file exceeds 200 lines (701 lines). Components should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/StatusChecklistModal.js",
-      "description": "Component file exceeds 200 lines (1320 lines). Component should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/code/ElementsGenerator.js",
-      "description": "Component file exceeds 200 lines (1097 lines). Component should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/JobsSidebar.js",
-      "description": "Component file exceeds 200 lines (652 lines). Component should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/jobs/JobsSidebar.js",
-      "description": "Component file exceeds 200 lines (554 lines). Component should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/jobs/JobCard.js",
-      "description": "Component file exceeds 200 lines (514 lines). Component should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/UniversalNode.js",
-      "description": "Component file exceeds 200 lines (538 lines). Component should be split into smaller, focused components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/App.js",
-      "description": "Component contains data fetching directly (scanCodeQuality, scanStandards calls in useEffect). Data fetching should be in custom hooks, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/StatusChecklistModal.js",
-      "description": "Component contains data fetching directly (fetch calls to /analyze_prompt). Data fetching should be in custom hooks, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/code/ElementsGenerator.js",
-      "description": "Component contains data fetching directly (multiple fetch calls for delete, move, write operations). Data fetching should be in custom hooks, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/JobsSidebar.js",
-      "description": "Component contains data fetching directly (fetch calls to /analyze_prompt, /prompt_agent_stream, /revert_job). Data fetching should be in custom hooks, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/jobs/JobsSidebar.js",
-      "description": "Component contains data fetching directly (fetch calls to /analyze_prompt, /prompt_agent_stream, /revert_job). Data fetching should be in custom hooks, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/FileList.js",
-      "description": "Component contains data fetching directly (fetch call to /list). Data fetching should be in custom hooks, not components.",
-      "severity": "medium"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/FileBrowser.js",
-      "description": "Component contains data fetching directly (fetch call to /read for line counts). Data fetching should be in custom hooks, not components.",
-      "severity": "medium"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/UniversalNode.js",
-      "description": "Component contains data fetching directly (fetch calls to /open-cursor and /read). Data fetching should be in custom hooks, not components.",
-      "severity": "medium"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/App.js",
-      "description": "Component contains business logic mixed with UI rendering (file operations, job management logic). Business logic should be in hooks or services, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/StatusChecklistModal.js",
-      "description": "Component contains business logic mixed with UI rendering (job creation, fix operations). Business logic should be in hooks or services, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/code/ElementsGenerator.js",
-      "description": "Component contains business logic mixed with UI rendering (file operations, tree manipulation). Business logic should be in hooks or services, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/components/JobsSidebar.js",
-      "description": "Component contains business logic mixed with UI rendering (job execution, state management). Business logic should be in hooks or services, not components.",
-      "severity": "high"
-    },
-    {
-      "rule": "React Best Practices",
-      "file": "src/App.js",
-      "description": "Props drilling detected - many props passed down multiple component levels (jobs, hoveredJobId, onJobsChange, etc.). Should use context for props beyond 2 levels.",
-      "severity": "medium"
-    },
-    {
-      "rule": "Feature-Based Organization",
-      "file": "src/components/JobsSidebar.js",
-      "description": "Duplicate file exists: both src/components/JobsSidebar.js and src/components/jobs/JobsSidebar.js contain similar functionality. Feature folders should be self-contained with no duplicates.",
-      "severity": "medium"
-    },
-    {
-      "rule": "Clean Architecture",
-      "file": "src/App.js",
-      "description": "Business logic is not independent of frameworks - React components contain business logic that should be in framework-independent services.",
-      "severity": "high"
-    },
-    {
-      "rule": "Clean Architecture",
-      "file": "src/components/StatusChecklistModal.js",
-      "description": "Business logic is not independent of frameworks - React component contains business logic that should be in framework-independent services.",
-      "severity": "high"
-    },
-    {
-      "rule": "Clean Architecture",
-      "file": "src/components/code/ElementsGenerator.js",
-      "description": "Business logic is not independent of frameworks - React component contains business logic that should be in framework-independent services.",
-      "severity": "high"
-    },
-    {
-      "rule": "Layered Architecture",
-      "file": "src/App.js",
-      "description": "Presentation layer contains data access (direct fetch calls). Data access should be in a separate data layer, not presentation layer.",
-      "severity": "high"
-    },
-    {
-      "rule": "Layered Architecture",
-      "file": "src/components/StatusChecklistModal.js",
-      "description": "Presentation layer contains data access (direct fetch calls). Data access should be in a separate data layer, not presentation layer.",
-      "severity": "high"
-    },
-    {
-      "rule": "Layered Architecture",
-      "file": "src/components/code/ElementsGenerator.js",
-      "description": "Presentation layer contains data access (direct fetch calls). Data access should be in a separate data layer, not presentation layer.",
-      "severity": "high"
-    },
-    {
-      "rule": "Layered Architecture",
-      "file": "src/components/JobsSidebar.js",
-      "description": "Presentation layer contains data access (direct fetch calls). Data access should be in a separate data layer, not presentation layer.",
-      "severity": "high"
-    }
-  ];
-
-  console.log('✅ [scan_standards] Returning', violations.length, 'sample violations');
-  res.json({ violations });
-};

package/templates/handlers/write.js
REMOVED
@@ -1,19 +0,0 @@
-const fs = require('fs');
-const path = require('path');
-const { ROOT } = require('../config');
-
-function writeHandler(req, res) {
-  try {
-    const filePath = path.join(ROOT, req.body.path || '');
-    if (!filePath.startsWith(ROOT)) {
-      return res.status(400).send('Invalid path');
-    }
-    fs.writeFileSync(filePath, req.body.content, 'utf8');
-    res.json({ status: 'ok' });
-  } catch (e) {
-    res.status(500).send(e.message);
-  }
-}
-
-module.exports = writeHandler;
-

package/templates/handlers/write_dir.js
REMOVED
@@ -1,20 +0,0 @@
-const fs = require('fs');
-const path = require('path');
-const { ROOT } = require('../config');
-
-function writeDirHandler(req, res) {
-  try {
-    const dirPath = path.join(ROOT, req.body.path || '');
-    if (!dirPath.startsWith(ROOT)) {
-      return res.status(400).send('Invalid path');
-    }
-    // Create directory recursively (creates parent directories if they don't exist)
-    fs.mkdirSync(dirPath, { recursive: true });
-    res.json({ status: 'ok' });
-  } catch (e) {
-    res.status(500).send(e.message);
-  }
-}
-
-module.exports = writeDirHandler;
-