@blockdeepanshu/ai-pr-review-cli 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +88 -0
- package/LICENSE +21 -0
- package/README.md +470 -0
- package/batch-review.js +409 -0
- package/bin/cli.js +113 -0
- package/package.json +72 -0
- package/src/ai.js +38 -0
- package/src/config.js +52 -0
- package/src/git.js +190 -0
- package/src/prompt.js +22 -0
- package/src/reviewer.js +207 -0
package/src/git.js
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
const simpleGit = require("simple-git");
const fs = require("fs");

// Shared simple-git instance bound to the process's current working directory.
const git = simpleGit();
|
|
5
|
+
|
|
6
|
+
/**
 * Resolve the name of the currently checked-out branch.
 * @returns {Promise<string>} Branch name, or 'unknown' when `git status` fails.
 */
async function getCurrentBranch() {
  try {
    const { current } = await git.status();
    return current;
  } catch (statusError) {
    // Best-effort: outside a repo (or on git failure) degrade to a placeholder.
    return 'unknown';
  }
}
|
|
14
|
+
|
|
15
|
+
/**
 * Check whether a branch/ref name resolves in this repository.
 * @param {string} branchName - Local or remote-tracking ref (e.g. 'origin/main').
 * @returns {Promise<boolean>} True when `git rev-parse --verify` succeeds.
 */
async function checkBranchExists(branchName) {
  try {
    await git.raw(['rev-parse', '--verify', branchName]);
    return true;
  } catch (verifyError) {
    return false;
  }
}
|
|
23
|
+
|
|
24
|
+
/**
 * Pick the most likely base branch to diff against.
 * Candidates are probed in preference order (remote-tracking refs first);
 * the first one that exists wins.
 * @returns {Promise<string>} An existing candidate ref, or 'HEAD~1' when none exist.
 */
async function detectBaseBranch() {
  const candidates = [
    'origin/main',
    'origin/master',
    'origin/develop',
    'main',
    'master',
    'develop',
  ];

  for (const candidate of candidates) {
    const exists = await checkBranchExists(candidate);
    if (exists) {
      return candidate;
    }
  }

  // No conventional base branch found; compare against the previous commit.
  return 'HEAD~1';
}
|
|
44
|
+
|
|
45
|
+
/**
 * List every local and remote branch name known to the repository.
 * @returns {Promise<string[]>} All branch names, or [] when listing fails.
 */
async function getAllBranches() {
  try {
    const { all } = await git.branch(['-a']);
    return all;
  } catch (branchError) {
    return [];
  }
}
|
|
53
|
+
|
|
54
|
+
// Maximum number of diff characters forwarded to the AI in one payload.
const MAX_DIFF_LENGTH = 10000;

/**
 * Cap a diff at MAX_DIFF_LENGTH characters so large change sets do not
 * blow past AI-provider rate limits.
 * @param {string} diff - Raw diff text.
 * @returns {string} The diff, truncated with a marker when oversized.
 */
function truncateDiff(diff) {
  if (diff.length <= MAX_DIFF_LENGTH) {
    return diff;
  }
  console.warn(`ā ļø Large diff detected (${diff.length} chars). Truncating to ${MAX_DIFF_LENGTH} chars to avoid rate limits.`);
  return diff.slice(0, MAX_DIFF_LENGTH) + '\n\n[... diff truncated to avoid rate limits ...]';
}

/**
 * Collect the combined diff for review: committed changes relative to
 * `baseBranch`, plus unstaged and staged working-tree changes.
 * Falls back to HEAD~1, then to working-tree-only diffs, when the base-branch
 * comparison fails. The size cap is now applied on EVERY return path
 * (previously only the primary path was truncated, so fallbacks could
 * still exceed rate limits).
 * @param {string} baseBranch - Ref to diff against (e.g. 'origin/main').
 * @returns {Promise<string>} Combined diff text, '' when nothing can be diffed.
 */
async function getDiff(baseBranch) {
  try {
    // Committed changes on this branch relative to the base.
    const committedDiff = await git.diff([`${baseBranch}...HEAD`]);

    // Unstaged and staged working-directory changes.
    const workingDiff = await git.diff();
    const stagedDiff = await git.diff(['--cached']);

    const totalDiff = [committedDiff, workingDiff, stagedDiff]
      .filter((d) => d.length > 0)
      .join('\n\n--- NEXT DIFF SECTION ---\n\n');

    return truncateDiff(totalDiff);
  } catch (error) {
    // Base-branch comparison failed; silently try alternatives.
    try {
      // Diff against the previous commit.
      return truncateDiff(await git.diff(['HEAD~1']));
    } catch (error2) {
      try {
        // Last resort: working-directory and staged changes only.
        const workingDiff = await git.diff();
        const stagedDiff = await git.diff(['--cached']);
        return truncateDiff(workingDiff + '\n' + stagedDiff);
      } catch (error3) {
        return '';
      }
    }
  }
}
|
|
92
|
+
|
|
93
|
+
/**
 * Determine which files changed: committed (vs. `baseBranch`), unstaged, and
 * staged, de-duplicated. Falls back to HEAD~1, then to the working tree alone,
 * and finally to a git-free filesystem scan when every git comparison fails.
 * @param {string} baseBranch - Ref to diff against.
 * @returns {Promise<string[]>} Unique changed file paths.
 */
async function getChangedFiles(baseBranch) {
  try {
    const committedSummary = await git.diffSummary([`${baseBranch}...HEAD`]);
    const workingSummary = await git.diffSummary();
    const stagedSummary = await git.diffSummary(['--cached']);

    const names = (summary) => summary.files.map((f) => f.file);

    // Combine all changed files, removing duplicates.
    return [...new Set([
      ...names(committedSummary),
      ...names(workingSummary),
      ...names(stagedSummary),
    ])];
  } catch (error) {
    // Base-branch comparison failed; silently try alternatives.
    try {
      // Diff against the previous commit.
      const summary = await git.diffSummary(['HEAD~1']);
      return summary.files.map((f) => f.file);
    } catch (error2) {
      try {
        // Working-directory changes only.
        const summary = await git.diffSummary();
        return summary.files.map((f) => f.file);
      } catch (error3) {
        return scanLikelyFiles();
      }
    }
  }
}

/**
 * Git-free fallback: collect JS/TS sources under src/ plus common root files.
 * @returns {string[]} Candidate file paths that exist on disk.
 */
function scanLikelyFiles() {
  const files = [];
  if (fs.existsSync('src')) {
    const srcFiles = fs.readdirSync('src', { recursive: true });
    files.push(...srcFiles
      .filter((f) => typeof f === 'string' && /\.(js|ts|jsx|tsx)$/.test(f))
      // NOTE(review): on Windows the recursive readdir yields backslash-separated
      // subpaths, so these joined paths may mix separators — confirm if targeted.
      .map((f) => `src/${f}`)
    );
  }
  // Also check the repository root for commonly-edited files.
  const rootFiles = ['package.json', 'README.md', 'index.js', 'app.js', 'server.js'];
  for (const file of rootFiles) {
    if (fs.existsSync(file)) {
      files.push(file);
    }
  }
  return files;
}
|
|
145
|
+
|
|
146
|
+
// Patterns for generated/vendored artifacts that are pointless to review.
// Hoisted to module level so the list is built once, not once per file.
const SKIP_PATTERNS = [
  /node_modules/,
  /\.git/,
  /dist\//,
  /build\//,
  /public\/.*\.(js|css)$/, // Built assets
  /\.min\.(js|css)$/,      // Minified files
  /bundle.*\.js$/,         // Webpack bundles
  /chunk.*\.js$/,          // Code-split chunks
  /vendor.*\.js$/,         // Vendor bundles
  /\.map$/,                // Source maps
  /coverage\//,            // Test coverage
  /\.lock$/,               // Lock files
  /\.log$/,                // Log files
];

/**
 * Read the requested files, skipping missing files and generated artifacts.
 * Reads ALL requested files — batching is the caller's responsibility.
 * Each file's content is capped at 3000 characters; package-lock.json is
 * additionally trimmed to its first 30 lines.
 * @param {string[]} files - Paths to read.
 * @param {number} [batchSize=5] - Unused; retained for backward compatibility.
 * @returns {{name: string, content: string}[]} Name/content pairs.
 */
function readFiles(files, batchSize = 5) {
  return files
    .filter((f) => fs.existsSync(f))
    .filter((f) => !SKIP_PATTERNS.some((pattern) => pattern.test(f)))
    .map((file) => {
      let content = fs.readFileSync(file, "utf-8");

      // package-lock.json files are huge; keep only a short header.
      if (file.includes('package-lock.json')) {
        const lines = content.split('\n');
        if (lines.length > 50) {
          content = lines.slice(0, 30).join('\n') + '\n... [truncated large package-lock.json]';
        }
      }

      return {
        name: file,
        content: content.slice(0, 3000),
      };
    });
}
|
|
189
|
+
|
|
190
|
+
module.exports = { getDiff, getChangedFiles, readFiles, getCurrentBranch, checkBranchExists, detectBaseBranch, getAllBranches };
|
package/src/prompt.js
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * Build the code-review prompt sent to the AI model.
 * @param {string} diff - Combined diff text for the change set.
 * @param {{name: string, content: string}[]} files - Files with content excerpts.
 * @returns {string} Prompt instructing the model to return review JSON with
 *   "issues", "typos", and "improvements" arrays and nothing else.
 */
function buildPrompt(diff, files) {
  // Render each file as "name:\ncontent", separated by blank lines.
  const fileSection = files
    .map((f) => `${f.name}:\n${f.content}`)
    .join("\n\n");

  return `You are a strict code reviewer. Find ALL problems in these code changes. Check for:

ISSUES: Syntax errors, logic bugs, invalid JSON/config, security flaws, broken imports, undefined variables, type errors
TYPOS: Spelling/grammar mistakes in comments, strings, documentation
IMPROVEMENTS: Code quality, performance, best practices, refactoring opportunities

DIFF:
${diff}

FILES:
${fileSection}

Be thorough and critical. Return ONLY this JSON (no other text):
{
"issues": ["list specific problems found"],
"typos": ["list spelling/grammar errors"],
"improvements": ["list enhancement suggestions"]
}`;
}
|
|
21
|
+
|
|
22
|
+
module.exports = { buildPrompt };
|
package/src/reviewer.js
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
const { getDiff, getChangedFiles, readFiles, getCurrentBranch, checkBranchExists, detectBaseBranch } = require("./git");
|
|
2
|
+
const { buildPrompt } = require("./prompt");
|
|
3
|
+
const { runAI } = require("./ai");
|
|
4
|
+
const { getConfig } = require("./config");
|
|
5
|
+
|
|
6
|
+
/**
 * Review one batch of files with the AI and normalize the result.
 * @param {{name: string, content: string}[]} files - Files in this batch.
 * @param {string} diff - Combined diff text (shared across batches).
 * @param {object} config - Provider/model configuration passed to runAI.
 * @param {number} batchIndex - Zero-based index of this batch.
 * @param {number} totalBatches - Total number of batches being processed.
 * @param {{verbose?: boolean}} options - CLI options; only `verbose` is read.
 * @returns {Promise<{issues: string[], typos: string[], improvements: string[]}>}
 *   Never rejects: AI/parse failures are reported as entries in `issues`.
 */
async function reviewFileBatch(files, diff, config, batchIndex, totalBatches, options) {
  const { default: chalk } = await import("chalk");

  console.log(chalk.gray(`\n--- Batch ${batchIndex + 1}/${totalBatches} ---`));
  console.log(chalk.gray(`Files: ${files.map(f => f.name).join(', ')}`));

  if (options.verbose) {
    console.log(chalk.gray('\nš Debug - Files with content:'));
    files.forEach((f, i) => {
      console.log(chalk.gray(`  ${i + 1}. ${f.name} (${f.content.length} chars): "${f.content.substring(0, 50)}..."`));
    });
  }

  const prompt = buildPrompt(diff, files);

  if (options.verbose) {
    console.log(chalk.gray(`\nš Debug - Prompt length: ${prompt.length} characters`));
  }

  try {
    const result = await runAI(prompt, config);

    let parsed;
    try {
      parsed = JSON.parse(result);
    } catch (jsonError) {
      // The model wrapped the JSON in prose; try to extract the object.
      // NOTE(review): the lazy trailing "[\s\S]*?\}" stops at the FIRST "}"
      // after "improvements", so a "}" inside a suggestion string truncates
      // the match — acceptable here since extraction failure is handled below.
      const jsonMatch = result.match(/\{[\s\S]*?"issues"[\s\S]*?"typos"[\s\S]*?"improvements"[\s\S]*?\}/);
      if (jsonMatch) {
        try {
          parsed = JSON.parse(jsonMatch[0]);
        } catch (extractError) {
          parsed = {
            issues: [`Batch ${batchIndex + 1}: JSON parsing failed. Response: ${result.substring(0, 200)}...`],
            typos: [], improvements: []
          };
        }
      } else {
        parsed = {
          issues: [`Batch ${batchIndex + 1}: AI response format error. Response: ${result.substring(0, 200)}...`],
          typos: [], improvements: []
        };
      }
    }

    // Normalize: every category MUST be an array. A truthiness check alone
    // (the previous behavior) let a string/object through, which corrupts
    // the caller's push(...spread) — a string spreads char-by-char and a
    // plain object throws.
    if (!Array.isArray(parsed.issues)) parsed.issues = [];
    if (!Array.isArray(parsed.typos)) parsed.typos = [];
    if (!Array.isArray(parsed.improvements)) parsed.improvements = [];

    console.log(chalk.green(`ā
 Batch ${batchIndex + 1} completed: ${parsed.issues.length} issues, ${parsed.typos.length} typos, ${parsed.improvements.length} improvements`));

    return parsed;
  } catch (error) {
    console.log(chalk.red(`ā Batch ${batchIndex + 1} failed: ${error.message}`));
    return { issues: [`Batch ${batchIndex + 1} failed: ${error.message}`], typos: [], improvements: [] };
  }
}
|
|
64
|
+
|
|
65
|
+
/**
 * Run an AI code review of the current branch's changes and print the results.
 * Resolves the base branch (CLI option > config > auto-detection), gathers the
 * diff and changed files, reviews them in rate-limited batches, and prints
 * issues/typos/improvements. Errors are reported on the console; the function
 * never throws.
 * @param {{base?: string, provider?: string, model?: string, verbose?: boolean}} [options]
 *   CLI options; CLI values override config-file values.
 * @returns {Promise<void>}
 */
async function review(options = {}) {
  const { default: ora } = await import("ora");
  const { default: chalk } = await import("chalk");

  const spinner = ora("š¤ AI is analyzing your code changes...").start();

  try {
    // Merge config with CLI options (CLI wins).
    const baseConfig = getConfig();
    const config = {
      ...baseConfig,
      ...(options.provider && { provider: options.provider }),
      ...(options.model && { model: options.model })
    };

    // Base branch precedence: CLI option > config file > auto-detection.
    let baseBranch = options.base || config.baseBranch;
    if (!baseBranch) {
      if (options.verbose) console.log(chalk.gray("š Auto-detecting base branch..."));
      baseBranch = await detectBaseBranch();
    }

    // Current git status, used for diagnostics below.
    const currentBranch = await getCurrentBranch();
    const baseBranchExists = await checkBranchExists(baseBranch);

    console.log(chalk.blue(`\nš Analyzing changes on branch: ${chalk.bold(currentBranch)}`));
    if (options.verbose || !baseBranchExists) {
      console.log(chalk.gray(`   Comparing against: ${baseBranch} ${baseBranchExists ? 'ā' : 'ā'}`));
    }

    const diff = await getDiff(baseBranch);
    const changedFiles = await getChangedFiles(baseBranch);

    // Logged once here (previously this line was duplicated after batching).
    console.log(chalk.green(`š Found ${changedFiles.length} changed file${changedFiles.length === 1 ? '' : 's'}: ${chalk.bold(changedFiles.join(', '))}`));

    if (changedFiles.length === 0 && diff.length === 0) {
      spinner.info(chalk.yellow('No changes found to review'));
      console.log(chalk.yellow('\nš” Possible reasons:'));
      if (currentBranch === baseBranch.replace('origin/', '')) {
        console.log(chalk.gray('  ⢠You are currently on the base branch. Create a feature branch first.'));
      }
      if (!baseBranchExists) {
        console.log(chalk.gray('  ⢠The base branch does not exist. Try "main", "master", or "origin/master".'));
      }
      console.log(chalk.gray('  ⢠No commits made yet on this branch.'));
      console.log(chalk.gray('  ⢠Try: git fetch origin (to update remote branches)'));
      console.log(chalk.gray('  ⢠Try: git status (to see uncommitted changes)'));
      return;
    }

    // Split files into batches and read their content, to stay under
    // provider rate limits.
    const BATCH_SIZE = 5;
    const fileBatches = [];
    for (let i = 0; i < changedFiles.length; i += BATCH_SIZE) {
      const batchFiles = readFiles(changedFiles.slice(i, i + BATCH_SIZE));
      if (batchFiles.length > 0) {
        fileBatches.push(batchFiles);
      }
    }

    if (fileBatches.length === 0) {
      spinner.info(chalk.yellow('No valid files to review'));
      return;
    }

    console.log(chalk.blue(`\nš Processing ${fileBatches.length} batch${fileBatches.length === 1 ? '' : 'es'} of files (${BATCH_SIZE} files per batch)...`));

    // Review each batch and accumulate the results.
    const allResults = { issues: [], typos: [], improvements: [] };
    for (let batchIndex = 0; batchIndex < fileBatches.length; batchIndex++) {
      spinner.text = `š¤ AI is analyzing batch ${batchIndex + 1}/${fileBatches.length}...`;

      const batchResult = await reviewFileBatch(fileBatches[batchIndex], diff, config, batchIndex, fileBatches.length, options);

      allResults.issues.push(...batchResult.issues);
      allResults.typos.push(...batchResult.typos);
      allResults.improvements.push(...batchResult.improvements);

      // Pause between batches to avoid rate limiting (skip after the last one).
      if (batchIndex < fileBatches.length - 1) {
        console.log(chalk.gray('ā³ Waiting 3 seconds before next batch...'));
        await new Promise(resolve => setTimeout(resolve, 3000));
      }
    }

    spinner.succeed(chalk.green('Review completed!'));

    // One section per category, with an empty-state message for each.
    const printCategory = (header, items, color, emptyMessage) => {
      console.log(header);
      if (items.length === 0) {
        console.log(chalk.gray(`  ${emptyMessage}`));
      } else {
        items.forEach((item, index) => console.log(color(`  ${index + 1}. ${item}`)));
      }
    };

    printCategory(chalk.red.bold("\nšØ Issues"), allResults.issues, chalk.red, "No critical issues found");
    printCategory(chalk.yellow.bold("\nāļø Typos"), allResults.typos, chalk.yellow, "No typos found");
    printCategory(chalk.cyan.bold("\nš” Improvements"), allResults.improvements, chalk.cyan, "No improvement suggestions");

    // Summary.
    const totalCount = allResults.issues.length + allResults.typos.length + allResults.improvements.length;
    if (totalCount === 0) {
      console.log(chalk.green.bold("\n⨠Great job! Your code looks clean and well-written."));
    } else {
      console.log(chalk.blue(`\nš Review Summary: ${allResults.issues.length} issues, ${allResults.typos.length} typos, ${allResults.improvements.length} improvements`));
    }
  } catch (e) {
    spinner.fail(chalk.red("Review failed"));
    console.error(chalk.red(`ā Error: ${e.message}`));
  }
}
|
|
206
|
+
|
|
207
|
+
module.exports = review;
|