scai 0.1.49 → 0.1.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -10
- package/dist/commands/AskCmd.js +5 -4
- package/dist/commands/FindCmd.js +10 -3
- package/dist/commands/ReviewCmd.js +178 -0
- package/dist/config.js +11 -0
- package/dist/daemon/daemonBatch.js +30 -32
- package/dist/db/functionExtractors/extractFromJava.js +6 -2
- package/dist/db/functionExtractors/extractFromJs.js +93 -83
- package/dist/db/functionExtractors/extractFromXML.js +6 -2
- package/dist/db/functionExtractors/index.js +4 -14
- package/dist/db/sqlTemplates.js +48 -0
- package/dist/fileRules/fileClassifier.js +42 -3
- package/dist/github/api.js +43 -0
- package/dist/github/auth.js +15 -0
- package/dist/github/github.js +71 -0
- package/dist/github/githubAuthCheck.js +17 -0
- package/dist/github/repo.js +57 -0
- package/dist/github/token.js +14 -0
- package/dist/github/types.js +1 -0
- package/dist/index.js +54 -0
- package/dist/pipeline/modules/reviewModule.js +21 -0
- package/dist/utils/fileTree.js +23 -22
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -1,16 +1,15 @@
|
|
|
1
|
-
# ⚙️ scai — Smart Commit AI ✨
|
|
1
|
+
# ⚙️ scai — Smart Commit & Review AI ✨
|
|
2
2
|
|
|
3
|
+
> AI-powered CLI tool for commit messages **and** pull request reviews — using local models.
|
|
3
4
|
|
|
4
|
-
|
|
5
|
+
**scai** is your AI pair‑programmer in the terminal. Focus on coding while scai:
|
|
5
6
|
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
-
|
|
10
|
-
-
|
|
11
|
-
-
|
|
12
|
-
- 🔍 Search and ask questions across your codebase (ALPHA)
|
|
13
|
-
- 🔐 100% local — no API keys, no cloud, no telemetry
|
|
7
|
+
- 🤖 **Reviews open pull requests** and provides AI‑driven feedback (BETA)
|
|
8
|
+
- 💬 **Suggests intelligent Git commit messages** based on your staged diff
|
|
9
|
+
- 📝 Summarizes files in plain English
|
|
10
|
+
- 📜 Auto‑updates your changelog
|
|
11
|
+
- 🔍 (ALPHA) Search & ask questions across your codebase
|
|
12
|
+
- 🔐 100% local — no API keys, no cloud, no telemetry
|
|
14
13
|
|
|
15
14
|
---
|
|
16
15
|
|
package/dist/commands/AskCmd.js
CHANGED
|
@@ -5,12 +5,12 @@ import { searchFiles, queryFiles, getFunctionsForFiles } from '../db/fileIndex.j
|
|
|
5
5
|
import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
|
|
6
6
|
import { generate } from '../lib/generate.js';
|
|
7
7
|
import { buildContextualPrompt } from '../utils/buildContextualPrompt.js';
|
|
8
|
-
import {
|
|
8
|
+
import { generateFocusedFileTree } from '../utils/fileTree.js';
|
|
9
9
|
import { log } from '../utils/log.js';
|
|
10
10
|
import { PROMPT_LOG_PATH, SCAI_HOME, INDEX_DIR, RELATED_FILES_LIMIT, MAX_SUMMARY_LINES } from '../constants.js';
|
|
11
11
|
export async function runAskCommand(query) {
|
|
12
12
|
if (!query) {
|
|
13
|
-
query = await promptOnce('
|
|
13
|
+
query = await promptOnce('💬 Ask your question:\n');
|
|
14
14
|
}
|
|
15
15
|
query = query.trim();
|
|
16
16
|
if (!query) {
|
|
@@ -103,7 +103,7 @@ export async function runAskCommand(query) {
|
|
|
103
103
|
// 🟩 STEP 6: Generate file tree
|
|
104
104
|
let fileTree = '';
|
|
105
105
|
try {
|
|
106
|
-
fileTree =
|
|
106
|
+
fileTree = generateFocusedFileTree(INDEX_DIR, filepath, 2);
|
|
107
107
|
}
|
|
108
108
|
catch (e) {
|
|
109
109
|
console.warn('⚠️ Could not generate file tree:', e);
|
|
@@ -144,11 +144,12 @@ export async function runAskCommand(query) {
|
|
|
144
144
|
// 🟩 Helper: Prompt once
|
|
145
145
|
function promptOnce(promptText) {
|
|
146
146
|
return new Promise(resolve => {
|
|
147
|
+
console.log(promptText); // Instead of putting it *in* rl.question
|
|
147
148
|
const rl = readline.createInterface({
|
|
148
149
|
input: process.stdin,
|
|
149
150
|
output: process.stdout,
|
|
150
151
|
});
|
|
151
|
-
rl.question(
|
|
152
|
+
rl.question('> ', answer => {
|
|
152
153
|
rl.close();
|
|
153
154
|
resolve(answer.trim());
|
|
154
155
|
});
|
package/dist/commands/FindCmd.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { queryFiles } from '../db/fileIndex.js';
|
|
2
2
|
import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
|
|
3
3
|
import path from 'path';
|
|
4
|
+
import os from 'os';
|
|
4
5
|
export async function runFindCommand(query) {
|
|
5
6
|
if (!query) {
|
|
6
7
|
console.error('❌ Please provide a search query.\n👉 Usage: scai find "keyword"');
|
|
@@ -13,9 +14,15 @@ export async function runFindCommand(query) {
|
|
|
13
14
|
console.log('⚠️ No matching files found.');
|
|
14
15
|
return;
|
|
15
16
|
}
|
|
16
|
-
console.log(`✅ Found ${results.length} result(s)
|
|
17
|
-
|
|
17
|
+
console.log(`✅ Found ${results.length} result(s).\n`);
|
|
18
|
+
const homeDir = os.homedir();
|
|
18
19
|
results.forEach((result, index) => {
|
|
19
|
-
|
|
20
|
+
let absPath = path.resolve(result.path); // ensure absolute path
|
|
21
|
+
if (absPath.startsWith(homeDir)) {
|
|
22
|
+
absPath = absPath.replace(homeDir, '~');
|
|
23
|
+
}
|
|
24
|
+
// Normalize to forward slashes (especially for Windows)
|
|
25
|
+
absPath = absPath.replace(/\\/g, '/');
|
|
26
|
+
console.log(`📄 [${index + 1}] ${absPath}`);
|
|
20
27
|
});
|
|
21
28
|
}
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
import readline from 'readline';
|
|
2
|
+
import { reviewModule } from '../pipeline/modules/reviewModule.js';
|
|
3
|
+
import { fetchOpenPullRequests, fetchPullRequestDiff, getGitHubUsername, submitReview } from '../github/github.js';
|
|
4
|
+
import { getRepoDetails } from '../github/repo.js';
|
|
5
|
+
import { ensureGitHubAuth } from '../github/auth.js';
|
|
6
|
+
// Function to fetch the PRs with requested reviews for a specific branch (default to 'main')
|
|
7
|
+
export async function getPullRequestsForReview(token, owner, repo, username, branch = 'main', filterForUser = true) {
|
|
8
|
+
const prs = await fetchOpenPullRequests(token, owner, repo);
|
|
9
|
+
const filtered = [];
|
|
10
|
+
const failedPRs = [];
|
|
11
|
+
for (const pr of prs) {
|
|
12
|
+
const isDraft = pr.draft;
|
|
13
|
+
const isMerged = pr.merged_at != null;
|
|
14
|
+
const shouldInclude = !isDraft &&
|
|
15
|
+
!isMerged &&
|
|
16
|
+
(!filterForUser || pr.requested_reviewers?.some(r => r.login === username));
|
|
17
|
+
if (shouldInclude) {
|
|
18
|
+
const diffUrl = `https://api.github.com/repos/${owner}/${repo}/pulls/${pr.number}.diff`;
|
|
19
|
+
try {
|
|
20
|
+
const diffRes = await fetch(diffUrl, {
|
|
21
|
+
headers: {
|
|
22
|
+
Authorization: `token ${token}`,
|
|
23
|
+
Accept: 'application/vnd.github.v3.diff',
|
|
24
|
+
},
|
|
25
|
+
});
|
|
26
|
+
if (!diffRes.ok) {
|
|
27
|
+
throw new Error(`${diffRes.status} ${diffRes.statusText}`);
|
|
28
|
+
}
|
|
29
|
+
const diff = await diffRes.text();
|
|
30
|
+
filtered.push({ pr, diff });
|
|
31
|
+
}
|
|
32
|
+
catch (err) {
|
|
33
|
+
console.warn(`⚠️ Could not fetch diff for PR #${pr.number}: ${err.message}`);
|
|
34
|
+
failedPRs.push(pr);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
if (failedPRs.length > 0) {
|
|
39
|
+
console.warn(`\n⚠️ Skipped ${failedPRs.length} PR(s) due to diff fetch failures:`);
|
|
40
|
+
for (const pr of failedPRs) {
|
|
41
|
+
console.warn(` - #${pr.number}: ${pr.title}`);
|
|
42
|
+
}
|
|
43
|
+
console.warn('These PRs will not be included in the review summary.\n');
|
|
44
|
+
}
|
|
45
|
+
return filtered;
|
|
46
|
+
}
|
|
47
|
+
// Ask user to pick a PR to review
|
|
48
|
+
function askUserToPickPR(prs) {
|
|
49
|
+
return new Promise((resolve) => {
|
|
50
|
+
if (prs.length === 0) {
|
|
51
|
+
console.log("⚠️ No pull requests with review requested.");
|
|
52
|
+
return resolve(null);
|
|
53
|
+
}
|
|
54
|
+
console.log("\n📦 Open Pull Requests with review requested:");
|
|
55
|
+
prs.forEach((pr, i) => {
|
|
56
|
+
console.log(`${i + 1}) #${pr.number} - ${pr.title}`);
|
|
57
|
+
});
|
|
58
|
+
const rl = readline.createInterface({
|
|
59
|
+
input: process.stdin,
|
|
60
|
+
output: process.stdout,
|
|
61
|
+
});
|
|
62
|
+
rl.question(`\n👉 Choose a PR to review [1-${prs.length}]: `, (answer) => {
|
|
63
|
+
rl.close();
|
|
64
|
+
const index = parseInt(answer, 10);
|
|
65
|
+
if (!isNaN(index) && index >= 1 && index <= prs.length) {
|
|
66
|
+
resolve(index - 1); // Return array index, not PR number
|
|
67
|
+
}
|
|
68
|
+
else {
|
|
69
|
+
resolve(null);
|
|
70
|
+
}
|
|
71
|
+
});
|
|
72
|
+
});
|
|
73
|
+
}
|
|
74
|
+
// Ask user to approve or reject the review suggestion
|
|
75
|
+
function askReviewApproval(suggestion) {
|
|
76
|
+
return new Promise((resolve) => {
|
|
77
|
+
console.log('\n💡 AI-suggested review:\n');
|
|
78
|
+
console.log(suggestion);
|
|
79
|
+
console.log('\n---');
|
|
80
|
+
console.log('1) ✅ Approve');
|
|
81
|
+
console.log('2) ❌ Reject');
|
|
82
|
+
console.log('3) ✍️ Edit');
|
|
83
|
+
console.log('4) Write your own review');
|
|
84
|
+
console.log('5) 🚪 Cancel');
|
|
85
|
+
const rl = readline.createInterface({
|
|
86
|
+
input: process.stdin,
|
|
87
|
+
output: process.stdout,
|
|
88
|
+
});
|
|
89
|
+
rl.question(`\n👉 Choose an option [1-5]: `, (answer) => {
|
|
90
|
+
rl.close();
|
|
91
|
+
if (answer === '1') {
|
|
92
|
+
resolve('approve');
|
|
93
|
+
}
|
|
94
|
+
else if (answer === '2') {
|
|
95
|
+
resolve('reject');
|
|
96
|
+
}
|
|
97
|
+
else if (answer === '3') {
|
|
98
|
+
resolve('edit');
|
|
99
|
+
}
|
|
100
|
+
else if (answer === '4') {
|
|
101
|
+
resolve('custom');
|
|
102
|
+
}
|
|
103
|
+
else if (answer === '5') {
|
|
104
|
+
resolve('cancel');
|
|
105
|
+
}
|
|
106
|
+
else {
|
|
107
|
+
console.log('⚠️ Invalid selection. Defaulting to "approve".');
|
|
108
|
+
resolve('approve');
|
|
109
|
+
}
|
|
110
|
+
});
|
|
111
|
+
});
|
|
112
|
+
}
|
|
113
|
+
// Prompt for custom review input
|
|
114
|
+
function promptCustomReview() {
|
|
115
|
+
return new Promise((resolve) => {
|
|
116
|
+
const rl = readline.createInterface({
|
|
117
|
+
input: process.stdin,
|
|
118
|
+
output: process.stdout,
|
|
119
|
+
});
|
|
120
|
+
rl.question('\n📝 Enter your custom review:\n> ', (input) => {
|
|
121
|
+
rl.close();
|
|
122
|
+
resolve(input.trim());
|
|
123
|
+
});
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
export async function reviewPullRequestCmd(branch = 'main', showAll = false) {
|
|
127
|
+
try {
|
|
128
|
+
console.log("🔍 Fetching pull requests and diffs...");
|
|
129
|
+
const token = await ensureGitHubAuth();
|
|
130
|
+
const username = await getGitHubUsername(token);
|
|
131
|
+
const { owner, repo } = getRepoDetails();
|
|
132
|
+
console.log(`👤 Authenticated user: ${username}`);
|
|
133
|
+
console.log(`📦 GitHub repo: ${owner}/${repo}`);
|
|
134
|
+
console.log(`🔍 Filtering ${showAll ? "all" : "user-specific"} PRs for branch: ${branch}`);
|
|
135
|
+
const prsWithReviewRequested = await getPullRequestsForReview(token, owner, repo, username, branch, !showAll);
|
|
136
|
+
console.log(`🔍 Found ${prsWithReviewRequested.length} PR(s) requiring review.`);
|
|
137
|
+
if (prsWithReviewRequested.length === 0) {
|
|
138
|
+
console.log("⚠️ No PRs found with review requested.");
|
|
139
|
+
return;
|
|
140
|
+
}
|
|
141
|
+
const selectedIndex = await askUserToPickPR(prsWithReviewRequested.map(p => p.pr));
|
|
142
|
+
if (selectedIndex === null)
|
|
143
|
+
return;
|
|
144
|
+
const { pr, diff } = prsWithReviewRequested[selectedIndex];
|
|
145
|
+
let prDiff = diff;
|
|
146
|
+
if (!prDiff) {
|
|
147
|
+
console.log(`🔍 Fetching diff for PR #${pr.number}...`);
|
|
148
|
+
prDiff = await fetchPullRequestDiff(pr, token);
|
|
149
|
+
}
|
|
150
|
+
const result = await reviewModule.run({ content: prDiff });
|
|
151
|
+
const reviewSuggestion = result.content || 'No review suggestion generated.';
|
|
152
|
+
const reviewChoice = await askReviewApproval(reviewSuggestion);
|
|
153
|
+
let reviewEvent;
|
|
154
|
+
if (reviewChoice === 'approve') {
|
|
155
|
+
reviewEvent = 'APPROVE';
|
|
156
|
+
console.log(`✅ Review for PR #${pr.number} approved.`);
|
|
157
|
+
await submitReview(pr.number, reviewSuggestion, reviewEvent);
|
|
158
|
+
}
|
|
159
|
+
else if (reviewChoice === 'reject') {
|
|
160
|
+
reviewEvent = 'REQUEST_CHANGES';
|
|
161
|
+
console.log(`❌ Review for PR #${pr.number} rejected.`);
|
|
162
|
+
await submitReview(pr.number, reviewSuggestion, reviewEvent);
|
|
163
|
+
}
|
|
164
|
+
else if (reviewChoice === 'cancel') {
|
|
165
|
+
console.log(`🚪 Review process for PR #${pr.number} cancelled.`);
|
|
166
|
+
return; // Exit the function and cancel the review process
|
|
167
|
+
}
|
|
168
|
+
else {
|
|
169
|
+
reviewEvent = 'COMMENT';
|
|
170
|
+
const customReview = await promptCustomReview();
|
|
171
|
+
console.log(`💬 Custom review: ${customReview}`);
|
|
172
|
+
await submitReview(pr.number, customReview, reviewEvent);
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
catch (err) {
|
|
176
|
+
console.error("❌ Error reviewing PR:", err.message);
|
|
177
|
+
}
|
|
178
|
+
}
|
package/dist/config.js
CHANGED
|
@@ -5,6 +5,7 @@ const defaultConfig = {
|
|
|
5
5
|
model: 'llama3',
|
|
6
6
|
language: 'ts',
|
|
7
7
|
indexDir: INDEX_DIR, // Default index directory from constants
|
|
8
|
+
githubToken: '', // Add githubToken to default config
|
|
8
9
|
};
|
|
9
10
|
// Function to ensure the configuration directory exists
|
|
10
11
|
function ensureConfigDir() {
|
|
@@ -57,6 +58,15 @@ export const Config = {
|
|
|
57
58
|
writeConfig({ indexDir });
|
|
58
59
|
console.log(`📁 Index directory set to: ${indexDir}`);
|
|
59
60
|
},
|
|
61
|
+
// Get the GitHub token from the config
|
|
62
|
+
getGitHubToken() {
|
|
63
|
+
return readConfig().githubToken || null;
|
|
64
|
+
},
|
|
65
|
+
// Set the GitHub token in the config
|
|
66
|
+
setGitHubToken(token) {
|
|
67
|
+
writeConfig({ githubToken: token });
|
|
68
|
+
console.log("✅ GitHub token updated");
|
|
69
|
+
},
|
|
60
70
|
// Show the current configuration
|
|
61
71
|
show() {
|
|
62
72
|
const cfg = readConfig();
|
|
@@ -64,5 +74,6 @@ export const Config = {
|
|
|
64
74
|
console.log(` Model : ${cfg.model}`);
|
|
65
75
|
console.log(` Language : ${cfg.language}`);
|
|
66
76
|
console.log(` Index dir : ${cfg.indexDir}`);
|
|
77
|
+
console.log(` GitHub Token: ${cfg.githubToken ? '*****' : 'Not Set'}`);
|
|
67
78
|
}
|
|
68
79
|
};
|
|
@@ -8,7 +8,12 @@ import { log } from '../utils/log.js';
|
|
|
8
8
|
import lockfile from 'proper-lockfile';
|
|
9
9
|
import { summaryModule } from '../pipeline/modules/summaryModule.js';
|
|
10
10
|
import { classifyFile } from '../fileRules/classifyFile.js';
|
|
11
|
+
import { markFileAsSkippedByPath, selectUnprocessedFiles, updateFileWithSummaryAndEmbedding, } from '../db/sqlTemplates.js';
|
|
11
12
|
const MAX_FILES_PER_BATCH = 5;
|
|
13
|
+
/**
|
|
14
|
+
* Acquires a lock on the database to ensure that only one daemon batch
|
|
15
|
+
* can modify it at a time.
|
|
16
|
+
*/
|
|
12
17
|
async function lockDb() {
|
|
13
18
|
try {
|
|
14
19
|
return await lockfile.lock(DB_PATH);
|
|
@@ -18,15 +23,18 @@ async function lockDb() {
|
|
|
18
23
|
throw err;
|
|
19
24
|
}
|
|
20
25
|
}
|
|
26
|
+
/**
|
|
27
|
+
* Runs a daemon batch to process up to MAX_FILES_PER_BATCH unprocessed files.
|
|
28
|
+
* This includes:
|
|
29
|
+
* - Verifying file existence and validity
|
|
30
|
+
* - Generating summaries and embeddings if needed
|
|
31
|
+
* - Extracting functions from source files
|
|
32
|
+
* - Marking skipped files as necessary
|
|
33
|
+
*/
|
|
21
34
|
export async function runDaemonBatch() {
|
|
22
35
|
log('🟡 Starting daemon batch...');
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
FROM files
|
|
26
|
-
WHERE processing_status = 'unprocessed' OR summary IS NULL OR summary = ''
|
|
27
|
-
ORDER BY last_modified DESC
|
|
28
|
-
LIMIT ?
|
|
29
|
-
`).all(MAX_FILES_PER_BATCH);
|
|
36
|
+
// Selects up to MAX_FILES_PER_BATCH files that haven't been processed yet
|
|
37
|
+
const rows = db.prepare(selectUnprocessedFiles).all(MAX_FILES_PER_BATCH);
|
|
30
38
|
if (rows.length === 0) {
|
|
31
39
|
log('✅ No files left to process.');
|
|
32
40
|
return false;
|
|
@@ -34,72 +42,62 @@ export async function runDaemonBatch() {
|
|
|
34
42
|
const release = await lockDb();
|
|
35
43
|
for (const row of rows) {
|
|
36
44
|
log(`📂 Processing file: ${row.path}`);
|
|
45
|
+
// Skip if file is missing from the file system
|
|
37
46
|
if (!fsSync.existsSync(row.path)) {
|
|
38
47
|
log(`⚠️ Skipped missing file: ${row.path}`);
|
|
39
|
-
db.prepare(
|
|
48
|
+
db.prepare(markFileAsSkippedByPath).run({ path: row.path });
|
|
40
49
|
continue;
|
|
41
50
|
}
|
|
51
|
+
// Skip if file is classified as something we don't process
|
|
42
52
|
const classification = classifyFile(row.path);
|
|
43
53
|
if (classification !== 'valid') {
|
|
44
54
|
log(`⏭️ Skipping (${classification}): ${row.path}`);
|
|
45
|
-
db.prepare(
|
|
55
|
+
db.prepare(markFileAsSkippedByPath).run({ path: row.path });
|
|
46
56
|
continue;
|
|
47
57
|
}
|
|
48
58
|
try {
|
|
49
59
|
const content = await fs.readFile(row.path, 'utf-8');
|
|
50
|
-
//
|
|
60
|
+
// Determine whether the file needs to be re-summarized
|
|
51
61
|
const needsResummary = !row.summary ||
|
|
52
62
|
!row.indexed_at ||
|
|
53
63
|
(row.last_modified && new Date(row.last_modified) > new Date(row.indexed_at));
|
|
54
64
|
if (needsResummary) {
|
|
55
65
|
log(`📝 Generating summary for ${row.path}...`);
|
|
66
|
+
// Generate a summary using the summary pipeline
|
|
56
67
|
const summaryResult = await summaryModule.run({ content, filepath: row.path });
|
|
57
68
|
const summary = summaryResult?.summary?.trim() || null;
|
|
58
69
|
let embedding = null;
|
|
70
|
+
// Generate an embedding from the summary (if present)
|
|
59
71
|
if (summary) {
|
|
60
72
|
const vector = await generateEmbedding(summary);
|
|
61
73
|
if (vector) {
|
|
62
74
|
embedding = JSON.stringify(vector);
|
|
63
75
|
}
|
|
64
76
|
}
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
`).run({ summary, embedding, path: row.path });
|
|
77
|
+
// Update the file record with the new summary and embedding
|
|
78
|
+
db.prepare(updateFileWithSummaryAndEmbedding).run({
|
|
79
|
+
summary,
|
|
80
|
+
embedding,
|
|
81
|
+
path: row.path,
|
|
82
|
+
});
|
|
72
83
|
log(`✅ Updated summary & embedding for ${row.path}`);
|
|
73
84
|
}
|
|
74
85
|
else {
|
|
75
86
|
log(`⚡ Skipped summary (up-to-date) for ${row.path}`);
|
|
76
87
|
}
|
|
77
|
-
//
|
|
88
|
+
// Extract top-level functions from the file and update the DB
|
|
78
89
|
const extracted = await indexFunctionsForFile(row.path, row.id);
|
|
79
90
|
if (extracted) {
|
|
80
|
-
db.prepare(`
|
|
81
|
-
UPDATE files
|
|
82
|
-
SET processing_status = 'extracted',
|
|
83
|
-
functions_extracted_at = datetime('now')
|
|
84
|
-
WHERE id = @id
|
|
85
|
-
`).run({ id: row.id });
|
|
86
91
|
log(`✅ Function extraction complete for ${row.path}\n`);
|
|
87
92
|
}
|
|
88
93
|
else {
|
|
89
|
-
// If no functions were found, set processing status to 'skipped' or 'failed'
|
|
90
|
-
db.prepare(`
|
|
91
|
-
UPDATE files
|
|
92
|
-
SET processing_status = 'failed',
|
|
93
|
-
functions_extracted_at = datetime('now')
|
|
94
|
-
WHERE id = @id
|
|
95
|
-
`).run({ id: row.id });
|
|
96
94
|
log(`ℹ️ No functions extracted for ${row.path}\n`);
|
|
97
95
|
}
|
|
98
96
|
}
|
|
99
97
|
catch (err) {
|
|
100
98
|
log(`❌ Failed: ${row.path}: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
101
|
-
db.prepare(`UPDATE files SET processing_status = 'failed' WHERE path = @path`).run({ path: row.path });
|
|
102
99
|
}
|
|
100
|
+
// Add a small delay to throttle processing
|
|
103
101
|
await new Promise(resolve => setTimeout(resolve, 200));
|
|
104
102
|
}
|
|
105
103
|
await release();
|
|
@@ -1,4 +1,8 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
import { db } from '../client.js';
|
|
2
|
+
import { markFileAsSkippedTemplate } from '../sqlTemplates.js';
|
|
3
|
+
export async function extractFromJava(filePath, _content, fileId) {
|
|
3
4
|
console.warn(`⛔️ Java extraction not implemented: ${filePath}`);
|
|
5
|
+
// Mark the file as skipped with the relevant status update
|
|
6
|
+
db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
|
|
7
|
+
return false;
|
|
4
8
|
}
|
|
@@ -4,6 +4,8 @@ import { generateEmbedding } from '../../lib/generateEmbedding.js';
|
|
|
4
4
|
import { db } from '../client.js';
|
|
5
5
|
import path from 'path';
|
|
6
6
|
import { log } from '../../utils/log.js';
|
|
7
|
+
import fs from 'fs';
|
|
8
|
+
import { markFileAsSkippedTemplate, markFileAsExtractedTemplate, markFileAsFailedTemplate } from '../sqlTemplates.js';
|
|
7
9
|
function getFunctionName(node, parent, fileName) {
|
|
8
10
|
if (node.id?.name)
|
|
9
11
|
return node.id.name;
|
|
@@ -18,96 +20,104 @@ function getFunctionName(node, parent, fileName) {
|
|
|
18
20
|
return `${fileName}:<anon>`;
|
|
19
21
|
}
|
|
20
22
|
export async function extractFromJS(filePath, content, fileId) {
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
const functions = [];
|
|
27
|
-
walkAncestor(ast, {
|
|
28
|
-
FunctionDeclaration(node, ancestors) {
|
|
29
|
-
const parent = ancestors[ancestors.length - 2];
|
|
30
|
-
const name = getFunctionName(node, parent, path.basename(filePath));
|
|
31
|
-
functions.push({
|
|
32
|
-
name,
|
|
33
|
-
start_line: node.loc?.start.line ?? -1,
|
|
34
|
-
end_line: node.loc?.end.line ?? -1,
|
|
35
|
-
content: content.slice(node.start, node.end),
|
|
36
|
-
});
|
|
37
|
-
},
|
|
38
|
-
FunctionExpression(node, ancestors) {
|
|
39
|
-
const parent = ancestors[ancestors.length - 2];
|
|
40
|
-
const name = getFunctionName(node, parent, path.basename(filePath));
|
|
41
|
-
functions.push({
|
|
42
|
-
name,
|
|
43
|
-
start_line: node.loc?.start.line ?? -1,
|
|
44
|
-
end_line: node.loc?.end.line ?? -1,
|
|
45
|
-
content: content.slice(node.start, node.end),
|
|
46
|
-
});
|
|
47
|
-
},
|
|
48
|
-
ArrowFunctionExpression(node, ancestors) {
|
|
49
|
-
const parent = ancestors[ancestors.length - 2];
|
|
50
|
-
const name = getFunctionName(node, parent, path.basename(filePath));
|
|
51
|
-
functions.push({
|
|
52
|
-
name,
|
|
53
|
-
start_line: node.loc?.start.line ?? -1,
|
|
54
|
-
end_line: node.loc?.end.line ?? -1,
|
|
55
|
-
content: content.slice(node.start, node.end),
|
|
56
|
-
});
|
|
57
|
-
},
|
|
58
|
-
});
|
|
59
|
-
if (functions.length === 0) {
|
|
60
|
-
log(`⚠️ No functions found in: ${filePath}`);
|
|
61
|
-
return false;
|
|
62
|
-
}
|
|
63
|
-
log(`🔍 Found ${functions.length} functions in ${filePath}`);
|
|
64
|
-
for (const fn of functions) {
|
|
65
|
-
const embedding = await generateEmbedding(fn.content);
|
|
66
|
-
const result = db.prepare(`
|
|
67
|
-
INSERT INTO functions (
|
|
68
|
-
file_id, name, start_line, end_line, content, embedding, lang
|
|
69
|
-
) VALUES (
|
|
70
|
-
@file_id, @name, @start_line, @end_line, @content, @embedding, @lang
|
|
71
|
-
)
|
|
72
|
-
`).run({
|
|
73
|
-
file_id: fileId,
|
|
74
|
-
name: fn.name,
|
|
75
|
-
start_line: fn.start_line,
|
|
76
|
-
end_line: fn.end_line,
|
|
77
|
-
content: fn.content,
|
|
78
|
-
embedding: JSON.stringify(embedding),
|
|
79
|
-
lang: 'js'
|
|
80
|
-
});
|
|
81
|
-
const callerId = result.lastInsertRowid;
|
|
82
|
-
const fnAst = parse(fn.content, {
|
|
23
|
+
try {
|
|
24
|
+
const code = fs.readFileSync(filePath, 'utf-8');
|
|
25
|
+
console.log(`[Debug] Attempting to parse: ${filePath}`);
|
|
26
|
+
console.log(`[Debug] First 3 lines:\n${code.split('\n').slice(0, 3).join('\n')}`);
|
|
27
|
+
const ast = parse(content, {
|
|
83
28
|
ecmaVersion: 'latest',
|
|
84
29
|
sourceType: 'module',
|
|
85
30
|
locations: true,
|
|
86
31
|
});
|
|
87
|
-
const
|
|
88
|
-
walkAncestor(
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
32
|
+
const functions = [];
|
|
33
|
+
walkAncestor(ast, {
|
|
34
|
+
FunctionDeclaration(node, ancestors) {
|
|
35
|
+
const parent = ancestors[ancestors.length - 2];
|
|
36
|
+
const name = getFunctionName(node, parent, path.basename(filePath));
|
|
37
|
+
functions.push({
|
|
38
|
+
name,
|
|
39
|
+
start_line: node.loc?.start.line ?? -1,
|
|
40
|
+
end_line: node.loc?.end.line ?? -1,
|
|
41
|
+
content: content.slice(node.start, node.end),
|
|
42
|
+
});
|
|
43
|
+
},
|
|
44
|
+
FunctionExpression(node, ancestors) {
|
|
45
|
+
const parent = ancestors[ancestors.length - 2];
|
|
46
|
+
const name = getFunctionName(node, parent, path.basename(filePath));
|
|
47
|
+
functions.push({
|
|
48
|
+
name,
|
|
49
|
+
start_line: node.loc?.start.line ?? -1,
|
|
50
|
+
end_line: node.loc?.end.line ?? -1,
|
|
51
|
+
content: content.slice(node.start, node.end),
|
|
52
|
+
});
|
|
53
|
+
},
|
|
54
|
+
ArrowFunctionExpression(node, ancestors) {
|
|
55
|
+
const parent = ancestors[ancestors.length - 2];
|
|
56
|
+
const name = getFunctionName(node, parent, path.basename(filePath));
|
|
57
|
+
functions.push({
|
|
58
|
+
name,
|
|
59
|
+
start_line: node.loc?.start.line ?? -1,
|
|
60
|
+
end_line: node.loc?.end.line ?? -1,
|
|
61
|
+
content: content.slice(node.start, node.end),
|
|
62
|
+
});
|
|
63
|
+
},
|
|
94
64
|
});
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
65
|
+
if (functions.length === 0) {
|
|
66
|
+
log(`⚠️ No functions found in: ${filePath}`);
|
|
67
|
+
db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
|
|
68
|
+
return false;
|
|
69
|
+
}
|
|
70
|
+
log(`🔍 Found ${functions.length} functions in ${filePath}`);
|
|
71
|
+
for (const fn of functions) {
|
|
72
|
+
const embedding = await generateEmbedding(fn.content);
|
|
73
|
+
const result = db.prepare(`
|
|
74
|
+
INSERT INTO functions (
|
|
75
|
+
file_id, name, start_line, end_line, content, embedding, lang
|
|
76
|
+
) VALUES (
|
|
77
|
+
@file_id, @name, @start_line, @end_line, @content, @embedding, @lang
|
|
78
|
+
)
|
|
99
79
|
`).run({
|
|
100
|
-
|
|
101
|
-
|
|
80
|
+
file_id: fileId,
|
|
81
|
+
name: fn.name,
|
|
82
|
+
start_line: fn.start_line,
|
|
83
|
+
end_line: fn.end_line,
|
|
84
|
+
content: fn.content,
|
|
85
|
+
embedding: JSON.stringify(embedding),
|
|
86
|
+
lang: 'js'
|
|
87
|
+
});
|
|
88
|
+
const callerId = result.lastInsertRowid;
|
|
89
|
+
const fnAst = parse(fn.content, {
|
|
90
|
+
ecmaVersion: 'latest',
|
|
91
|
+
sourceType: 'module',
|
|
92
|
+
locations: true,
|
|
93
|
+
});
|
|
94
|
+
const calls = [];
|
|
95
|
+
walkAncestor(fnAst, {
|
|
96
|
+
CallExpression(node) {
|
|
97
|
+
if (node.callee?.type === 'Identifier' && node.callee.name) {
|
|
98
|
+
calls.push({ calleeName: node.callee.name });
|
|
99
|
+
}
|
|
100
|
+
}
|
|
102
101
|
});
|
|
102
|
+
for (const call of calls) {
|
|
103
|
+
db.prepare(`
|
|
104
|
+
INSERT INTO function_calls (caller_id, callee_name)
|
|
105
|
+
VALUES (@caller_id, @callee_name)
|
|
106
|
+
`).run({
|
|
107
|
+
caller_id: callerId,
|
|
108
|
+
callee_name: call.calleeName
|
|
109
|
+
});
|
|
110
|
+
}
|
|
111
|
+
log(`📌 Indexed function: ${fn.name} with ${calls.length} calls`);
|
|
103
112
|
}
|
|
104
|
-
|
|
113
|
+
db.prepare(markFileAsExtractedTemplate).run({ id: fileId });
|
|
114
|
+
log(`✅ Marked functions as extracted for ${filePath}`);
|
|
115
|
+
return true;
|
|
116
|
+
}
|
|
117
|
+
catch (err) {
|
|
118
|
+
log(`❌ Failed to extract from: ${filePath}`);
|
|
119
|
+
log(` ↳ ${String(err.message)}`);
|
|
120
|
+
db.prepare(markFileAsFailedTemplate).run({ id: fileId });
|
|
121
|
+
return false;
|
|
105
122
|
}
|
|
106
|
-
db.prepare(`
|
|
107
|
-
UPDATE files
|
|
108
|
-
SET processing_status = 'extracted'
|
|
109
|
-
WHERE id = @fileId
|
|
110
|
-
`).run({ fileId });
|
|
111
|
-
log(`✅ Marked functions as extracted for ${filePath}`);
|
|
112
|
-
return true;
|
|
113
123
|
}
|
|
@@ -1,4 +1,8 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
import { db } from '../client.js';
|
|
2
|
+
import { markFileAsSkippedTemplate } from '../sqlTemplates.js';
|
|
3
|
+
export async function extractFromXML(filePath, _content, fileId) {
|
|
3
4
|
console.warn(`⛔️ XML extraction not implemented: ${filePath}`);
|
|
5
|
+
// Mark the file as skipped with the relevant status update
|
|
6
|
+
db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
|
|
7
|
+
return false;
|
|
4
8
|
}
|
|
@@ -4,6 +4,7 @@ import { extractFromJava } from './extractFromJava.js';
|
|
|
4
4
|
import { extractFromJS } from './extractFromJs.js';
|
|
5
5
|
import { extractFromXML } from './extractFromXML.js';
|
|
6
6
|
import { db } from '../client.js';
|
|
7
|
+
import { markFileAsFailedTemplate, markFileAsSkippedByPath } from '../sqlTemplates.js';
|
|
7
8
|
/**
|
|
8
9
|
* Detects file type and delegates to the appropriate extractor.
|
|
9
10
|
*/
|
|
@@ -17,32 +18,21 @@ export async function extractFunctionsFromFile(filePath, content, fileId) {
|
|
|
17
18
|
if (type === 'java') {
|
|
18
19
|
log(`❌ Nothing extracted for ${filePath} due to missing implementation`);
|
|
19
20
|
await extractFromJava(filePath, content, fileId);
|
|
20
|
-
// move into extract file
|
|
21
|
-
db.prepare(`
|
|
22
|
-
UPDATE files SET processing_status = 'skipped' WHERE id = @id
|
|
23
|
-
`).run({ id: fileId });
|
|
24
21
|
return false;
|
|
25
22
|
}
|
|
26
23
|
if (type === 'xml') {
|
|
27
24
|
log(`❌ Nothing extracted for ${filePath} due to missing implementation`);
|
|
28
25
|
await extractFromXML(filePath, content, fileId);
|
|
29
|
-
// move into extract file
|
|
30
|
-
db.prepare(`
|
|
31
|
-
UPDATE files SET processing_status = 'skipped' WHERE id = @id
|
|
32
|
-
`).run({ id: fileId });
|
|
33
26
|
return false;
|
|
34
27
|
}
|
|
35
28
|
log(`⚠️ Unsupported file type: ${type} for function extraction. Skipping ${filePath}`);
|
|
36
|
-
db.prepare(
|
|
37
|
-
UPDATE files SET processing_status = 'skipped' WHERE id = @id
|
|
38
|
-
`).run({ id: fileId });
|
|
29
|
+
db.prepare(markFileAsSkippedByPath).run({ path: filePath });
|
|
39
30
|
return false;
|
|
40
31
|
}
|
|
41
32
|
catch (error) {
|
|
42
33
|
log(`❌ Failed to extract functions from ${filePath}: ${error instanceof Error ? error.message : error}`);
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
`).run({ id: fileId });
|
|
34
|
+
// Use the sqlTemplate to mark the file as 'failed'
|
|
35
|
+
db.prepare(markFileAsFailedTemplate).run({ id: fileId });
|
|
46
36
|
return false;
|
|
47
37
|
}
|
|
48
38
|
}
|
package/dist/db/sqlTemplates.js
CHANGED
|
@@ -51,3 +51,51 @@ export const insertFunctionCallTemplate = `
|
|
|
51
51
|
INSERT INTO function_calls (caller_id, callee_name)
|
|
52
52
|
VALUES (:caller_id, :callee_name)
|
|
53
53
|
`;
|
|
54
|
+
// SQL templates for tracking per-file extraction state.
// NOTE(review): two named-parameter styles are mixed below (`:id` vs `@path`);
// both are accepted by better-sqlite3, but callers must match the style used.

// Mark a file as unprocessed (clears the extraction timestamp).
export const markFileAsUnprocessedTemplate = `
  UPDATE files
  SET processing_status = 'unprocessed',
      functions_extracted_at = NULL
  WHERE id = :id
`;
// Mark a file as extracted (stamps the extraction time).
export const markFileAsExtractedTemplate = `
  UPDATE files
  SET processing_status = 'extracted',
      functions_extracted_at = CURRENT_TIMESTAMP
  WHERE id = :id
`;
// Mark a file as skipped (not extractable).
export const markFileAsSkippedTemplate = `
  UPDATE files
  SET processing_status = 'skipped',
      functions_extracted_at = NULL
  WHERE id = :id
`;
// Mark a file as failed (extraction attempted but errored).
export const markFileAsFailedTemplate = `
  UPDATE files
  SET processing_status = 'failed',
      functions_extracted_at = NULL
  WHERE id = :id
`;
// Select files still needing a summary pass.
// Uses a positional `?` for LIMIT — callers pass the limit as the sole
// positional argument, unlike the named-parameter templates above.
export const selectUnprocessedFiles = `
  SELECT id, path, type, summary, indexed_at, last_modified, processing_status
  FROM files
  WHERE processing_status = 'unprocessed' OR summary IS NULL OR summary = ''
  ORDER BY last_modified DESC
  LIMIT ?
`;
// Mark a file as skipped, addressed by path instead of id.
export const markFileAsSkippedByPath = `
  UPDATE files
  SET processing_status = 'skipped',
      functions_extracted_at = NULL
  WHERE path = @path
`;
// Store the generated summary + embedding and refresh the index timestamp.
export const updateFileWithSummaryAndEmbedding = `
  UPDATE files
  SET summary = @summary,
      embedding = @embedding,
      indexed_at = datetime('now')
  WHERE path = @path
`;
|
|
@@ -1,9 +1,48 @@
|
|
|
1
1
|
// utils/fileClassifier.ts
|
|
2
2
|
import path from 'path';
|
|
3
|
+
/**
|
|
4
|
+
* Determines whether a file is likely to be a *generated* or *bundled* file,
|
|
5
|
+
* rather than handwritten source code.
|
|
6
|
+
*
|
|
7
|
+
* This helps filter out files that shouldn't be analyzed for user-authored logic,
|
|
8
|
+
* like minified JS bundles, Webpack chunks, TypeScript output, etc.
|
|
9
|
+
*/
|
|
3
10
|
/**
 * Determines whether a file is likely *generated* or *bundled* output
 * (minified bundles, webpack/vite/rollup chunks, compiled output) rather
 * than handwritten source code, so it can be excluded from analysis.
 *
 * @param {string} filePath - Absolute or relative path of the file.
 * @returns {boolean} true when the file looks machine-generated.
 */
export function isGeneratedOrBundledFile(filePath) {
    const base = path.basename(filePath);
    // `something.min.js` / `app.min.ts` — minified production output.
    const isMinified = /\.min\.(js|ts)$/.test(base);
    // `bundle.839abc.js`, `chunk.123abc.ts`, `main-worker.js` — bundler
    // output named with a content hash or a role suffix.
    const isHashNamed = /[-_.](worker|bundle|chunk|[a-f0-9]{6,})\.(js|ts)$/.test(base);
    // Files inside well-known output folders are treated as generated.
    const outputDirs = ['dist', 'build', 'assets', 'node_modules', 'plugins'];
    const isInKnownOutputFolder = outputDirs.some((dir) => new RegExp(`[\\\\/]${dir}[\\\\/]`, 'i').test(filePath));
    // FIX: the previous `lib/` special case (`isInLib && (isMinified || isHashNamed)`)
    // was redundant by absorption — it was already implied by
    // `isMinified || isHashNamed` — so it is removed; behavior is unchanged.
    return isMinified || isHashNamed || isInKnownOutputFolder;
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
/**
 * Fetches the open pull requests of a repository (trimmed to the fields
 * this module needs).
 *
 * @param {string} token - GitHub personal access token.
 * @param {string} owner - Repository owner login.
 * @param {string} repo - Repository name.
 * @returns {Promise<Array<{number: number, title: string, diff_url: string}>>}
 * @throws {Error} When GitHub responds with a non-2xx status.
 */
export async function fetchOpenPullRequests(token, owner, repo) {
    // FIX: request state/page-size explicitly, matching the sibling
    // implementation in github.js — the bare /pulls endpoint only returns
    // 30 results per page by default.
    const url = `https://api.github.com/repos/${owner}/${repo}/pulls?state=open&per_page=100`;
    const res = await fetch(url, {
        headers: {
            Authorization: `token ${token}`,
            Accept: "application/vnd.github.v3+json",
        },
    });
    if (!res.ok) {
        throw new Error(`GitHub API error: ${res.status} ${res.statusText}`);
    }
    const prs = await res.json();
    return prs.map((pr) => ({
        number: pr.number,
        title: pr.title,
        diff_url: pr.diff_url,
    }));
}
|
|
19
|
+
/**
 * Downloads the raw unified diff of a pull request.
 *
 * @param {{ diff_url: string }} pr - Pull request object carrying a `diff_url`.
 * @param {string} token - GitHub personal access token.
 * @returns {Promise<string>} The diff as plain text.
 * @throws {Error} When GitHub responds with a non-2xx status.
 */
export async function fetchPullRequestDiff(pr, token) {
    const headers = {
        Authorization: `token ${token}`,
        Accept: "application/vnd.github.v3.diff",
    };
    const res = await fetch(pr.diff_url, { headers });
    if (!res.ok) {
        throw new Error(`Error fetching PR diff: ${res.status} ${res.statusText}`);
    }
    return res.text();
}
|
|
31
|
+
/**
 * Resolves the login of the user owning the given token.
 *
 * @param {string} token - GitHub personal access token.
 * @returns {Promise<string>} The GitHub username (login).
 * @throws {Error} When GitHub responds with a non-2xx status.
 */
export async function getGitHubUsername(token) {
    const headers = {
        Authorization: `token ${token}`,
        Accept: "application/vnd.github.v3+json",
    };
    const res = await fetch('https://api.github.com/user', { headers });
    if (!res.ok) {
        throw new Error(`Error fetching user info: ${res.status} ${res.statusText}`);
    }
    const { login } = await res.json();
    return login;
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { Config } from '../config.js';
|
|
2
|
+
import { promptForToken } from './token.js';
|
|
3
|
+
/**
 * Ensures a GitHub token is available: returns the configured one, or
 * prompts the user, stores the token, and returns it.
 *
 * @returns {Promise<string>} A GitHub personal access token.
 */
export async function ensureGitHubAuth() {
    // First check if the token exists in the config.
    const existing = Config.getGitHubToken();
    if (existing) {
        return existing; // Token already configured — reuse it.
    }
    // Token doesn't exist in config, prompt the user for it.
    console.log("🔐 GitHub token not found.");
    // FIX: trim once and reuse — the original trimmed the token twice
    // (once when saving, once when returning).
    const token = (await promptForToken()).trim();
    Config.setGitHubToken(token);
    return token;
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { ensureGitHubAuth } from './auth.js';
|
|
2
|
+
import { getRepoDetails } from './repo.js';
|
|
3
|
+
/**
 * Fetches up to 100 open pull requests for a repository and projects each
 * one onto the subset of fields the review flow uses.
 *
 * @param {string} token - GitHub personal access token.
 * @param {string} owner - Repository owner login.
 * @param {string} repo - Repository name.
 * @returns {Promise<object[]>} Trimmed PR records.
 * @throws {Error} When GitHub responds with a non-2xx status.
 */
export async function fetchOpenPullRequests(token, owner, repo) {
    const url = `https://api.github.com/repos/${owner}/${repo}/pulls?state=open&per_page=100`;
    const headers = {
        Authorization: `token ${token}`,
        Accept: 'application/vnd.github.v3+json',
    };
    const res = await fetch(url, { headers });
    if (!res.ok) {
        throw new Error(`GitHub API error: ${res.status} ${res.statusText}`);
    }
    const pulls = await res.json();
    // Keep only the fields downstream code relies on.
    return pulls.map(({ number, title, url, diff_url, draft, merged_at, base, requested_reviewers }) => ({
        number,
        title,
        url,
        diff_url,
        draft,
        merged_at,
        base,
        requested_reviewers,
    }));
}
|
|
26
|
+
/**
 * Looks up the GitHub login associated with the supplied token.
 *
 * @param {string} token - GitHub personal access token.
 * @returns {Promise<string>} GitHub username (login).
 * @throws {Error} When GitHub responds with a non-2xx status.
 */
export async function getGitHubUsername(token) {
    const res = await fetch('https://api.github.com/user', {
        headers: {
            Authorization: `token ${token}`,
            Accept: "application/vnd.github.v3+json",
        },
    });
    if (!res.ok) {
        throw new Error(`Error fetching user info: ${res.status} ${res.statusText}`);
    }
    const profile = await res.json();
    return profile.login;
}
|
|
39
|
+
/**
 * Retrieves the raw diff text for the given pull request.
 *
 * @param {{ diff_url: string }} pr - PR record with a `diff_url`.
 * @param {string} token - GitHub personal access token.
 * @returns {Promise<string>} Unified diff text.
 * @throws {Error} When GitHub responds with a non-2xx status.
 */
export async function fetchPullRequestDiff(pr, token) {
    const requestOptions = {
        headers: {
            Authorization: `token ${token}`,
            Accept: "application/vnd.github.v3.diff",
        },
    };
    const res = await fetch(pr.diff_url, requestOptions);
    if (!res.ok) {
        throw new Error(`Error fetching PR diff: ${res.status} ${res.statusText}`);
    }
    return res.text();
}
|
|
51
|
+
/**
 * Submits a review on a pull request in the repository resolved from config.
 *
 * @param {number} prNumber - Pull request number.
 * @param {string} body - Review body text.
 * @param {string} [event='COMMENT'] - Review event (COMMENT, APPROVE, REQUEST_CHANGES).
 * @throws {Error} When the GitHub API rejects the request (message includes the response body).
 */
export async function submitReview(prNumber, body, event = 'COMMENT') {
    const token = await ensureGitHubAuth();
    const { owner, repo } = getRepoDetails();
    const url = `https://api.github.com/repos/${owner}/${repo}/pulls/${prNumber}/reviews`;
    const res = await fetch(url, {
        method: 'POST',
        headers: {
            Authorization: `token ${token}`,
            Accept: 'application/vnd.github.v3+json',
            // FIX: declare the JSON payload type explicitly on this POST.
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({
            body,
            event,
        }),
    });
    if (!res.ok) {
        const errorText = await res.text();
        throw new Error(`Failed to submit review: ${res.status} ${res.statusText} - ${errorText}`);
    }
    console.log(`✅ Submitted ${event} review for PR #${prNumber}`);
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { ensureGitHubAuth } from './auth.js';
|
|
2
|
+
import { getRepoDetails } from './repo.js';
|
|
3
|
+
/**
 * Verifies that the configured GitHub token can access the configured repo.
 *
 * @returns {Promise<string>} Human-readable success message.
 * @throws {Error} When the token is invalid or lacks access.
 */
export async function validateGitHubTokenAgainstRepo() {
    const token = await ensureGitHubAuth();
    const { owner, repo } = getRepoDetails();
    const response = await fetch(`https://api.github.com/repos/${owner}/${repo}`, {
        headers: {
            Authorization: `Bearer ${token}`,
            'User-Agent': 'scai-cli',
        },
    });
    // Success path first; failure builds a detailed error below.
    if (response.ok) {
        return `✅ GitHub token is valid for ${owner}/${repo}`;
    }
    const error = await response.json().catch(() => ({}));
    const detail = error.message ? ` – ${error.message}` : '';
    throw new Error(`❌ Token is invalid or lacks access to ${owner}/${repo}. GitHub says: ${response.status} ${response.statusText}${detail}`);
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import { execSync } from 'child_process';
|
|
3
|
+
import { Config } from '../config.js';
|
|
4
|
+
/**
|
|
5
|
+
* Executes a Git command inside the specified working directory.
|
|
6
|
+
*/
|
|
7
|
+
/**
 * Executes a Git command inside the specified working directory.
 *
 * @param {string} cmd - Shell command to run.
 * @param {string} cwd - Directory to run it in.
 * @returns {string} Trimmed stdout of the command.
 */
function runGitCommand(cmd, cwd) {
    const stdout = execSync(cmd, { cwd, encoding: 'utf-8' });
    return stdout.trim();
}
|
|
10
|
+
/**
|
|
11
|
+
* Retrieves the owner and repo name from the Git remote URL inside the indexDir.
|
|
12
|
+
* This ensures we get the correct GitHub repo owner and name, regardless of current working directory.
|
|
13
|
+
*/
|
|
14
|
+
/**
 * Reads the Git remote URL inside `indexDir` and parses the GitHub owner
 * and repository name out of it.
 *
 * @param {string} indexDir - Directory containing the Git checkout.
 * @returns {{ owner: string, repo: string }}
 * @throws {Error} When the remote URL is missing or not a GitHub URL.
 */
function getRepoOwnerAndNameFromGit(indexDir) {
    try {
        const originUrl = runGitCommand('git config --get remote.origin.url', indexDir);
        console.log(`🔗 Git origin URL from '${indexDir}': ${originUrl}`);
        // Handles both SSH (github.com:owner/repo.git) and HTTPS forms.
        const match = originUrl.match(/github\.com[:/](.+?)(?:\.git)?$/);
        if (!match) {
            throw new Error("❌ Could not parse GitHub repo from origin URL.");
        }
        const [owner, repo] = match[1].split('/');
        console.log(`✅ Parsed from Git: owner='${owner}', repo='${repo}'`);
        return { owner, repo };
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : error;
        console.warn(`⚠️ Failed to parse GitHub info from Git config in '${indexDir}': ${reason}`);
        throw error;
    }
}
|
|
30
|
+
/**
|
|
31
|
+
* Fallback: Extracts GitHub repo owner and name from the index directory path.
|
|
32
|
+
*/
|
|
33
|
+
/**
 * Fallback: derives the GitHub owner/repo pair from the last two segments
 * of the index directory path.
 *
 * @param {string} indexDir - Path whose tail is assumed to be `<owner>/<repo>`.
 * @returns {{ owner: string, repo: string }}
 */
function getRepoOwnerAndNameFromIndexDir(indexDir) {
    const segments = path.resolve(indexDir).split(path.sep);
    const repo = segments[segments.length - 1];
    const owner = segments[segments.length - 2];
    console.log(`📁 Parsed from indexDir: owner='${owner}', repo='${repo}'`);
    return { owner, repo };
}
|
|
40
|
+
/**
|
|
41
|
+
* Get the GitHub repo details, always from the configured indexDir.
|
|
42
|
+
* Prefers Git config, falls back to parsing the path.
|
|
43
|
+
*/
|
|
44
|
+
/**
 * Resolves the GitHub repo details from the configured indexDir:
 * prefers the Git remote config, falls back to parsing the path.
 *
 * @returns {{ owner: string, repo: string }}
 * @throws {Error} When no indexDir is configured.
 */
export function getRepoDetails() {
    const indexDir = Config.getIndexDir();
    if (!indexDir) {
        throw new Error("❌ indexDir is not configured.");
    }
    console.log(`📦 Resolving GitHub repo info from indexDir: ${indexDir}`);
    let details;
    try {
        details = getRepoOwnerAndNameFromGit(indexDir);
    }
    catch {
        console.log("🔁 Falling back to extracting from indexDir path...");
        details = getRepoOwnerAndNameFromIndexDir(indexDir);
    }
    return details;
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
// Utility function to prompt for GitHub token
|
|
2
|
+
import readline from 'readline';
|
|
3
|
+
/**
 * Prompts the user on stdin for a GitHub Personal Access Token.
 *
 * @returns {Promise<string>} The token exactly as entered (not trimmed).
 */
export function promptForToken() {
    const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout,
    });
    return new Promise((resolve) => {
        rl.question('Paste your GitHub Personal Access Token (scopes: `repo`): ', (token) => {
            rl.close();
            resolve(token);
        });
    });
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"use strict";
|
package/dist/index.js
CHANGED
|
@@ -22,6 +22,9 @@ import { runAskCommand } from './commands/AskCmd.js';
|
|
|
22
22
|
import { runBackupCommand } from './commands/BackupCmd.js';
|
|
23
23
|
import { runMigrateCommand } from "./commands/MigrateCmd.js";
|
|
24
24
|
import { runInspectCommand } from "./commands/InspectCmd.js";
|
|
25
|
+
import { reviewPullRequestCmd } from "./commands/ReviewCmd.js";
|
|
26
|
+
import { promptForToken } from "./github/token.js";
|
|
27
|
+
import { validateGitHubTokenAgainstRepo } from "./github/githubAuthCheck.js";
|
|
25
28
|
// 🎛️ CLI Setup
|
|
26
29
|
const cmd = new Command('scai')
|
|
27
30
|
.version(version)
|
|
@@ -47,8 +50,59 @@ cmd
|
|
|
47
50
|
defineSuggCommand(cmd);
|
|
48
51
|
// 🔧 Group: Git-related commands
|
|
49
52
|
const git = cmd.command('git').description('Git utilities');
|
|
53
|
+
// Register `git review`: AI review of an open pull request.
git
    .command('review')
    .description('Review an open pull request using AI')
    .option('-a, --all', 'Show all PRs requiring a review (not just for the current user)', false)
    // FIX: the action parameter was named `cmd`, shadowing the outer CLI
    // `cmd` instance; renamed to `options` (it receives the parsed flags).
    .action(async (options) => {
        const showAll = options.all; // Flag passed via command line.
        await reviewPullRequestCmd('main', showAll); // Pass the flag to the review function.
    });
|
|
50
61
|
// Register `sugg` under `git` group
|
|
51
62
|
defineSuggCommand(git);
|
|
63
|
+
// Add auth-related commands
const auth = cmd.command('auth').description('GitHub authentication commands');
// `auth check`: verify the stored token works against the configured repo.
auth
    .command('check')
    .description('Check if GitHub authentication is set up and valid')
    .action(async () => {
        try {
            const token = Config.getGitHubToken();
            if (!token) {
                console.log('❌ GitHub authentication not found. Please set your token.');
                return;
            }
            // Call the new check
            const result = await validateGitHubTokenAgainstRepo();
            console.log(result);
        }
        catch (err) {
            // Errors may be plain strings or Error objects; print either.
            console.error(typeof err === 'string' ? err : err.message);
        }
    });
// `auth reset`: clear the stored token and confirm removal.
auth
    .command('reset')
    .description('Reset GitHub authentication credentials')
    .action(() => {
        Config.setGitHubToken(''); // Clears the GitHub token from the config
        console.log('🔄 GitHub authentication has been reset.');
        // Check if the token is successfully removed
        const token = Config.getGitHubToken();
        if (!token) {
            console.log('✅ Token successfully removed from configuration.');
        }
        else {
            console.log('❌ Token still exists in the configuration.');
        }
    });
// `auth set`: interactively capture and store a new token.
auth
    .command('set')
    .description('Set your GitHub Personal Access Token')
    .action(async () => {
        const token = await promptForToken();
        Config.setGitHubToken(token.trim());
        console.log('🔑 GitHub token set successfully.');
    });
|
|
52
106
|
// 🛠️ Group: `gen` commands for content generation
|
|
53
107
|
const gen = cmd.command('gen').description('Generate code-related output');
|
|
54
108
|
gen
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { generate } from '../../lib/generate.js';
|
|
2
|
+
import { Config } from '../../config.js';
|
|
3
|
+
/**
 * Pipeline module that asks the model to review a code diff / PR and
 * returns the generated feedback.
 */
export const reviewModule = {
    name: 'review',
    description: 'Reviews code diff or PR content and provides feedback',
    /**
     * @param {{ content: string, filepath: string }} input - Diff text plus its path.
     * @returns {Promise<{ content: string, filepath: string }>} Model feedback.
     */
    async run({ content, filepath }) {
        const model = Config.getModel();
        // Assemble the review prompt; trailing .trim() matches the original
        // template-literal form exactly.
        const prompt = [
            'You are a senior software engineer reviewing a pull request.',
            'Give clear, constructive feedback based on the code changes below.',
            '',
            'Changes:',
            content,
        ].join('\n').trim();
        const response = await generate({ content: prompt, filepath }, model);
        return {
            content: response.content,
            filepath,
        };
    },
};
|
package/dist/utils/fileTree.js
CHANGED
|
@@ -1,30 +1,31 @@
|
|
|
1
1
|
import fs from 'fs';
|
|
2
2
|
import path from 'path';
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
3
|
+
/**
 * Renders a small file tree of the directory containing `focusPath`,
 * titled with that directory's path relative to `rootDir`.
 *
 * @param {string} rootDir - Root used to compute the relative title.
 * @param {string} focusPath - File to highlight inside the tree.
 * @param {number} [maxDepth=2] - How deep to descend.
 * @returns {string} Rendered tree preceded by a `📂` header line.
 */
export function generateFocusedFileTree(rootDir, focusPath, maxDepth = 2) {
    const absoluteFocus = path.resolve(focusPath);
    const parentDir = path.dirname(absoluteFocus);
    // Normalize Windows separators so the title always uses '/'.
    const relativeTitle = path.relative(rootDir, parentDir).replace(/\\/g, '/');
    const body = generateFileTree(parentDir, maxDepth, absoluteFocus);
    const title = relativeTitle || '.';
    return `📂 ${title}\n${body}`;
}
|
|
10
|
+
/**
 * Recursively renders an ASCII tree of `dir` down to `depth` levels,
 * prefixing the highlighted file with a ➡️ marker.
 *
 * @param {string} dir - Directory to render.
 * @param {number} depth - Remaining recursion depth; negative stops.
 * @param {string|null} highlightPath - Absolute/relative path to mark.
 * @param {string} [prefix=''] - Indentation carried through recursion.
 * @returns {string} The rendered tree (one entry per line).
 */
function generateFileTree(dir, depth, highlightPath, prefix = '') {
    if (depth < 0)
        return '';
    let output = '';
    const entries = fs.readdirSync(dir, { withFileTypes: true });
    // FIX: directories first, then alphabetical. The bare
    // `Number(b.isDirectory()) - Number(a.isDirectory())` comparator left
    // same-kind entries in raw readdir order, which is filesystem-dependent;
    // the localeCompare tiebreak restores deterministic output.
    const sorted = [...entries].sort((a, b) => {
        const dirDiff = Number(b.isDirectory()) - Number(a.isDirectory());
        return dirDiff !== 0 ? dirDiff : a.name.localeCompare(b.name);
    });
    sorted.forEach((entry, index) => {
        const isLast = index === sorted.length - 1;
        const connector = isLast ? '└── ' : '├── ';
        const fullPath = path.join(dir, entry.name);
        const isHighlighted = highlightPath && path.resolve(fullPath) === path.resolve(highlightPath);
        if (entry.isDirectory()) {
            output += `${prefix}${connector}${entry.name}/\n`;
            output += generateFileTree(fullPath, depth - 1, highlightPath, prefix + (isLast ? '    ' : '│   '));
        }
        else {
            const name = isHighlighted ? `➡️ ${entry.name}` : entry.name;
            output += `${prefix}${connector}${name}\n`;
        }
    });
    return output;
}
|