scai 0.1.56 → 0.1.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +14 -13
- package/dist/commands/AskCmd.js +3 -3
- package/dist/commands/CommitSuggesterCmd.js +2 -0
- package/dist/commands/IndexCmd.js +12 -27
- package/dist/commands/InspectCmd.js +2 -3
- package/dist/commands/ResetDbCmd.js +16 -11
- package/dist/commands/SummaryCmd.js +4 -3
- package/dist/commands/SwitchCmd.js +73 -0
- package/dist/config.js +157 -37
- package/dist/constants.js +12 -15
- package/dist/daemon/daemonBatch.js +3 -3
- package/dist/daemon/daemonWorker.js +2 -1
- package/dist/db/client.js +30 -8
- package/dist/db/fileIndex.js +5 -1
- package/dist/db/functionExtractors/extractFromJava.js +2 -1
- package/dist/db/functionExtractors/extractFromJs.js +2 -1
- package/dist/db/functionExtractors/extractFromXML.js +2 -1
- package/dist/db/functionExtractors/index.js +2 -1
- package/dist/db/schema.js +2 -1
- package/dist/index.js +44 -24
- package/dist/scripts/migrateDb.js +2 -1
- package/dist/utils/fileTree.js +4 -2
- package/dist/utils/normalizePath.js +9 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -57,6 +57,7 @@ scai runs entirely on your machine and doesn't require cloud APIs or API keys. T
 No more struggling to write pull request descriptions by hand. `scai git review` automatically generates a rich summary of your changes, complete with context, suggestions, and rationale.

 > ⚠️ These features are in **beta** — feedback welcome!
+Ping [@ticcr](https://bsky.app/profile/ticcr.xyz) on Bluesky — I'd love to hear your thoughts!

 ---

@@ -87,7 +88,7 @@ To interact with GitHub and create pull requests, `scai` needs a personal access
 3. **Set the index dir:**

 ```bash
-scai
+scai index set /path/to/repo
 ```

 This is the repo from which scai will look up pull requests that can be reviewed.
@@ -116,10 +117,16 @@ SCAI supports an integrated review flow for GitHub pull requests. To get started
 1. **Set your working index directory (once per repo):**

 ```sh
-scai set
+scai index set /path/to/repo
 ```

 2. **Authenticate with GitHub:**
+```sh
+scai git review
+```
+
+This command will query you for the Personal Access Token and set it for you.
+You may also do this with the auth commands below

 ```sh
 scai auth set
@@ -170,25 +177,19 @@ You might consider renaming `sessionManager` to better reflect its dual role in



-### 🔧 How to Use `scai git
+### 🔧 How to Use `scai git commit`

 Use AI to suggest a meaningful commit message based on your staged code:

 ```bash
 git add .
-scai git
-```
-
-To automatically commit with the selected suggestion:
-
-```bash
-scai git sugg --commit
+scai git commit
 ```

 You can also include a changelog entry along with the commit:

 ```bash
-scai git
+scai git commit --changelog
 ```

 This will:
@@ -287,13 +288,13 @@ You won't gain much value from the index unless you scope it to one repository.
 1. **Set index directory:**

 ```bash
-scai set
+scai index set /path/to/repo
 ```

 2. **Index your repo (once):**

 ```bash
-scai index
+scai index start
 ```

 3. The daemon is designed to **consume minimal resources** and run unobtrusively. You can control it with:
package/dist/commands/AskCmd.js
CHANGED
@@ -7,7 +7,7 @@ import { generate } from '../lib/generate.js';
 import { buildContextualPrompt } from '../utils/buildContextualPrompt.js';
 import { generateFocusedFileTree } from '../utils/fileTree.js';
 import { log } from '../utils/log.js';
-import { PROMPT_LOG_PATH, SCAI_HOME,
+import { PROMPT_LOG_PATH, SCAI_HOME, RELATED_FILES_LIMIT, MAX_SUMMARY_LINES, getIndexDir } from '../constants.js';
 export async function runAskCommand(query) {
 if (!query) {
 query = await promptOnce('💬 Ask your question:\n');
@@ -17,7 +17,7 @@ export async function runAskCommand(query) {
 console.error('❌ No question provided.\n👉 Usage: scai ask "your question"');
 return;
 }
-console.log(`📁 Using index root: ${
+console.log(`📁 Using index root: ${getIndexDir}`);
 console.log(`🔍 Searching for: "${query}"\n`);
 // 🟩 STEP 1: Semantic Search
 const start = Date.now();
@@ -103,7 +103,7 @@ export async function runAskCommand(query) {
 // 🟩 STEP 6: Generate file tree
 let fileTree = '';
 try {
-fileTree = generateFocusedFileTree(
+fileTree = generateFocusedFileTree(filepath, 2);
 }
 catch (e) {
 console.warn('⚠️ Could not generate file tree:', e);
package/dist/commands/CommitSuggesterCmd.js
CHANGED
@@ -63,6 +63,7 @@ export async function suggestCommitMessage(options) {
 console.log('⚠️ No staged changes to suggest a message for.');
 return;
 }
+// Handle changelog generation if the flag is provided
 if (options.changelog) {
 let entryFinalized = false;
 while (!entryFinalized) {
@@ -122,6 +123,7 @@ export async function suggestCommitMessage(options) {
 console.log("👉 Please stage your changes with 'git add <files>' and rerun the command.");
 return;
 }
+// Automatically commit the suggested message
 execSync(`git commit -m "${message.replace(/"/g, '\\"')}"`, { stdio: 'inherit' });
 console.log('✅ Committed with selected message.');
 }
package/dist/commands/IndexCmd.js
CHANGED
@@ -1,3 +1,4 @@
+// indexCmd.ts
 import fg from 'fast-glob';
 import path from 'path';
 import { initSchema } from '../db/schema.js';
@@ -6,14 +7,13 @@ import { detectFileType } from '../fileRules/detectFileType.js';
 import { startDaemon } from './DaemonCmd.js';
 import { IGNORED_FOLDER_GLOBS } from '../fileRules/ignoredPaths.js';
 import { Config } from '../config.js';
-import { DB_PATH } from '../constants.js';
 import { log } from '../utils/log.js';
 import lockfile from 'proper-lockfile';
 import { classifyFile } from '../fileRules/classifyFile.js';
-
+import { getDbPathForRepo } from '../db/client.js';
 async function lockDb() {
 try {
-const lock = await lockfile.lock(
+const lock = await lockfile.lock(getDbPathForRepo());
 return lock;
 }
 catch (err) {
@@ -21,7 +21,7 @@ async function lockDb() {
 throw err;
 }
 }
-export async function runIndexCommand(
+export async function runIndexCommand() {
 try {
 initSchema();
 }
@@ -29,31 +29,17 @@ export async function runIndexCommand(targetDir, options = {}) {
 console.error('❌ Failed to initialize schema:', err);
 process.exit(1);
 }
-
-
-
-resolvedDir = path.resolve(targetDir || process.cwd());
-console.warn('⚠️ Running in --force mode. Config will not be updated.');
-}
-else if (targetDir) {
-// User provided a directory: resolve and persist to config
-resolvedDir = path.resolve(targetDir);
-Config.setIndexDir(resolvedDir);
-}
-else {
-// Use configured indexDir or fallback to cwd
-resolvedDir = Config.getIndexDir() || process.cwd();
-Config.setIndexDir(resolvedDir); // persist if not yet saved
-}
-log(`📂 Indexing files in: ${resolvedDir}`);
+const indexDir = Config.getIndexDir() || process.cwd();
+Config.setIndexDir(indexDir); // persist if not already saved
+log(`📂 Indexing files in: ${indexDir}`);
 const files = await fg('**/*.*', {
-cwd:
+cwd: indexDir,
 ignore: IGNORED_FOLDER_GLOBS,
 absolute: true,
 });
 const countByExt = {};
 let count = 0;
-const release = await lockDb();
+const release = await lockDb();
 for (const file of files) {
 const classification = classifyFile(file);
 if (classification !== 'valid') {
@@ -62,10 +48,10 @@ export async function runIndexCommand(targetDir, options = {}) {
 }
 try {
 const type = detectFileType(file);
-indexFile(file, null, type);
+indexFile(file, null, type);
 const ext = path.extname(file);
 countByExt[ext] = (countByExt[ext] || 0) + 1;
-log(`📄 Indexed: ${path.relative(
+log(`📄 Indexed: ${path.relative(indexDir, file)}`);
 count++;
 }
 catch (err) {
@@ -74,7 +60,6 @@ export async function runIndexCommand(targetDir, options = {}) {
 }
 log('📊 Indexed files by extension:', JSON.stringify(countByExt, null, 2));
 log(`✅ Done. Indexed ${count} files.`);
-await release();
-// Auto-start daemon if not already running
+await release();
 startDaemon();
 }
package/dist/commands/InspectCmd.js
CHANGED
@@ -1,7 +1,7 @@
-import { db } from '../db/client.js';
 import path from 'path';
 import fs from 'fs';
 import { log } from '../utils/log.js';
+import { getDbForRepo } from '../db/client.js';
 export async function runInspectCommand(fileArg) {
 if (!fileArg) {
 log('❌ Please provide a file path to inspect.');
@@ -12,6 +12,7 @@ export async function runInspectCommand(fileArg) {
 log(`❌ File does not exist: ${resolvedPath}`);
 process.exit(1);
 }
+const db = getDbForRepo();
 const file = db
 .prepare(`SELECT * FROM files WHERE REPLACE(path, '\\', '/') = ?`)
 .get(resolvedPath);
@@ -29,12 +30,10 @@ export async function runInspectCommand(fileArg) {
 console.log(`📌 Functions extracted: ${isExtracted ? '✅' : '❌'}`);
 console.log(`📆 Extracted at: ${file.functions_extracted_at || '❌ Not yet'}`);
 console.log(`⚙️ Processing status: ${file.processing_status || 'unknown'}`);
-// 📝 Show summary preview
 if (file.summary) {
 console.log('\n📝 Summary:');
 console.log(file.summary.slice(0, 300) + (file.summary.length > 300 ? '...' : ''));
 }
-// 🧑💻 Show extracted functions
 const functions = db
 .prepare(`SELECT name, start_line, end_line FROM functions WHERE file_id = ? ORDER BY start_line ASC`)
 .all(file.id);
package/dist/commands/ResetDbCmd.js
CHANGED
@@ -1,21 +1,24 @@
 import fs from 'fs';
 import path from 'path';
 import lockfile from 'proper-lockfile';
-import {
-import {
-import { backupScaiFolder } from '../db/backup.js'; // <-- New import
+import { backupScaiFolder } from '../db/backup.js';
+import { getDbPathForRepo, getDbForRepo } from '../db/client.js';
 export async function resetDatabase() {
 console.log('🔁 Backing up existing .scai folder...');
 await backupScaiFolder();
+const dbPath = getDbPathForRepo();
+// Close the DB connection
 try {
+const db = getDbForRepo();
 db.close();
 console.log('🔒 Closed SQLite database connection.');
 }
 catch (err) {
 console.warn('⚠️ Could not close database:', err instanceof Error ? err.message : err);
 }
+// Release lockfile if present
 try {
-const releaseLock = await lockfile.unlock(
+const releaseLock = await lockfile.unlock(dbPath).catch(() => null);
 if (releaseLock) {
 console.log('🔓 Released database lock.');
 }
@@ -23,27 +26,29 @@ export async function resetDatabase() {
 catch (err) {
 console.warn('⚠️ Failed to release database lock:', err instanceof Error ? err.message : err);
 }
-
+// Delete DB file
+if (fs.existsSync(dbPath)) {
 try {
-fs.unlinkSync(
-console.log(`🧹 Deleted existing database at ${
+fs.unlinkSync(dbPath);
+console.log(`🧹 Deleted existing database at ${dbPath}`);
 }
 catch (err) {
 console.error('❌ Failed to delete DB file:', err instanceof Error ? err.message : err);
-return;
 }
 }
 else {
-console.log('ℹ️ No existing database found at:',
+console.log('ℹ️ No existing database found at:', dbPath);
 }
+// Ensure directory exists
 try {
-fs.mkdirSync(path.dirname(
+fs.mkdirSync(path.dirname(dbPath), { recursive: true });
 console.log('📁 Ensured that the database directory exists.');
 }
 catch (err) {
 console.warn('⚠️ Could not ensure DB directory exists:', err instanceof Error ? err.message : err);
 }
-
+// Clean up lock directory
+const lockDir = `${dbPath}.lock`;
 if (fs.existsSync(lockDir)) {
 try {
 fs.rmSync(lockDir, { recursive: true, force: true });
package/dist/commands/SummaryCmd.js
CHANGED
@@ -7,7 +7,7 @@ import { summarizeCode } from '../utils/summarizer.js';
 import { detectFileType } from '../fileRules/detectFileType.js';
 import { generateEmbedding } from '../lib/generateEmbedding.js';
 import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
-import {
+import { getDbForRepo } from '../db/client.js';
 export async function summarizeFile(filepath) {
 let content = '';
 let filePathResolved;
@@ -71,9 +71,10 @@ export async function summarizeFile(filepath) {
 console.log('💾 Summary saved to local database.');
 const embedding = await generateEmbedding(response.summary);
 if (embedding) {
+const db = getDbForRepo();
 db.prepare(`
-
-
+UPDATE files SET embedding = ? WHERE path = ?
+`).run(JSON.stringify(embedding), filePathResolved.replace(/\\/g, '/'));
 console.log('📐 Embedding saved to database.');
 }
 }
package/dist/commands/SwitchCmd.js
ADDED
@@ -0,0 +1,73 @@
+// File: src/commands/switch.ts
+import readline from 'readline';
+import { Config, writeConfig } from '../config.js';
+import { normalizePath, getRepoKeyForPath } from '../utils/normalizePath.js';
+import chalk from 'chalk';
+export function runSwitchCommand(inputPathOrKey) {
+const config = Config.getRaw();
+const normalizedInput = normalizePath(inputPathOrKey);
+// Try to match by key directly
+if (config.repos[normalizedInput]) {
+config.activeRepo = normalizedInput;
+// Update GitHub token
+Config.setGitHubToken(config.repos[normalizedInput].githubToken ?? '');
+console.log(`✅ Switched active repo to key: ${normalizedInput}`);
+}
+else {
+// Try to match by indexDir path
+const repoKey = getRepoKeyForPath(inputPathOrKey, config);
+if (!repoKey) {
+console.error(`❌ No repo found matching path or key: "${inputPathOrKey}"`);
+process.exit(1);
+}
+config.activeRepo = repoKey;
+// Update GitHub token
+Config.setGitHubToken(config.repos[repoKey]?.githubToken ?? '');
+console.log(`✅ Switched active repo to path match: ${repoKey}`);
+}
+// Ensure the active repo change is saved back to the config
+writeConfig(config);
+}
+export async function runInteractiveSwitch() {
+const config = Config.getRaw();
+const keys = Object.keys(config.repos || {});
+if (!keys.length) {
+console.log('⚠️ No repositories configured.');
+return;
+}
+// Auto-switch to the other repo if only 2 are present
+if (keys.length === 2) {
+const current = config.activeRepo;
+const other = keys.find(k => k !== current);
+if (other) {
+runSwitchCommand(other);
+return;
+}
+}
+// Otherwise, show interactive selection
+console.log('\n📁 Available Repositories:\n');
+keys.forEach((key, i) => {
+const isActive = config.activeRepo === key ? chalk.green('(active)') : '';
+const dir = config.repos[key]?.indexDir ?? '';
+// Color the number using chalk.blue and make active repo green
+const numberedRepo = chalk.blue(`${i + 1})`);
+// Highlight the active repo in green and list it
+console.log(`${numberedRepo} ${key} ${isActive}`);
+// Use light grey for the indexDir
+console.log(` ↳ ${chalk.grey(dir)}`);
+});
+const rl = readline.createInterface({
+input: process.stdin,
+output: process.stdout,
+});
+rl.question('\n👉 Select a repository number to activate: ', (answer) => {
+rl.close();
+const index = parseInt(answer.trim(), 10) - 1;
+if (isNaN(index) || index < 0 || index >= keys.length) {
+console.log('❌ Invalid selection.');
+return;
+}
+const selectedKey = keys[index];
+runSwitchCommand(selectedKey);
+});
+}
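The new `SwitchCmd.js` module backs the `scai index switch` command wired up in `dist/index.js` below. As a rough sketch of how its two exports are meant to be called (the path is a placeholder, not a real repo):

```js
// Illustrative only — '/path/to/repo' is a hypothetical indexDir.
import { runSwitchCommand, runInteractiveSwitch } from './commands/SwitchCmd.js';

// Switch by repo key or by a repo's indexDir path; unknown inputs exit with an error.
runSwitchCommand('/path/to/repo');

// With no argument: auto-toggles when exactly two repos are configured,
// otherwise lists the repos and prompts for a number on stdin.
await runInteractiveSwitch();
```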
package/dist/config.js
CHANGED
@@ -1,79 +1,199 @@
 import fs from 'fs';
-import
-
+import path from 'path';
+import { CONFIG_PATH, SCAI_HOME, SCAI_REPOS } from './constants.js';
+import { getDbForRepo } from './db/client.js';
+import { getRepoKeyForPath, normalizePath } from './utils/normalizePath.js';
+import chalk from 'chalk';
 const defaultConfig = {
 model: 'llama3',
 language: 'ts',
-indexDir:
-githubToken: '',
+indexDir: '',
+githubToken: '',
+repos: {},
+activeRepo: undefined,
 };
-// Function to ensure the configuration directory exists
 function ensureConfigDir() {
 if (!fs.existsSync(SCAI_HOME)) {
 fs.mkdirSync(SCAI_HOME, { recursive: true });
 }
 }
-// Function to read the configuration file
 function readConfig() {
 try {
 const content = fs.readFileSync(CONFIG_PATH, 'utf-8');
 return { ...defaultConfig, ...JSON.parse(content) };
 }
 catch {
-return defaultConfig;
+return defaultConfig;
 }
 }
-
-function writeConfig(newConfig) {
+export function writeConfig(newCfg) {
 ensureConfigDir();
 const current = readConfig();
-const merged = {
+const merged = {
+...current,
+...newCfg,
+repos: {
+...current.repos,
+...(newCfg.repos || {}),
+},
+};
 fs.writeFileSync(CONFIG_PATH, JSON.stringify(merged, null, 2));
 }
 export const Config = {
-// Get the current model from the config
 getModel() {
-
+const cfg = readConfig();
+const repoCfg = cfg.repos?.[cfg.activeRepo ?? ''];
+return repoCfg?.model || cfg.model;
 },
-// Set a new model in the config
 setModel(model) {
-
-
+const cfg = readConfig();
+const active = cfg.activeRepo;
+if (active) {
+cfg.repos[active] = { ...cfg.repos[active], model };
+writeConfig(cfg);
+console.log(`📦 Model set to: ${model}`);
+}
+else {
+writeConfig({ model });
+console.log(`📦 Default model set to: ${model}`);
+}
 },
-// Get the current language from the config
 getLanguage() {
-
+const cfg = readConfig();
+const repoCfg = cfg.repos?.[cfg.activeRepo ?? ''];
+return repoCfg?.language || cfg.language;
 },
-// Set a new language in the config
 setLanguage(language) {
-
-
+const cfg = readConfig();
+const active = cfg.activeRepo;
+if (active) {
+cfg.repos[active] = { ...cfg.repos[active], language };
+writeConfig(cfg);
+console.log(`🗣️ Language set to: ${language}`);
+}
+else {
+writeConfig({ language });
+console.log(`🗣️ Default language set to: ${language}`);
+}
 },
-// Get the index directory from the config
 getIndexDir() {
-
+const config = readConfig();
+const activeRepo = config.activeRepo;
+if (activeRepo) {
+const normalized = normalizePath(activeRepo);
+return normalizePath(config.repos[normalized]?.indexDir ?? '');
+}
+return '';
+},
+async setIndexDir(indexDir) {
+const absPath = path.resolve(indexDir); // Resolve the index directory to an absolute path
+const repoKey = normalizePath(absPath); // Normalize path for the repo (get repo name, not full path)
+// Ensure repoKey doesn't contain an absolute path, only the repo name or a relative path
+const scaiRepoRoot = path.join(SCAI_REPOS, path.basename(repoKey)); // Use repo name as key to avoid double paths
+// Set the active repo to the provided indexDir
+const cfg = readConfig();
+cfg.activeRepo = repoKey;
+await writeConfig(cfg); // Persist the change in activeRepo
+// Call setRepoIndexDir to update the repo's indexDir and other settings
+await this.setRepoIndexDir(scaiRepoRoot, absPath); // Set the indexDir for the repo
+// Ensure base folders exist
+fs.mkdirSync(scaiRepoRoot, { recursive: true });
+fs.mkdirSync(path.join(scaiRepoRoot, 'summaries'), { recursive: true });
+fs.mkdirSync(path.join(scaiRepoRoot, 'metadata'), { recursive: true });
+// Init DB if not exists
+const dbPath = path.join(scaiRepoRoot, 'db.sqlite');
+if (!fs.existsSync(dbPath)) {
+console.log(`Database not found. Initializing DB at ${normalizePath(dbPath)}`);
+getDbForRepo(); // Now DB creation works after config update
+}
+console.log(`✅ Index directory set to: ${normalizePath(absPath)}`);
+},
+/**
+* Set both the scaiRepoRoot for the config and the indexDir (the actual repo root path)
+* @param scaiRepoRoot
+* @param indexDir
+*/
+async setRepoIndexDir(scaiRepoRoot, indexDir) {
+const normalizedRepoPath = normalizePath(scaiRepoRoot);
+const normalizedIndexDir = normalizePath(indexDir);
+const cfg = readConfig();
+if (!cfg.repos[normalizedRepoPath]) {
+cfg.repos[normalizedRepoPath] = {};
+}
+cfg.repos[normalizedRepoPath] = {
+...cfg.repos[normalizedRepoPath],
+indexDir: normalizedIndexDir, // Ensure the indexDir is always normalized
+};
+await writeConfig(cfg); // Persist the config update
+console.log(`✅ Repo index directory set for ${normalizedRepoPath} : ${normalizedIndexDir}`);
 },
-
-
-
-
+setActiveRepo(repoKey) {
+const cfg = readConfig();
+cfg.activeRepo = repoKey;
+writeConfig(cfg);
+console.log(`✅ Active repo switched to: ${repoKey}`);
+},
+printAllRepos() {
+const cfg = readConfig();
+const keys = Object.keys(cfg.repos || {});
+if (!keys.length) {
+console.log('ℹ️ No repositories configured yet.');
+return;
+}
+console.log('📁 Configured repositories:\n');
+for (const key of keys) {
+const r = cfg.repos[key];
+const isActive = cfg.activeRepo === key;
+// Use chalk to ensure proper coloring
+const label = isActive
+? chalk.green(`✅ ${key} (active)`) // Active repo in green
+: chalk.white(`   ${key}`); // Inactive repos in white
+console.log(`- ${label}`);
+console.log(`  ↳ indexDir: ${r.indexDir}`);
+}
 },
-//
+// Method to get GitHub token for the active repo
 getGitHubToken() {
-
+const cfg = readConfig();
+const active = cfg.activeRepo;
+if (active) {
+// Normalize the active repo path and fetch token from repos[activeRepo]
+const normalizedActiveRepo = normalizePath(active);
+return cfg.repos[normalizedActiveRepo]?.githubToken || null;
+}
+// If no activeRepo, fall back to the global githubToken field
+return cfg.githubToken || null;
 },
-// Set the GitHub token in the config
 setGitHubToken(token) {
-
-
+const cfg = readConfig();
+const active = cfg.activeRepo;
+if (active) {
+const repoKey = getRepoKeyForPath(active, cfg) ?? normalizePath(active);
+if (!cfg.repos[repoKey]) {
+cfg.repos[repoKey] = {};
+}
+cfg.repos[repoKey] = {
+...cfg.repos[repoKey],
+githubToken: token,
+};
+}
+else {
+cfg.githubToken = token;
+}
+writeConfig(cfg);
+console.log('✅ GitHub token updated');
 },
-// Show the current configuration
 show() {
 const cfg = readConfig();
+const active = cfg.activeRepo;
 console.log(`🔧 Current configuration:`);
-console.log(`
-
-console.log(`
-console.log(`
-}
+console.log(`   Active index dir: ${active || 'Not Set'}`);
+const repoCfg = active ? cfg.repos?.[active] : {};
+console.log(`   Model : ${repoCfg?.model || cfg.model}`);
+console.log(`   Language : ${repoCfg?.language || cfg.language}`);
+console.log(`   GitHub Token : ${cfg.githubToken ? '*****' : 'Not Set'}`);
+},
+getRaw() {
+return readConfig();
+},
 };
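For context, a rough sketch of the `~/.scai/config.json` shape this rewrite produces, based on `defaultConfig` and `setRepoIndexDir` above; the paths and repo key below are made up for illustration:

```js
// Illustrative only — field names follow defaultConfig; the paths and key format are assumptions.
const exampleConfig = {
  model: 'llama3',        // global fallback model
  language: 'ts',         // global fallback language
  indexDir: '',
  githubToken: '',        // global fallback token
  activeRepo: '/home/user/.scai/repos/my-repo',   // assumed key format
  repos: {
    '/home/user/.scai/repos/my-repo': {
      indexDir: '/path/to/repo',   // the repository that actually gets indexed
      model: 'llama3',             // optional per-repo override
      githubToken: '...',          // optional per-repo token
    },
  },
};
```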
package/dist/constants.js
CHANGED
@@ -7,10 +7,9 @@ import fs from 'fs';
 */
 export const SCAI_HOME = path.join(os.homedir(), '.scai');
 /**
-*
-* ~/.scai/db.sqlite
+* Repos dir for multi-repo setup
 */
-export const
+export const SCAI_REPOS = path.join(SCAI_HOME, 'repos');
 /**
 * Path to the daemon process ID file (if running in background mode):
 * ~/.scai/daemon.pid
@@ -32,26 +31,24 @@ export const LOG_PATH = path.join(SCAI_HOME, 'daemon.log');
 */
 export const PROMPT_LOG_PATH = path.join(SCAI_HOME, 'prompt.log');
 /**
-* Get the active index directory.
+* Get the active index directory based on the active repo.
 *
-* - If
-* - If
+* - If there is an active repository, return its `indexDir` from the config.
+* - If no active repo is set, default to the user's home directory (`~`).
 */
 export function getIndexDir() {
 try {
 const config = JSON.parse(fs.readFileSync(CONFIG_PATH, 'utf-8'));
-
+const activeRepo = config.activeRepo;
+if (activeRepo && config.repos[activeRepo]) {
+return config.repos[activeRepo].indexDir || os.homedir(); // Repo-specific indexDir or default to home
+}
+return os.homedir(); // Fallback to home if no active repo
 }
 catch (e) {
-return os.homedir(); //
+return os.homedir(); // Fallback if config file is missing or invalid
 }
 }
-/**
-* On-demand index directory to scan for files.
-*
-* Used by indexing logic (`scai index`) to determine what folder to scan.
-*/
-export const INDEX_DIR = getIndexDir();
 /**
 * Limit for number of related files included in model prompt.
 */
@@ -61,6 +58,6 @@ export const RELATED_FILES_LIMIT = 3;
 */
 export const CANDIDATE_LIMIT = 100;
 /**
-* Limit number of
+* Limit number of summary lines
 */
 export const MAX_SUMMARY_LINES = 12;
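A small illustration of the fallback order `getIndexDir()` now follows (assuming the config shape sketched earlier; the paths are hypothetical):

```js
import os from 'os';
import { getIndexDir } from './constants.js';

// With an active repo whose indexDir is '/path/to/repo':
//   getIndexDir() -> '/path/to/repo'
// With no active repo, or when ~/.scai/config.json is missing or unreadable:
//   getIndexDir() -> os.homedir()
console.log(getIndexDir());
```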
package/dist/daemon/daemonBatch.js
CHANGED
@@ -1,13 +1,12 @@
 import { indexFunctionsForFile } from '../db/functionIndex.js';
-import { db } from '../db/client.js';
 import fs from 'fs/promises';
 import fsSync from 'fs';
 import { generateEmbedding } from '../lib/generateEmbedding.js';
-import { DB_PATH } from '../constants.js';
 import { log } from '../utils/log.js';
 import lockfile from 'proper-lockfile';
 import { summaryModule } from '../pipeline/modules/summaryModule.js';
 import { classifyFile } from '../fileRules/classifyFile.js';
+import { getDbForRepo, getDbPathForRepo } from '../db/client.js';
 import { markFileAsSkippedByPath, selectUnprocessedFiles, updateFileWithSummaryAndEmbedding, } from '../db/sqlTemplates.js';
 const MAX_FILES_PER_BATCH = 5;
 /**
@@ -16,7 +15,7 @@ const MAX_FILES_PER_BATCH = 5;
 */
 async function lockDb() {
 try {
-return await lockfile.lock(
+return await lockfile.lock(getDbPathForRepo());
 }
 catch (err) {
 log('❌ Failed to acquire DB lock: ' + err);
@@ -34,6 +33,7 @@ async function lockDb() {
 export async function runDaemonBatch() {
 log('🟡 Starting daemon batch...');
 // Selects up to MAX_FILES_PER_BATCH files that haven't been processed yet
+const db = getDbForRepo();
 const rows = db.prepare(selectUnprocessedFiles).all(MAX_FILES_PER_BATCH);
 if (rows.length === 0) {
 log('✅ No files left to process.');
package/dist/daemon/daemonWorker.js
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { getDbForRepo } from '../db/client.js';
 import { runDaemonBatch } from './daemonBatch.js';
 import { log } from '../utils/log.js';
 const SLEEP_MS = 2000;
@@ -7,6 +7,7 @@ const IDLE_SLEEP_MS = 5000;
 log('🛠️ daemonWorker.js loaded');
 async function isQueueEmpty() {
 try {
+const db = getDbForRepo();
 const row = db.prepare(`
 SELECT COUNT(*) AS count
 FROM files
package/dist/db/client.js
CHANGED
@@ -1,9 +1,31 @@
-import Database from 'better-sqlite3';
 import fs from 'fs';
-import
-
-
-
-
-
-
+import path from 'path';
+import { SCAI_HOME } from '../constants.js';
+import { Config } from '../config.js';
+import Database from 'better-sqlite3';
+/**
+* Returns a per-repo SQLite database instance.
+* Ensures the directory and file are created.
+*/
+export function getDbForRepo() {
+const repoRoot = Config.getIndexDir();
+if (!repoRoot) {
+throw new Error('No index directory set. Please set an index directory first.');
+}
+fs.mkdirSync(SCAI_HOME, { recursive: true });
+const dbPath = getDbPathForRepo();
+fs.mkdirSync(path.dirname(dbPath), { recursive: true });
+const db = new Database(dbPath);
+db.pragma('journal_mode = WAL');
+return db;
+}
+export function getDbPathForRepo() {
+const repoRoot = Config.getIndexDir();
+if (!repoRoot) {
+throw new Error('No index directory set. Please set an index directory first.');
+}
+// Use path.basename to get the repo name from the full path
+const repoName = path.basename(repoRoot); // Get the last part of the path (the repo name)
+const scaiRepoPath = path.join(SCAI_HOME, 'repos', repoName, 'db.sqlite');
+return scaiRepoPath;
+}
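A minimal sketch of what this change implies for callers, assuming the index dir has been set and the schema initialized (e.g. via `scai index set` and `scai index start`); the path below is hypothetical:

```js
import { getDbForRepo, getDbPathForRepo } from './db/client.js';

// With Config.getIndexDir() === '/path/to/my-repo', the database lives at
// ~/.scai/repos/my-repo/db.sqlite (repo name = path.basename of the index dir).
const dbPath = getDbPathForRepo();

// getDbForRepo() creates the parent directory if needed and opens the DB in WAL mode.
const db = getDbForRepo();
const { count } = db.prepare('SELECT COUNT(*) AS count FROM files').get();
console.log(`${count} files indexed in ${dbPath}`);
```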
package/dist/db/fileIndex.js
CHANGED
@@ -1,4 +1,3 @@
-import { db } from './client.js';
 import fs from 'fs';
 import path from 'path';
 import { generateEmbedding } from '../lib/generateEmbedding.js';
@@ -6,6 +5,7 @@ import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
 import * as sqlTemplates from './sqlTemplates.js';
 import { stringSimilarity } from 'string-similarity-js';
 import { CANDIDATE_LIMIT } from '../constants.js';
+import { getDbForRepo } from './client.js';
 /**
 * 📄 Index a single file into the database.
 *
@@ -20,6 +20,7 @@ export function indexFile(filePath, summary, type) {
 const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
 const fileName = path.basename(normalizedPath); // Extracting the filename
 // Insert into files table
+const db = getDbForRepo();
 db.prepare(sqlTemplates.upsertFileTemplate).run({
 path: normalizedPath,
 filename: fileName, // Pass filename
@@ -51,6 +52,7 @@ export function indexFile(filePath, summary, type) {
 */
 export function queryFiles(safeQuery, limit = 10) {
 console.log(`Executing search query: ${safeQuery}`);
+const db = getDbForRepo();
 const results = db.prepare(`
 SELECT f.id, f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at
 FROM files f
@@ -81,6 +83,7 @@ export async function searchFiles(query, topK = 5) {
 }
 const safeQuery = sanitizeQueryForFts(query);
 console.log(`Executing search query in FTS5: ${safeQuery}`);
+const db = getDbForRepo();
 const ftsResults = db.prepare(`
 SELECT fts.rowid AS id, f.path, f.filename, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
 FROM files f
@@ -158,6 +161,7 @@ export function getFunctionsForFiles(fileIds) {
 if (!fileIds.length)
 return {};
 const placeholders = fileIds.map(() => '?').join(',');
+const db = getDbForRepo();
 const stmt = db.prepare(`
 SELECT f.file_id, f.name, f.start_line, f.end_line, f.content
 FROM functions f
package/dist/db/functionExtractors/extractFromJava.js
CHANGED
@@ -1,8 +1,9 @@
-import {
+import { getDbForRepo } from '../client.js';
 import { markFileAsSkippedTemplate } from '../sqlTemplates.js';
 export async function extractFromJava(filePath, _content, fileId) {
 console.warn(`⛔️ Java extraction not implemented: ${filePath}`);
 // Mark the file as skipped with the relevant status update
+const db = getDbForRepo();
 db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
 return false;
 }
package/dist/db/functionExtractors/extractFromJs.js
CHANGED
@@ -1,11 +1,11 @@
 import { parse } from 'acorn';
 import { ancestor as walkAncestor } from 'acorn-walk';
 import { generateEmbedding } from '../../lib/generateEmbedding.js';
-import { db } from '../client.js';
 import path from 'path';
 import { log } from '../../utils/log.js';
 import fs from 'fs';
 import { markFileAsSkippedTemplate, markFileAsExtractedTemplate, markFileAsFailedTemplate } from '../sqlTemplates.js';
+import { getDbForRepo } from '../client.js';
 function getFunctionName(node, parent, fileName) {
 if (node.id?.name)
 return node.id.name;
@@ -20,6 +20,7 @@ function getFunctionName(node, parent, fileName) {
 return `${fileName}:<anon>`;
 }
 export async function extractFromJS(filePath, content, fileId) {
+const db = getDbForRepo();
 try {
 const code = fs.readFileSync(filePath, 'utf-8');
 console.log(`[Debug] Attempting to parse: ${filePath}`);
package/dist/db/functionExtractors/extractFromXML.js
CHANGED
@@ -1,8 +1,9 @@
-import {
+import { getDbForRepo } from '../client.js';
 import { markFileAsSkippedTemplate } from '../sqlTemplates.js';
 export async function extractFromXML(filePath, _content, fileId) {
 console.warn(`⛔️ XML extraction not implemented: ${filePath}`);
 // Mark the file as skipped with the relevant status update
+const db = getDbForRepo();
 db.prepare(markFileAsSkippedTemplate).run({ id: fileId });
 return false;
 }
package/dist/db/functionExtractors/index.js
CHANGED
@@ -3,13 +3,14 @@ import { detectFileType } from '../../fileRules/detectFileType.js';
 import { extractFromJava } from './extractFromJava.js';
 import { extractFromJS } from './extractFromJs.js';
 import { extractFromXML } from './extractFromXML.js';
-import {
+import { getDbForRepo } from '../client.js';
 import { markFileAsFailedTemplate, markFileAsSkippedByPath } from '../sqlTemplates.js';
 /**
 * Detects file type and delegates to the appropriate extractor.
 */
 export async function extractFunctionsFromFile(filePath, content, fileId) {
 const type = detectFileType(filePath).trim().toLowerCase();
+const db = getDbForRepo();
 try {
 if (type === 'js' || type === 'ts' || type === 'javascript' || type === 'typescript') {
 log(`✅ Attempting to extract JS functions from ${filePath}`);
package/dist/db/schema.js
CHANGED
package/dist/index.js
CHANGED
@@ -1,6 +1,5 @@
 //!/usr/bin/env node
 import { Command } from "commander";
-import path from "path";
 import { Config } from './config.js';
 import { createRequire } from 'module';
 const require = createRequire(import.meta.url);
@@ -26,6 +25,7 @@ import { reviewPullRequestCmd } from "./commands/ReviewCmd.js";
 import { promptForToken } from "./github/token.js";
 import { validateGitHubTokenAgainstRepo } from "./github/githubAuthCheck.js";
 import { checkGit } from "./commands/GitCmd.js";
+import { runSwitchCommand, runInteractiveSwitch } from "./commands/SwitchCmd.js";
 // 🎛️ CLI Setup
 const cmd = new Command('scai')
 .version(version)
@@ -50,9 +50,8 @@ git
 await reviewPullRequestCmd('main', showAll);
 });
 git
-.command('
-.description('Suggest a commit message from staged changes')
-.option('-c, --commit', 'Automatically commit with suggested message')
+.command('commit')
+.description('Suggest a commit message from staged changes and optionally commit')
 .option('-l, --changelog', 'Generate and optionally stage a changelog entry')
 .action((options) => suggestCommitMessage(options));
 git
@@ -119,40 +118,61 @@ gen
 .description('Generate a Jest test file for the specified JS/TS module')
 .action((file) => generateTests(file));
 // ⚙️ Group: Configuration settings
-const
-
-.command('model <model>')
+const config = cmd.command('config').description('Manage SCAI configuration');
+config
+.command('set-model <model>')
 .description('Set the model to use')
 .action((model) => {
 Config.setModel(model);
 Config.show();
 });
-
-.command('lang <lang>')
+config
+.command('set-lang <lang>')
 .description('Set the programming language')
 .action((lang) => {
 Config.setLanguage(lang);
 Config.show();
 });
-
-.command('
-.
+config
+.command('show')
+.option('--raw', 'Show full raw config')
+.description('Display current configuration')
+.action((options) => {
+if (options.raw) {
+console.log(JSON.stringify(Config.getRaw(), null, 2));
+}
+else {
+Config.show();
+}
+});
+const index = cmd.command('index').description('index operations');
+index
+.command('start')
+.description('Index supported files in the configured index directory')
+.action(runIndexCommand);
+index
+.command('set <dir>')
+.description('Set and activate index directory')
 .action((dir) => {
-Config.setIndexDir(
+Config.setIndexDir(dir);
 Config.show();
 });
-
-.command('
-.description('
+index
+.command('list')
+.description('List all indexed repositories')
 .action(() => {
-Config.
-});
-
-.command('
-.description('
-.
-
-
+Config.printAllRepos(); // 👈 simple and clean
+});
+index
+.command('switch [input]')
+.description('Switch active repository (by key or indexDir). Run without input for a list')
+.action((input) => {
+if (input) {
+runSwitchCommand(input);
+}
+else {
+runInteractiveSwitch();
+}
 });
 cmd
 .command('backup')
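Because those hunks are fragmented, here is a condensed sketch of the resulting command tree (sub-command names taken from the diff; option handling and help text trimmed):

```js
import { Command } from 'commander';

// Condensed outline of the new CLI groups wired above.
const cmd = new Command('scai');

const config = cmd.command('config').description('Manage SCAI configuration');
config.command('set-model <model>');   // Config.setModel(model)
config.command('set-lang <lang>');     // Config.setLanguage(lang)
config.command('show').option('--raw', 'Show full raw config');

const index = cmd.command('index').description('index operations');
index.command('start');            // runIndexCommand()
index.command('set <dir>');        // Config.setIndexDir(dir)
index.command('list');             // Config.printAllRepos()
index.command('switch [input]');   // runSwitchCommand(input) or runInteractiveSwitch()

cmd.parse(process.argv);
```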
package/dist/scripts/migrateDb.js
CHANGED
@@ -1,7 +1,8 @@
 // scripts/migrateDb.ts
-import {
+import { getDbForRepo } from "../db/client.js";
 import { log } from "../utils/log.js";
 // scripts/resetFunctionExtraction.ts
+const db = getDbForRepo();
 try {
 db.prepare(`DELETE FROM function_calls`).run();
 log("✅ Deleted all rows from function_calls.");
package/dist/utils/fileTree.js
CHANGED
@@ -1,9 +1,11 @@
 import fs from 'fs';
 import path from 'path';
-
+import { getIndexDir } from '../constants.js';
+export function generateFocusedFileTree(focusPath, maxDepth = 2) {
 const absoluteFocus = path.resolve(focusPath);
 const parentDir = path.dirname(absoluteFocus);
-const
+const indexDir = getIndexDir();
+const relativeTitle = path.relative(indexDir, parentDir).replace(/\\/g, '/');
 const tree = generateFileTree(parentDir, maxDepth, absoluteFocus);
 return `📂 ${relativeTitle || '.'}\n${tree}`;
 }
package/dist/utils/normalizePath.js
CHANGED
@@ -1,4 +1,5 @@
 // src/utils/normalizePath.ts
+import path from "path";
 /**
 * Normalizes a path string for loose, fuzzy matching:
 * - Lowercases
@@ -8,3 +9,11 @@
 export function normalizePathForLooseMatch(p) {
 return p.toLowerCase().replace(/[\\/]/g, '').replace(/\s+/g, '');
 }
+// Helper to normalize and resolve paths to a consistent format (forward slashes)
+export function normalizePath(p) {
+return path.resolve(p).replace(/\\/g, '/');
+}
+export function getRepoKeyForPath(pathToMatch, config) {
+const norm = normalizePath(pathToMatch);
+return Object.entries(config.repos).find(([, val]) => normalizePath(val.indexDir) === norm)?.[0] || null;
+}
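A quick sketch of how the two new helpers are used together elsewhere in this release (SwitchCmd and Config); the config object and paths are made up:

```js
import { normalizePath, getRepoKeyForPath } from './utils/normalizePath.js';

// normalizePath resolves to an absolute path and forces forward slashes.
normalizePath('/path/to/../to/repo');   // -> '/path/to/repo'

// getRepoKeyForPath matches an input path against each configured repo's indexDir.
const config = { repos: { 'repo-key': { indexDir: '/path/to/repo' } } };
getRepoKeyForPath('/path/to/repo', config);   // -> 'repo-key'
getRepoKeyForPath('/somewhere/else', config); // -> null
```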