scai 0.1.17 → 0.1.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/ChangeLogUpdateCmd.js +3 -3
- package/dist/commands/CommitSuggesterCmd.js +4 -4
- package/dist/commands/DaemonCmd.js +58 -0
- package/dist/commands/IndexCmd.js +44 -0
- package/dist/commands/ModulePipelineCmd.js +1 -1
- package/dist/commands/QueryCmd.js +20 -0
- package/dist/commands/RefactorCmd.js +5 -5
- package/dist/commands/ResetDbCmd.js +27 -0
- package/dist/commands/StopDaemonCmd.js +23 -0
- package/dist/commands/SummaryCmd.js +10 -6
- package/dist/commands/TestGenCmd.js +2 -2
- package/dist/config/IgnoredExtensions.js +25 -0
- package/dist/config/IgnoredPaths.js +21 -0
- package/dist/db/client.js +7 -0
- package/dist/db/fileIndex.js +51 -0
- package/dist/db/schema.js +18 -0
- package/dist/index.js +29 -7
- package/dist/lib/generate.js +9 -3
- package/dist/pipeline/modules/changeLogModule.js +17 -3
- package/dist/pipeline/modules/cleanupModule.js +3 -3
- package/dist/pipeline/modules/commentModule.js +3 -3
- package/dist/pipeline/modules/commitSuggesterModule.js +5 -5
- package/dist/pipeline/modules/generateTestsModule.js +6 -6
- package/dist/pipeline/modules/refactorModule.js +3 -3
- package/dist/pipeline/modules/summaryModule.js +25 -46
- package/dist/pipeline/runModulePipeline.js +4 -3
- package/dist/utils/detectFileType.js +52 -0
- package/dist/utils/shouldIgnoreFiles.js +6 -0
- package/package.json +3 -1
- package/dist/context/generateProjectContext.js +0 -104
- package/dist/context/loadProjectContext.js +0 -1

package/dist/commands/ChangeLogUpdateCmd.js
CHANGED
@@ -14,8 +14,8 @@ export async function handleChangelogUpdate() {
         console.log("⚠️ No staged or unstaged changes to include in changelog.");
         return;
     }
-    const result = await runModulePipeline([changelogModule], {
-    if (!result.
+    const result = await runModulePipeline([changelogModule], { content: diff });
+    if (!result.content.trim()) {
         console.log("✅ No significant changes for changelog.");
         return;
     }
@@ -29,7 +29,7 @@ export async function handleChangelogUpdate() {
         console.log("📄 Creating new CHANGELOG.md");
     }
     const today = new Date().toISOString().split("T")[0];
-    const newEntry = `\n\n## ${today}\n\n${result.
+    const newEntry = `\n\n## ${today}\n\n${result.content}`;
     await fs.writeFile(changelogPath, existing + newEntry, "utf-8");
     console.log("📝 CHANGELOG.md updated.");
 }

package/dist/commands/CommitSuggesterCmd.js
CHANGED
@@ -60,8 +60,8 @@ export async function suggestCommitMessage(options) {
         console.log('⚠️ No staged changes to suggest a message for.');
         return;
     }
-    const
-    const suggestions =
+    const response = await commitSuggesterModule.run({ content: diff });
+    const suggestions = response.suggestions || [];
     if (!suggestions.length) {
         console.log('⚠️ No commit suggestions generated.');
         return;
@@ -80,8 +80,8 @@ export async function suggestCommitMessage(options) {
         const choice = await askUserToChoose(suggestions);
         if (choice === 'regenerate') {
             console.log('\n🔄 Regenerating suggestions...\n');
-            const
-            suggestions.splice(0, suggestions.length, ...(
+            const response = await commitSuggesterModule.run({ content: diff });
+            suggestions.splice(0, suggestions.length, ...(response.suggestions || []));
             continue;
         }
         if (choice === 'custom') {

package/dist/commands/DaemonCmd.js
ADDED
@@ -0,0 +1,58 @@
+import { summaryModule } from '../pipeline/modules/summaryModule.js';
+import { db } from '../db/client.js';
+import fs from 'fs/promises';
+import fsSync from 'fs';
+import os from 'os';
+import path from 'path';
+const MAX_FILES = 500;
+const DAEMON_DURATION_MINUTES = 10;
+const DAEMON_INTERVAL_MINUTES = 30;
+const PID_PATH = path.join(os.homedir(), '.scai/daemon.pid');
+export async function runDaemonBatch() {
+    console.log('📥 Daemon batch: scanning for files to summarize...');
+    const rows = db.prepare(`
+        SELECT path, type FROM files
+        WHERE summary IS NULL OR summary = ''
+        ORDER BY last_modified DESC
+        LIMIT ?
+    `).all(MAX_FILES);
+    if (rows.length === 0) {
+        console.log('✅ No files left to summarize.');
+        return;
+    }
+    for (const row of rows) {
+        try {
+            const content = await fs.readFile(row.path, 'utf-8');
+            const result = await summaryModule.run({ content, filepath: row.path });
+            const summary = result?.summary?.trim() ? result.summary : null;
+            db.prepare(`UPDATE files SET summary = ?, indexed_at = datetime('now') WHERE path = ?`)
+                .run(summary, row.path);
+            console.log(`📝 Summarized: ${row.path}`);
+        }
+        catch (err) {
+            console.warn(`⚠️ Failed: ${row.path}`, err instanceof Error ? err.message : err);
+        }
+    }
+}
+export async function runDaemonScheduler() {
+    // Write PID to file
+    fsSync.mkdirSync(path.dirname(PID_PATH), { recursive: true });
+    fsSync.writeFileSync(PID_PATH, process.pid.toString(), 'utf-8');
+    console.log('🧠 Daemon started. PID:', process.pid);
+    console.log('⏱️ Will run every 30 minutes for 10 minutes.');
+    console.log('🧠 Background summarizer started. Will run every 30 minutes for 10 minutes.');
+    const startDaemonCycle = async () => {
+        const startTime = Date.now();
+        const endTime = startTime + DAEMON_DURATION_MINUTES * 60 * 1000;
+        while (Date.now() < endTime) {
+            await runDaemonBatch();
+            await new Promise(res => setTimeout(res, 60 * 1000)); // 1 min pause between mini-batches
+        }
+        console.log(`⏱️ Daemon completed 10-minute cycle. Next in ${DAEMON_INTERVAL_MINUTES} min.`);
+    };
+    // Repeat every 30 minutes
+    while (true) {
+        await startDaemonCycle();
+        await new Promise(res => setTimeout(res, DAEMON_INTERVAL_MINUTES * 60 * 1000));
+    }
+}
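
For reference, a minimal sketch of running one summarization batch programmatically rather than via the CLI. The import paths assume deep imports into dist/ are permitted (no "exports" map is shown restricting them), and initSchema() is called first so the files table exists; both assumptions are the editor's, not part of the diff.

    // Sketch only: run a single daemon batch from the project whose .scai/db.sqlite should be used.
    import { initSchema } from 'scai/dist/db/schema.js';
    import { runDaemonBatch } from 'scai/dist/commands/DaemonCmd.js';

    initSchema();           // creates the files/files_fts tables if missing
    await runDaemonBatch(); // summarizes up to MAX_FILES rows that still lack a summary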

package/dist/commands/IndexCmd.js
ADDED
@@ -0,0 +1,44 @@
+import fg from 'fast-glob';
+import path from 'path';
+import { initSchema } from '../db/schema.js';
+import { indexFile } from '../db/fileIndex.js';
+import { shouldIgnoreFile } from '../utils/shouldIgnoreFiles.js';
+import { detectFileType } from '../utils/detectFileType.js';
+import { runDaemonScheduler } from './DaemonCmd.js';
+import { IGNORED_FOLDER_GLOBS } from '../config/IgnoredPaths.js';
+const IGNORE = [
+    '**/node_modules/**', '**/dist/**', '**/build/**',
+    '**/coverage/**', '**/.git/**', '**/*.test.*'
+];
+export async function runIndexCommand(targetDir = process.cwd(), options = {}) {
+    console.log(`📂 Indexing files in: ${targetDir}`);
+    initSchema();
+    const files = await fg('**/*.*', {
+        cwd: targetDir,
+        ignore: IGNORED_FOLDER_GLOBS,
+        absolute: true,
+    });
+    const countByExt = {};
+    let count = 0;
+    for (const file of files) {
+        if (shouldIgnoreFile(file))
+            continue;
+        try {
+            const type = detectFileType(file);
+            indexFile(file, null, type); // empty summary for now
+            const ext = path.extname(file);
+            countByExt[ext] = (countByExt[ext] || 0) + 1;
+            console.log(`📄 Indexed: ${path.relative(targetDir, file)}`);
+            count++;
+        }
+        catch (err) {
+            console.warn(`⚠️ Skipped ${file}:`, err instanceof Error ? err.message : err);
+        }
+    }
+    console.log('📊 Indexed files by extension:', countByExt);
+    console.log(`✅ Done. Indexed ${count} files.`);
+    if (options.detached) {
+        console.log('🚀 Starting summarizer daemon in background mode...');
+        runDaemonScheduler(); // Infinite loop every 30 min
+    }
+}
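
A usage sketch for calling the new index command from code rather than the CLI; the import path and the detached: false choice are illustrative assumptions, not taken from the diff.

    // Sketch: index the current project once, without starting the 30-minute scheduler.
    import { runIndexCommand } from 'scai/dist/commands/IndexCmd.js';

    await runIndexCommand(process.cwd(), { detached: false });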

package/dist/commands/ModulePipelineCmd.js
CHANGED
@@ -16,7 +16,7 @@ export async function runModulePipelineFromCLI(file, options) {
         console.error(`❌ Could not read file: ${file}`);
         process.exit(1);
     }
-    const input = {
+    const input = { content: fileContent, filepath: file };
     // Retrieve modules from the registry
     const resolvedModules = moduleNames.map((name) => {
         const module = getModuleByName(name);

package/dist/commands/QueryCmd.js
ADDED
@@ -0,0 +1,20 @@
+import { searchFiles } from '../db/fileIndex.js';
+import path from 'path';
+export async function runQueryCommand(query) {
+    if (!query) {
+        console.error('❌ Please provide a search query.\n👉 Usage: scai query "keyword"');
+        return;
+    }
+    console.log(`🔍 Searching for: "${query}"\n`);
+    const results = searchFiles(query);
+    if (results.length === 0) {
+        console.log('⚠️ No matching files found.');
+        return;
+    }
+    results.forEach((result, index) => {
+        console.log(`📄 [${index + 1}] ${path.relative(process.cwd(), result.path)}`);
+        console.log(`   📝 ${result.summary}`);
+        console.log();
+    });
+    console.log(`✅ Found ${results.length} result(s).`);
+}

package/dist/commands/RefactorCmd.js
CHANGED
@@ -25,17 +25,17 @@ export async function handleRefactor(filepath, options = {}) {
             return;
         }
         // Read source code
-        const
+        const content = await fs.readFile(filepath, 'utf-8');
         // Run through pipeline modules
-        const
-        if (!
+        const response = await runModulePipeline([addCommentsModule, cleanupModule], { content });
+        if (!response.content.trim())
             throw new Error('⚠️ Model returned empty result');
         // Save refactored output
-        await fs.writeFile(refactoredPath,
+        await fs.writeFile(refactoredPath, response.content, 'utf-8');
         console.log(`✅ Refactored code saved to: ${refactoredPath}`);
         console.log(`ℹ️ Run again with '--apply' to overwrite the original.`);
     }
     catch (err) {
-        console.error('❌ Error in refactor command:', err.message);
+        console.error('❌ Error in refactor command:', err instanceof Error ? err.message : err);
     }
 }

package/dist/commands/ResetDbCmd.js
ADDED
@@ -0,0 +1,27 @@
+import fs from 'fs';
+import path from 'path';
+import { db } from '../db/client.js';
+export function resetDatabase() {
+    const dbPath = path.resolve(process.cwd(), '.scai/db.sqlite');
+    try {
+        db.close(); // 🔒 Make sure the DB is closed
+        console.log('🔒 Closed SQLite database connection.');
+    }
+    catch (err) {
+        console.warn('⚠️ Could not close database:', err);
+    }
+    if (fs.existsSync(dbPath)) {
+        try {
+            fs.unlinkSync(dbPath);
+            console.log('🧹 Deleted existing database.');
+        }
+        catch (err) {
+            console.error('❌ Failed to delete DB file:', err instanceof Error ? err.message : err);
+            return;
+        }
+    }
+    else {
+        console.log('ℹ️ No existing database found.');
+    }
+    console.log('✅ Database has been reset. You can now re-run: scai index');
+}

package/dist/commands/StopDaemonCmd.js
ADDED
@@ -0,0 +1,23 @@
+import fs from 'fs';
+import os from 'os';
+import path from 'path';
+const PID_PATH = path.join(os.homedir(), '.scai/daemon.pid');
+export async function runStopDaemonCommand() {
+    if (!fs.existsSync(PID_PATH)) {
+        console.log('🛑 No daemon is currently running.');
+        return;
+    }
+    const pid = parseInt(fs.readFileSync(PID_PATH, 'utf-8'), 10);
+    if (isNaN(pid)) {
+        console.error('⚠️ Invalid PID file.');
+        return;
+    }
+    try {
+        process.kill(pid);
+        fs.unlinkSync(PID_PATH);
+        console.log(`✅ Daemon process ${pid} stopped.`);
+    }
+    catch (err) {
+        console.error(`❌ Failed to stop process ${pid}:`, err instanceof Error ? err.message : err);
+    }
+}
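
A related Node idiom, not part of the package: process.kill(pid, 0) sends no signal but throws if the PID no longer exists, which is a cheap way to check whether the process recorded in the PID file is still alive.

    // Sketch: probe the recorded PID without stopping it (path mirrors PID_PATH above).
    import fs from 'fs';
    import os from 'os';
    import path from 'path';

    const pidPath = path.join(os.homedir(), '.scai/daemon.pid');
    if (fs.existsSync(pidPath)) {
        const pid = parseInt(fs.readFileSync(pidPath, 'utf-8'), 10);
        try {
            process.kill(pid, 0); // signal 0 = existence check; throws if the process is gone
            console.log(`Daemon ${pid} is running.`);
        }
        catch {
            console.log(`Stale PID file: process ${pid} is not running.`);
        }
    }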

package/dist/commands/SummaryCmd.js
CHANGED
@@ -3,10 +3,10 @@ import readline from 'readline';
 import { summaryModule } from '../pipeline/modules/summaryModule.js'; // Import summaryModule
 import { summarizeCode } from '../utils/summarizer.js'; // Import summarizeCode
 export async function summarizeFile(filepath) {
-    let
+    let content = '';
     if (filepath) {
         try {
-
+            content = await fs.readFile(filepath, 'utf-8');
         }
         catch (err) {
             console.error(`❌ Could not read or summarize ${filepath}:`, err.message);
@@ -24,14 +24,18 @@ export async function summarizeFile(filepath) {
             terminal: false,
         });
         for await (const line of rl) {
-
+            content += line + '\n';
         }
     }
-    if (
+    if (content.trim()) {
         // Call the summary module to get the raw summary
-        const
+        const response = await summaryModule.run({ content, filepath });
         // Pass the summary text to the utility function for formatting
-
+        if (!response.summary) {
+            console.warn("No summary available.");
+            return;
+        }
+        const formattedSummary = summarizeCode(response.summary);
         console.log(formattedSummary);
     }
     else {

package/dist/commands/TestGenCmd.js
CHANGED
@@ -4,8 +4,8 @@ import { cleanupModule } from '../pipeline/modules/cleanupModule.js';
 import { runModulePipeline } from '../pipeline/runModulePipeline.js';
 export async function generateTests(filepath) {
     try {
-        const
-        const result = await runModulePipeline([generateTestsModule, cleanupModule], {
+        const content = await fs.readFile(filepath, 'utf-8');
+        const result = await runModulePipeline([generateTestsModule, cleanupModule], { content, filepath });
         console.log('✅ Test generated and cleaned up.');
     }
     catch (err) {

package/dist/config/IgnoredExtensions.js
ADDED
@@ -0,0 +1,25 @@
+// src/config/IgnoredExtensions.ts
+export const IGNORED_EXTENSIONS = [
+    // 🖼 Media
+    '.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg', '.ico',
+    '.mp4', '.mp3', '.mov', '.avi', '.mkv', '.flv', '.wav', '.flac',
+    // 📦 Archives & install packages
+    '.zip', '.tar', '.gz', '.bz2', '.xz', '.rar', '.7z',
+    '.jar', '.war', '.ear', // Java packaging
+    '.deb', '.rpm', '.pkg', '.msi', '.dmg', '.cab', '.xz',
+    // 🧱 Binaries & executables
+    '.exe', '.dll', '.bin', '.so', '.dylib', '.a', '.lib',
+    '.iso', '.img', '.elf', '.o', '.obj',
+    // 🧪 Runtime / build / cache
+    '.log', '.lock', '.tmp', '.map',
+    '.db', '.sqlite', '.pkl', '.sav', '.rdb', '.ldb',
+    '.pyc', '.class', '.tsbuildinfo', '.coverage',
+    // 🔤 Fonts & styles
+    '.woff', '.woff2', '.ttf', '.eot', '.otf', '.css.map',
+    // 🔐 Certs, keys, credentials
+    '.crt', '.key', '.pem', '.pub', '.asc', '.gpg',
+    // ♻️ Backups / temp
+    '.bak', '.old', '.swp', '.swo', '.tmp', '.orig',
+    // 🌐 Misc
+    '.torrent', '.DS_Store', '.env.local', '.env.production', '.env.development',
+];

package/dist/config/IgnoredPaths.js
ADDED
@@ -0,0 +1,21 @@
+// List of folders and file globs to ignore during indexing and scanning.
+// These patterns are used by fast-glob in all commands.
+export const IGNORED_FOLDER_GLOBS = [
+    '**/node_modules/**',
+    '**/dist/**',
+    '**/build/**',
+    '**/target/**',
+    '**/coverage/**',
+    '**/.git/**',
+    '**/.next/**',
+    '**/.vercel/**',
+    '**/.idea/**',
+    '**/.vscode/**',
+    '**/__pycache__/**',
+    '**/.venv/**',
+    '**/env/**',
+    '**/.gradle/**',
+    '**/.output/**',
+    '**/tmp/**',
+    '**/*.test.*',
+];
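
The new shouldIgnoreFiles.js (+6 lines in the summary above) is not shown in the rendered hunks; a hypothetical helper of the kind these two config lists are meant to feed could look like the sketch below. The helper name and logic are illustrative assumptions, not the package's actual implementation.

    import path from 'path';
    import { IGNORED_EXTENSIONS } from 'scai/dist/config/IgnoredExtensions.js';

    // Hypothetical: skip a file when its extension is on the ignore list.
    const shouldIgnoreByExtension = (file) =>
        IGNORED_EXTENSIONS.includes(path.extname(file).toLowerCase());

    console.log(shouldIgnoreByExtension('logo.png')); // true
    console.log(shouldIgnoreByExtension('app.ts'));   // false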

package/dist/db/client.js
ADDED
@@ -0,0 +1,7 @@
+import Database from 'better-sqlite3';
+import path from 'path';
+import fs from 'fs';
+const DB_PATH = path.resolve(process.cwd(), '.scai/db.sqlite');
+// Ensure directory exists
+fs.mkdirSync(path.dirname(DB_PATH), { recursive: true });
+export const db = new Database(DB_PATH);

package/dist/db/fileIndex.js
ADDED
@@ -0,0 +1,51 @@
+import { db } from './client.js';
+import fs from 'fs';
+export function indexFile(filePath, summary, type) {
+    const stats = fs.statSync(filePath);
+    const lastModified = stats.mtime.toISOString();
+    // 1) INSERT new rows (only when path not present)
+    const insertStmt = db.prepare(`
+        INSERT OR IGNORE INTO files
+            (path, summary, type, indexed_at, last_modified)
+        VALUES (?, ?, ?, datetime('now'), ?)
+    `);
+    insertStmt.run(filePath, summary, type, lastModified);
+    // 2) UPDATE metadata if file already existed and changed
+    const updateStmt = db.prepare(`
+        UPDATE files
+        SET type = ?,
+            last_modified = ?,
+            indexed_at = datetime('now')
+        WHERE path = ?
+          AND last_modified != ?
+    `);
+    updateStmt.run(type, lastModified, filePath, lastModified);
+    // Step 1: Delete from FTS where the path matches
+    db.prepare(`
+        DELETE FROM files_fts
+        WHERE rowid = (SELECT id FROM files WHERE path = ?)
+    `).run(filePath);
+    // Step 2: Insert into FTS with the same id
+    db.prepare(`
+        INSERT INTO files_fts(rowid, path, summary)
+        VALUES((SELECT id FROM files WHERE path = ?), ?, ?)
+    `).run(filePath, filePath, summary);
+}
+export function searchFiles(query, limit = 10) {
+    // Use FTS5 MATCH for relevance-ranked results
+    const stmt = db.prepare(`
+        SELECT f.path, f.summary, f.type, f.last_modified, f.indexed_at,
+               bm25(files_fts) AS rank
+        FROM files_fts
+        JOIN files f ON files_fts.rowid = f.id
+        WHERE files_fts MATCH ?
+        ORDER BY rank
+        LIMIT ?
+    `);
+    const matchQuery = query
+        .trim()
+        .split(/\s+/)
+        .map(token => `${token}*`) // prefix search
+        .join(' ');
+    return stmt.all(matchQuery, limit);
+}
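
searchFiles rewrites the free-text query into an FTS5 prefix query before matching; the transformation itself is plain string handling and can be checked in isolation:

    // "daemon sched" becomes "daemon* sched*", so partial words still match.
    const matchQuery = 'daemon sched'
        .trim()
        .split(/\s+/)
        .map(token => `${token}*`)
        .join(' ');
    console.log(matchQuery); // -> daemon* sched*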

package/dist/db/schema.js
ADDED
@@ -0,0 +1,18 @@
+import { db } from "./client.js";
+export function initSchema() {
+    db.exec(`
+        CREATE TABLE IF NOT EXISTS files (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            path TEXT UNIQUE,
+            summary TEXT,
+            type TEXT,
+            indexed_at TEXT,
+            last_modified TEXT
+        );
+
+        -- FTS5 table for fast full‑text search of summaries and paths
+        CREATE VIRTUAL TABLE IF NOT EXISTS files_fts
+        USING fts5(path, summary, content='');
+    `);
+    console.log('✅ SQLite schema initialized');
+}

package/dist/index.js
CHANGED
@@ -15,7 +15,11 @@ import { ModelConfig } from './config/ModelConfig.js';
 import { summarizeFile } from "./commands/SummaryCmd.js";
 import { handleChangelogUpdate } from './commands/ChangeLogUpdateCmd.js';
 import { runModulePipelineFromCLI } from './commands/ModulePipelineCmd.js';
-import {
+import { runIndexCommand } from './commands/IndexCmd.js';
+import { resetDatabase } from './commands/ResetDbCmd.js';
+import { runQueryCommand } from './commands/QueryCmd.js';
+import { runDaemonBatch } from './commands/DaemonCmd.js';
+import { runStopDaemonCommand } from "./commands/StopDaemonCmd.js";
 // Create the CLI instance
 const cmd = new Command('scai')
     .version(version)
@@ -29,12 +33,6 @@ cmd
     await bootstrap();
     console.log('✅ Model initialization completed!');
 });
-cmd
-    .command('context')
-    .description('Generate a summary-based context map of your project')
-    .action(async () => {
-    await generateProjectContext(); // Your new scanner module
-});
 cmd
     .command('sugg')
     .description('Suggest a commit message from staged changes')
@@ -73,6 +71,30 @@ cmd
     .action(() => {
     ModelConfig.logCurrentConfig();
 });
+cmd
+    .command('daemon')
+    .description('Run background summarization of indexed files')
+    .action(runDaemonBatch);
+cmd
+    .command('stop-daemon')
+    .description('Stop the background summarizer daemon')
+    .action(runStopDaemonCommand);
+cmd
+    .command('index [targetDir]')
+    .description('Index supported files in the given directory (or current folder if none)')
+    .option('-d, --detached', 'Run summarizer daemon after indexing')
+    .action((targetDir, options) => {
+    runIndexCommand(targetDir, { detached: options.detached });
+});
+cmd
+    .command('query <query>')
+    .description('Search indexed files by keyword')
+    .action(runQueryCommand);
+cmd
+    .command('reset-db')
+    .description('Delete and reset the SQLite database')
+    .action(() => resetDatabase());
+// Default
 cmd
     .arguments('<file>')
     .option('-m, --modules <modules>', 'Comma-separated list of modules to run (e.g., comments,cleanup,summary)')

package/dist/lib/generate.js
CHANGED
@@ -1,5 +1,6 @@
+// File: lib/generate.ts
 import ora from 'ora';
-export async function generate(
+export async function generate(input, model) {
     const spinner = ora(`🧠 Thinking with ${model}...`).start();
     try {
         const res = await fetch('http://localhost:11434/api/generate', {
@@ -7,16 +8,21 @@ export async function generate(prompt, model) {
             headers: { 'Content-Type': 'application/json' },
             body: JSON.stringify({
                 model,
-                prompt,
+                prompt: input.content,
                 stream: false,
             }),
         });
         const data = await res.json();
         spinner.succeed('✅ Model response received.');
-
+        process.stdout.write('\n'); // ✅ Prevents terminal suppression bug
+        return {
+            content: data.response?.trim() ?? '',
+            filepath: input.filepath,
+        };
     }
     catch (err) {
         spinner.fail('❌ Model request failed.');
+        process.stdout.write('\n'); // In case of error flush output too
         throw err;
     }
 }
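
generate() now takes and returns an object instead of a bare prompt string. A minimal calling sketch follows; the model name is only an example (any model served by the local Ollama instance on port 11434 works), and the deep import path into dist/ is the editor's assumption.

    import { generate } from 'scai/dist/lib/generate.js';

    const out = await generate(
        { content: 'Summarize this in one line: better-sqlite3 indexing', filepath: 'demo.txt' },
        'llama3', // example model name; must already be pulled in Ollama
    );
    console.log(out.content);  // trimmed model response ('' if none)
    console.log(out.filepath); // 'demo.txt', passed through unchanged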

package/dist/pipeline/modules/changeLogModule.js
CHANGED
@@ -9,13 +9,27 @@ export const changelogModule = {
 You're an experienced changelog writer. Based on this Git diff, write a markdown bullet-point entry suitable for CHANGELOG.md:
 
 --- DIFF START ---
-${input.
+${input.content}
 --- DIFF END ---
 
 ✅ If the changes are significant, return a changelog entry.
 ❌ If not, return ONLY: "NO UPDATE".
 `.trim();
-        const
-
+        const response = await generate({ content: prompt }, model);
+        const summary = response?.summary?.trim();
+        if (!summary || summary === 'NO UPDATE') {
+            // Return an empty summary and empty suggestions if there is no update.
+            return { content: response.content,
+                summary,
+                suggestions: response?.suggestions ?? [],
+                filepath: input.filepath };
+        }
+        // Return the actual changelog summary and any suggestions
+        return {
+            content: response.content,
+            summary,
+            suggestions: response?.suggestions ?? [],
+            filepath: input.filepath,
+        };
     },
 };

package/dist/pipeline/modules/cleanupModule.js
CHANGED
@@ -15,8 +15,8 @@ function isNaturalLanguageNoise(line) {
 export const cleanupModule = {
     name: 'cleanup',
     description: 'Remove markdown fences and natural language noise from top/bottom of code',
-    async run({
-        let lines =
+    async run({ content }) {
+        let lines = content.trim().split('\n');
         // ───── Clean top ─────
         while (lines.length) {
             const line = lines[0].trim();
@@ -37,6 +37,6 @@ export const cleanupModule = {
                 break;
             }
         }
-        return {
+        return { content: lines.join('\n').trim() };
     }
 };

package/dist/pipeline/modules/commentModule.js
CHANGED
@@ -19,10 +19,10 @@ Your task is to add clear and insightful single-line comments to the code.
 - The code should be valid ${lang.toUpperCase()} after your changes.
 
 --- CODE START ---
-${input.
+${input.content}
 --- CODE END ---
 `.trim();
-        const
-        return {
+        const response = await generate({ content: prompt }, model);
+        return { content: response.content === 'NO UPDATE' ? '' : response.content };
     },
 };

package/dist/pipeline/modules/commitSuggesterModule.js
CHANGED
@@ -3,7 +3,7 @@ import { ModelConfig } from '../../config/ModelConfig.js';
 export const commitSuggesterModule = {
     name: 'commitSuggester',
     description: 'Suggests conventional commit messages from Git diff',
-    async run({
+    async run({ content }) {
         const model = ModelConfig.getModel();
         const prompt = `
 Suggest ALWAYS 3 concise, conventional Git commit messages based on the input code diff.
@@ -14,16 +14,16 @@ Use this format ONLY:
 3. refactor: ...
 
 Here is the diff:
-${
+${content}
 `.trim();
-        const
-        const lines =
+        const response = await generate({ content: prompt }, model);
+        const lines = response.content
             .split('\n')
             .map(line => line.trim())
             .filter(line => /^\d+\.\s+/.test(line));
         const suggestions = lines.map(line => line.replace(/^\d+\.\s+/, '').replace(/^"(.*)"$/, '$1').trim());
         return {
-
+            content,
             suggestions
         };
     }

package/dist/pipeline/modules/generateTestsModule.js
CHANGED
@@ -6,7 +6,7 @@ import { generate } from '../../lib/generate.js';
 export const generateTestsModule = {
     name: 'generateTests',
     description: 'Generate a Jest test file for the class/module',
-    async run({
+    async run({ content, filepath }) {
         const model = ModelConfig.getModel();
         const lang = ModelConfig.getLanguage();
         if (!filepath)
@@ -21,16 +21,16 @@ Guidelines:
 - Only return valid TypeScript code
 
 --- CODE START ---
-${
+${content}
 --- CODE END ---
 `.trim();
-        const
-        if (!
+        const response = await generate({ content: prompt }, model);
+        if (!response)
             throw new Error('⚠️ No test code returned from model');
         const { dir, name } = path.parse(filepath);
         const testPath = path.join(dir, `${name}.test.ts`);
-        await fs.writeFile(testPath,
+        await fs.writeFile(testPath, response.content, 'utf-8');
         console.log(`✅ Test file saved to: ${testPath}`);
-        return {
+        return { content, filepath }; // unchanged input
     }
 };

package/dist/pipeline/modules/refactorModule.js
CHANGED
@@ -16,13 +16,13 @@ Refactor the following code:
 - Output the full, valid ${lang.toUpperCase()} code
 
 --- CODE START ---
-${input.
+${input.content}
 --- CODE END ---
 `.trim();
-        const response = await generate(prompt, model);
+        const response = await generate({ content: prompt }, model);
         if (!response) {
             throw new Error('❌ Model returned empty response for refactoring.');
         }
-        return {
+        return { content: response.content };
     }
 };

package/dist/pipeline/modules/summaryModule.js
CHANGED
@@ -1,64 +1,43 @@
 import { ModelConfig } from '../../config/ModelConfig.js';
 import { generate } from '../../lib/generate.js';
-import fs from 'fs/promises';
 import path from 'path';
 export const summaryModule = {
     name: 'summary',
-    description: '
-    async
+    description: 'Generates a general summary of any file content.',
+    run: async ({ content, filepath }) => {
         const model = ModelConfig.getModel();
-        const
-
-        //
-        try {
-            const raw = await fs.readFile('./.scai/context.flat.json', 'utf-8');
-            const flatContext = JSON.parse(raw);
-            if (filepath) {
-                const dir = path.dirname(filepath).replace(/\\/g, '/'); // Normalize slashes
-                console.log("Dir: ", dir);
-                const contextSubset = Object.entries(flatContext)
-                    .filter(([file]) => file.startsWith(dir))
-                    .slice(0, 10); // limit if needed
-                if (contextSubset.length) {
-                    contextString = '📁 Local Context:\n' + contextSubset
-                        .map(([file, summary]) => `- ${file}: ${summary}`)
-                        .join('\n');
-                    console.log("Context string input to prompt: ", contextString);
-                }
-            }
-        }
-        catch (err) {
-            console.warn('⚠️ Context file not found or failed to parse.');
-        }
+        const ext = filepath ? path.extname(filepath).toLowerCase() : '';
+        const filename = filepath ? path.basename(filepath) : '';
+        // More neutral prompt for general-purpose content
         const prompt = `
-You are
-
-Project Overview:
-${contextString ? contextString + '\n\n' : ''}
+You are an assistant specialized in summarizing files.
 
-
+Your task is to summarize the contents of the following file as clearly and concisely as possible.
 
-
-
-// - [Main features or components]
-// - [Any interesting logic or patterns]
+File: ${filename}
+Extension: ${ext}
 
-
--
--
+📋 Instructions:
+- Identify the main topic and purpose of the file
+- Summarize key content and sections
+- Mention any technical, legal, or structural info if relevant
+- Do NOT include the raw content or repeat lines from it
+- Return a human-readable bullet-point summary
 
----
-${
----
+--- FILE CONTENT START ---
+${content}
+--- FILE CONTENT END ---
 `.trim();
-        const
-        if (
-
-        console.log(
+        const response = await generate({ content, filepath }, model);
+        if (response.content) {
+            response.summary = response.content;
+            console.log('\n📝 Summary:\n');
+            console.log(response.summary);
         }
         else {
            console.warn('⚠️ No summary generated.');
+            response.summary = '⚠️ No summary generated.';
        }
-        return
+        return response;
    }
 };

package/dist/pipeline/runModulePipeline.js
CHANGED
@@ -5,12 +5,13 @@ export async function runModulePipeline(modules, input) {
     if (isDebug) {
         console.log('Input: ', input);
     }
+    let response = { content: '' };
     for (const mod of modules) {
         try {
-
+            response = await mod.run(current);
             if (isDebug) {
                 console.log(`⚙️ Running: ${mod.name}`);
-                console.log("Current: ",
+                console.log("Current: ", response.content);
             }
         }
         catch (error) {
@@ -19,5 +20,5 @@ export async function runModulePipeline(modules, input) {
         }
     }
     // Return the output, assuming 'code' holds the relevant transformed content
-    return
+    return response; // Ensure the return type matches PromptOutput
 }
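
The module contract after this change: each module's run() receives and returns a { content, filepath? } object, and the response of the last module is what runModulePipeline returns. A hypothetical module conforming to that shape (the name and behavior below are illustrative only, not part of the package):

    // Illustrative module conforming to the pipeline's { content, filepath } contract.
    const upperCaseModule = {
        name: 'upperCase',
        description: 'Example module that upper-cases the content it receives',
        async run({ content, filepath }) {
            return { content: content.toUpperCase(), filepath };
        },
    };

    // Modules can be called directly or passed to runModulePipeline([...], input).
    const out = await upperCaseModule.run({ content: 'hello pipeline', filepath: 'demo.txt' });
    console.log(out.content); // HELLO PIPELINE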

package/dist/utils/detectFileType.js
ADDED
@@ -0,0 +1,52 @@
+import path from 'path';
+export function detectFileType(filepath) {
+    const ext = path.extname(filepath).toLowerCase();
+    const map = {
+        // Programming languages
+        '.ts': 'typescript',
+        '.tsx': 'typescript',
+        '.js': 'javascript',
+        '.jsx': 'javascript',
+        '.java': 'java',
+        '.py': 'python',
+        '.rb': 'ruby',
+        '.php': 'php',
+        '.go': 'go',
+        '.rs': 'rust',
+        '.c': 'c',
+        '.cpp': 'cpp',
+        '.cs': 'csharp',
+        '.swift': 'swift',
+        '.kt': 'kotlin',
+        '.scala': 'scala',
+        // Markup & docs
+        '.md': 'markdown',
+        '.html': 'html',
+        '.htm': 'html',
+        '.xml': 'xml',
+        '.json': 'json',
+        '.yaml': 'yaml',
+        '.yml': 'yaml',
+        // Configs
+        '.ini': 'config',
+        '.toml': 'config',
+        '.env': 'config',
+        // Data
+        '.sql': 'sql',
+        '.csv': 'csv',
+        '.tsv': 'tsv',
+        // Text & writing
+        '.txt': 'text',
+        '.log': 'log',
+        '.rst': 'text',
+        // Office
+        '.doc': 'word',
+        '.docx': 'word',
+        '.pdf': 'pdf',
+        '.ppt': 'powerpoint',
+        '.pptx': 'powerpoint',
+        '.xls': 'excel',
+        '.xlsx': 'excel',
+    };
+    return map[ext] || ext.replace('.', '') || 'unknown';
+}
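
A quick sanity check of the extension mapping (the deep import path into dist/ is an assumption):

    import { detectFileType } from 'scai/dist/utils/detectFileType.js';

    console.log(detectFileType('src/index.ts'));  // typescript
    console.log(detectFileType('notes/todo.md')); // markdown
    console.log(detectFileType('Makefile'));      // unknown (no extension in the map)
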
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "scai",
-  "version": "0.1.
+  "version": "0.1.19",
   "type": "module",
   "bin": {
     "scai": "./dist/index.js"
@@ -24,11 +24,13 @@
     "start": "node dist/index.js"
   },
   "dependencies": {
+    "better-sqlite3": "^12.1.1",
     "commander": "^11.0.0",
     "fast-glob": "^3.3.3",
     "ora": "^8.2.0"
   },
   "devDependencies": {
+    "@types/better-sqlite3": "^7.6.13",
     "@types/jest": "^30.0.0",
     "@types/node": "^24.0.1",
     "jest": "^30.0.2",

package/dist/context/generateProjectContext.js
REMOVED
@@ -1,104 +0,0 @@
-import fs from 'fs/promises';
-import path from 'path';
-import fg from 'fast-glob';
-import { posix as pathPosix } from 'path';
-import { ModelConfig } from '../config/ModelConfig.js';
-const IGNORE = ['node_modules', 'dist', 'build', 'coverage', '.git', '**/*.test.*'];
-const EXTENSIONS_BY_LANG = {
-    ts: ['.ts', '.tsx'],
-    js: ['.js', '.jsx'],
-    java: ['.java'],
-    rust: ['.rs'],
-    python: ['.py'],
-};
-function getSummary(filename, lang) {
-    const base = path.basename(filename).toLowerCase();
-    const ext = path.extname(base);
-    if (base === 'package.json')
-        return 'Defines project metadata and dependencies.';
-    if (base === 'tsconfig.json')
-        return 'TypeScript compiler settings.';
-    if (base === 'pyproject.toml')
-        return 'Python build and dependency configuration.';
-    if (base === 'Cargo.toml')
-        return 'Rust project configuration.';
-    if (base === 'pom.xml')
-        return 'Maven config for a Java project.';
-    if (base === 'README.md')
-        return 'Project documentation.';
-    if (base.startsWith('index'))
-        return 'Entry point module.';
-    if (lang === 'ts' || lang === 'js') {
-        if (base.includes('service'))
-            return 'Service logic module.';
-        if (base.includes('util'))
-            return 'Utility/helper module.';
-        if (base.includes('controller'))
-            return 'Handles request/response logic.';
-        if (base.includes('router'))
-            return 'Routing definitions.';
-    }
-    if (lang === 'java') {
-        if (base.includes('controller'))
-            return 'Java controller class.';
-        if (base.includes('service'))
-            return 'Business logic in Java.';
-    }
-    if (lang === 'python') {
-        if (base.includes('main'))
-            return 'Main execution script.';
-        if (base.includes('config'))
-            return 'Configuration or settings.';
-    }
-    if (lang === 'rust') {
-        if (base === 'main.rs')
-            return 'Main Rust binary entry point.';
-        if (base === 'lib.rs')
-            return 'Rust library root module.';
-    }
-    return `Generic ${ext.replace('.', '')} file.`;
-}
-function insertNested(tree, parts, summary) {
-    const name = parts.pop();
-    let curr = tree;
-    for (const dir of parts) {
-        if (!curr[dir])
-            curr[dir] = {};
-        curr = curr[dir];
-    }
-    curr[name] = summary;
-}
-export async function generateProjectContext(root = process.cwd()) {
-    const lang = ModelConfig.getLanguage();
-    const extensions = EXTENSIONS_BY_LANG[lang];
-    const patterns = extensions.map((ext) => `**/*${ext}`);
-    // Add language-relevant config files
-    if (lang === 'ts')
-        patterns.push('tsconfig.json', 'package.json');
-    if (lang === 'js')
-        patterns.push('package.json');
-    if (lang === 'java')
-        patterns.push('pom.xml');
-    if (lang === 'python')
-        patterns.push('pyproject.toml', '*.py');
-    if (lang === 'rust')
-        patterns.push('Cargo.toml', '*.rs');
-    const files = await fg(patterns, {
-        cwd: root,
-        ignore: IGNORE,
-    });
-    const flat = {};
-    const tree = {};
-    for (const file of files) {
-        const summary = getSummary(file, lang);
-        flat[file] = summary;
-        insertNested(tree, pathPosix.normalize(file).split('/'), summary);
-    }
-    await fs.mkdir('.scai', { recursive: true });
-    await fs.writeFile('.scai/context.flat.json', JSON.stringify(flat, null, 2));
-    await fs.writeFile('.scai/context.tree.json', JSON.stringify(tree, null, 2));
-    console.log(`✅ Context generated:
-  - .scai/context.flat.json
-  - .scai/context.tree.json
-  - Language used: ${lang}`);
-}

package/dist/context/loadProjectContext.js
REMOVED
@@ -1 +0,0 @@
-"use strict";