scai 0.1.40 → 0.1.42
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/AskCmd.js +62 -27
- package/dist/constants.js +5 -0
- package/dist/daemon/daemonBatch.js +34 -24
- package/dist/db/schema.js +2 -1
- package/dist/utils/buildContextualPrompt.js +20 -0
- package/dist/utils/fileTree.js +30 -0
- package/package.json +1 -1
package/dist/commands/AskCmd.js
CHANGED
@@ -1,9 +1,16 @@
-import
+import fs from 'fs';
 import path from 'path';
-import
-import {
-import {
+import readline from 'readline';
+import { searchFiles, queryFiles } from '../db/fileIndex.js';
+import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
+import { generate } from '../lib/generate.js';
+import { buildContextualPrompt } from '../utils/buildContextualPrompt.js';
+import { generateFileTree } from '../utils/fileTree.js';
+import { log } from '../utils/log.js';
+import { PROMPT_LOG_PATH, SCAI_HOME, INDEX_DIR } from '../constants.js';
+const MAX_RELATED_FILES = 5;
 export async function runAskCommand(query) {
+    // Prompt the user if no query is passed
     if (!query) {
         query = await promptOnce('Ask your question:\n> ');
     }
@@ -12,21 +19,32 @@ export async function runAskCommand(query) {
         console.error('❌ No question provided.\nUsage: scai ask "your question"');
         return;
     }
+    console.log(`Using index root: ${INDEX_DIR}`);
     console.log(`Searching for: "${query}"\n`);
+    // Step 1: Semantic + fallback search
     const start = Date.now();
-    const semanticResults = await searchFiles(query,
+    const semanticResults = await searchFiles(query, MAX_RELATED_FILES);
     const duration = Date.now() - start;
     console.log(`⏱️ searchFiles took ${duration}ms and returned ${semanticResults.length} result(s)`);
-    //
+    // Log raw semantic results
+    console.log('Raw semantic search results:');
+    semanticResults.forEach((file, i) => {
+        console.log(`  ${i + 1}. Path: ${file.path} | Score: ${file.score?.toFixed(3) ?? 'n/a'}`);
+    });
     const safeQuery = sanitizeQueryForFts(query);
     const fallbackResults = queryFiles(safeQuery, 10);
-    //
+    // Log raw keyword fallback results
+    console.log('\nRaw fallback keyword (FTS) search results:');
+    fallbackResults.forEach((file, i) => {
+        console.log(`  ${i + 1}. Path: ${file.path}`);
+    });
+    // Step 2: Merge results
     const seen = new Set();
     const combinedResults = [];
     for (const file of semanticResults) {
         const resolved = path.resolve(file.path);
         seen.add(resolved);
-        combinedResults.push(file);
+        combinedResults.push(file);
     }
     for (const file of fallbackResults) {
         const resolved = path.resolve(file.path);
@@ -35,7 +53,7 @@ export async function runAskCommand(query) {
             combinedResults.push({
                 path: file.path,
                 summary: file.summary,
-                score: 0.0,
+                score: 0.0,
             });
         }
     }
@@ -46,26 +64,43 @@ export async function runAskCommand(query) {
         });
     }
     else {
-        console.log('⚠️ No similar files found. Asking the model
+        console.log('⚠️ No similar files found. Asking the model using question only...');
     }
-    //
-
-
-
-
-
-
-
-
+    // Step 3: Build metadata for prompt
+    const relatedFiles = combinedResults.slice(0, MAX_RELATED_FILES).map(file => ({
+        path: file.path,
+        summary: file.summary || '(No summary available)',
+    }));
+    let fileTree = '';
+    try {
+        fileTree = generateFileTree(INDEX_DIR, 2); // Limit depth
+    }
+    catch (e) {
+        console.warn('⚠️ Failed to generate file tree:', e);
+    }
+    const prompt = buildContextualPrompt({
+        baseInstruction: query,
+        code: '', // No specific code selected
+        relatedFiles,
+        projectFileTree: fileTree || undefined,
+    });
+    // Step 4: Log prompt to file
+    try {
+        if (!fs.existsSync(SCAI_HOME))
+            fs.mkdirSync(SCAI_HOME, { recursive: true });
+        fs.writeFileSync(PROMPT_LOG_PATH, prompt, 'utf-8');
+        log(`Prompt saved to ${PROMPT_LOG_PATH}`);
+    }
+    catch (err) {
+        log('❌ Failed to write prompt log:', err);
     }
-
-    content: allSummaries ? `${query}\n\n${allSummaries}` : query,
-    filepath: '',
-    };
+    // Step 5: Call the model
     try {
-        console.log(
-
-        :
+        console.log('Asking the model...');
+        const input = {
+            content: prompt,
+            filepath: '',
+        };
         const modelResponse = await generate(input, 'llama3');
         console.log(`\nModel response:\n${modelResponse.content}`);
     }
@@ -77,7 +112,7 @@ function promptOnce(promptText) {
     return new Promise(resolve => {
         const rl = readline.createInterface({
             input: process.stdin,
-            output: process.stdout
+            output: process.stdout,
         });
         rl.question(promptText, answer => {
             rl.close();
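To illustrate Step 2 above, here is a minimal standalone sketch of how semantic hits and keyword (FTS) fallback hits are de-duplicated by resolved path before the prompt is built. The sample result arrays are invented for the example, and the skip-if-already-seen check on fallback rows is an assumption inferred from the seen set, since that part of the hunk is only partially shown:

import path from 'path';

// Hypothetical rows shaped like the objects AskCmd.js works with ({ path, summary, score }).
const semanticResults = [
  { path: 'src/db/fileIndex.js', summary: 'SQLite-backed file index', score: 0.91 },
];
const fallbackResults = [
  { path: 'src/db/fileIndex.js', summary: 'SQLite-backed file index' },     // duplicate: dropped
  { path: 'src/utils/sanitizeQuery.js', summary: 'Escapes FTS operators' }, // kept with score 0.0
];

const seen = new Set();
const combinedResults = [];
for (const file of semanticResults) {
  seen.add(path.resolve(file.path));
  combinedResults.push(file);
}
for (const file of fallbackResults) {
  const resolved = path.resolve(file.path);
  if (!seen.has(resolved)) { // assumed dedupe check
    seen.add(resolved);
    combinedResults.push({ path: file.path, summary: file.summary, score: 0.0 });
  }
}
console.log(combinedResults.map(f => `${f.path} (score ${f.score})`));
// -> [ 'src/db/fileIndex.js (score 0.91)', 'src/utils/sanitizeQuery.js (score 0)' ]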
package/dist/constants.js
CHANGED
@@ -26,6 +26,11 @@ export const CONFIG_PATH = path.join(SCAI_HOME, 'config.json');
  * ~/.scai/daemon.log
  */
 export const LOG_PATH = path.join(SCAI_HOME, 'daemon.log');
+/**
+ * Path to the last prompt sent to the model:
+ * ~/.scai/prompt.log
+ */
+export const PROMPT_LOG_PATH = path.join(SCAI_HOME, 'prompt.log');
 /**
  * Get the active index directory.
  *
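Since runAskCommand now writes the full prompt to PROMPT_LOG_PATH (~/.scai/prompt.log), the last prompt sent to the model can be inspected after a run. A small sketch, assuming a prompt has already been logged and that the constants module can be imported from the installed package (the import specifier is hypothetical):

import fs from 'fs';
import { PROMPT_LOG_PATH } from 'scai/dist/constants.js'; // hypothetical import specifier

// Print the last prompt scai sent to the model, if one has been logged.
if (fs.existsSync(PROMPT_LOG_PATH)) {
  console.log(fs.readFileSync(PROMPT_LOG_PATH, 'utf-8'));
} else {
  console.log('No prompt has been logged yet.');
}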
package/dist/daemon/daemonBatch.js
CHANGED
@@ -21,9 +21,9 @@ async function lockDb() {
 export async function runDaemonBatch() {
     log('Starting daemon batch...');
     const rows = db.prepare(`
-    SELECT path, type,
-
-
+    SELECT id, path, type, summary, indexed_at, last_modified, processing_status
+    FROM files
+    WHERE processing_status = 'unprocessed' OR summary IS NULL OR summary = ''
     ORDER BY last_modified DESC
     LIMIT ?
   `).all(MAX_FILES_PER_BATCH);
@@ -47,32 +47,42 @@ export async function runDaemonBatch() {
         }
         try {
             const content = await fs.readFile(row.path, 'utf-8');
-
-            const
-
-
-
-
+            // Only re-generate summary if it's missing or outdated
+            const needsResummary = !row.summary ||
+                !row.indexed_at ||
+                (row.last_modified && new Date(row.last_modified) > new Date(row.indexed_at));
+            if (needsResummary) {
+                log(`Generating summary for ${row.path}...`);
+                const summaryResult = await summaryModule.run({ content, filepath: row.path });
+                const summary = summaryResult?.summary?.trim() || null;
+                let embedding = null;
+                if (summary) {
+                    const vector = await generateEmbedding(summary);
+                    if (vector) {
+                        embedding = JSON.stringify(vector);
+                    }
+                }
+                db.prepare(`
+                    UPDATE files
+                    SET summary = @summary,
+                        embedding = @embedding,
+                        indexed_at = datetime('now')
+                    WHERE path = @path
+                `).run({ summary, embedding, path: row.path });
+                log(`✅ Updated summary & embedding for ${row.path}`);
             }
             else {
-                log(
-            }
-            log(`Generating summary for ${row.path}...`);
-            const summaryResult = await summaryModule.run({ content, filepath: row.path });
-            const summary = summaryResult?.summary?.trim() || null;
-            let embedding = null;
-            if (summary) {
-                const vector = await generateEmbedding(summary);
-                if (vector) {
-                    embedding = JSON.stringify(vector);
-                }
+                log(`⚡ Skipped summary (up-to-date) for ${row.path}`);
             }
+            // Extract functions and update processing status
+            await indexFunctionsForFile(row.path, row.id);
             db.prepare(`
                 UPDATE files
-                SET
-
-
-
+                SET processing_status = 'extracted',
+                    functions_extracted_at = datetime('now')
+                WHERE id = @id
+            `).run({ id: row.id });
+            log(`✅ Function extraction complete for ${row.path}\n`);
         }
         catch (err) {
             log(`❌ Failed: ${row.path}: ${err instanceof Error ? err.message : String(err)}\n`);
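The batch loop above now regenerates a summary only when it is missing or stale. A minimal sketch of that staleness rule, mirroring the needsResummary condition with hypothetical rows (field values invented for illustration):

// Mirrors the needsResummary condition from runDaemonBatch.
function needsResummary(row) {
  return !row.summary ||
    !row.indexed_at ||
    (row.last_modified && new Date(row.last_modified) > new Date(row.indexed_at));
}

const rows = [
  { path: 'a.js', summary: null, indexed_at: null, last_modified: '2024-01-02T00:00:00Z' },
  { path: 'b.js', summary: 'ok', indexed_at: '2024-01-03T00:00:00Z', last_modified: '2024-01-02T00:00:00Z' },
  { path: 'c.js', summary: 'ok', indexed_at: '2024-01-01T00:00:00Z', last_modified: '2024-01-02T00:00:00Z' },
];
for (const row of rows) {
  // a.js: regenerate (no summary), b.js: skip (index newer than file), c.js: regenerate (file modified after indexing)
  console.log(row.path, needsResummary(row) ? 'regenerate' : 'skip');
}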
package/dist/utils/buildContextualPrompt.js
ADDED
@@ -0,0 +1,20 @@
+export function buildContextualPrompt({ baseInstruction, code, summary, functions, relatedFiles, projectFileTree }) {
+    const parts = [baseInstruction];
+    if (summary) {
+        parts.push(`File Summary:\n${summary}`);
+    }
+    if (functions?.length) {
+        parts.push(`Functions:\n${functions.join(', ')}`);
+    }
+    if (relatedFiles?.length) {
+        const formatted = relatedFiles
+            .map(f => `• ${f.path}: ${f.summary}`)
+            .join('\n');
+        parts.push(`Related Files:\n${formatted}`);
+    }
+    if (projectFileTree) {
+        parts.push(`Project File Structure:\n\`\`\`\n${projectFileTree.trim()}\n\`\`\``);
+    }
+    parts.push(`\n--- CODE START ---\n${code}\n--- CODE END ---`);
+    return parts.join('\n\n');
+}
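As a usage illustration, a call to the new helper with an invented question and related-file list (the import specifier and all values are hypothetical):

import { buildContextualPrompt } from 'scai/dist/utils/buildContextualPrompt.js'; // hypothetical specifier

const prompt = buildContextualPrompt({
  baseInstruction: 'Where is the FTS fallback search implemented?',
  code: '',
  relatedFiles: [
    { path: 'dist/db/fileIndex.js', summary: 'SQLite file index with FTS and vector search' },
  ],
  projectFileTree: 'dist\n├── commands\n└── db\n',
});
console.log(prompt);
// Output: the question, then a "Related Files:" section, a fenced
// "Project File Structure:" block, and the (empty) CODE START/END markers.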
package/dist/utils/fileTree.js
ADDED
@@ -0,0 +1,30 @@
+import fs from 'fs';
+import path from 'path';
+const IGNORED_DIRS = new Set([
+    'node_modules', 'dist', '.git', '.vscode', 'coverage', 'build', 'out', 'logs', 'tmp'
+]);
+export function generateFileTree(dir, maxDepth = 3, currentDepth = 0, prefix = '') {
+    if (currentDepth > maxDepth)
+        return '';
+    let output = '';
+    const items = fs.readdirSync(dir, { withFileTypes: true })
+        .filter(item => !IGNORED_DIRS.has(item.name))
+        .sort((a, b) => {
+            // Directories first
+            if (a.isDirectory() && !b.isDirectory())
+                return -1;
+            if (!a.isDirectory() && b.isDirectory())
+                return 1;
+            return a.name.localeCompare(b.name);
+        });
+    for (const [i, item] of items.entries()) {
+        const isLast = i === items.length - 1;
+        const connector = isLast ? '└── ' : '├── ';
+        const childPrefix = prefix + (isLast ? '    ' : '│   ');
+        output += `${prefix}${connector}${item.name}\n`;
+        if (item.isDirectory()) {
+            output += generateFileTree(path.join(dir, item.name), maxDepth, currentDepth + 1, childPrefix);
+        }
+    }
+    return output;
+}
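Finally, a quick usage sketch for the tree helper, rendering the working directory two levels deep (the same depth AskCmd.js passes for INDEX_DIR); the import specifier and the output shape shown are illustrative only:

import { generateFileTree } from 'scai/dist/utils/fileTree.js'; // hypothetical specifier

// Directories sort before files; ignored folders such as node_modules and dist are skipped.
console.log(generateFileTree(process.cwd(), 2));
// Example output shape:
// ├── src
// │   ├── commands
// │   └── utils
// └── package.json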