scai 0.1.46 → 0.1.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/AskCmd.js +3 -5
- package/dist/commands/DaemonCmd.js +12 -2
- package/dist/commands/ResetDbCmd.js +10 -0
- package/dist/constants.js +12 -0
- package/dist/daemon/daemonBatch.js +13 -8
- package/dist/daemon/daemonWorker.js +48 -31
- package/dist/db/fileIndex.js +16 -3
- package/dist/db/functionExtractors/extractFromJs.js +2 -1
- package/dist/db/functionExtractors/index.js +1 -2
- package/dist/db/functionIndex.js +2 -1
- package/dist/db/schema.js +6 -19
- package/dist/db/sqlTemplates.js +9 -7
- package/dist/pipeline/modules/summaryModule.js +1 -1
- package/package.json +1 -1
package/dist/commands/AskCmd.js
CHANGED

@@ -7,9 +7,7 @@ import { generate } from '../lib/generate.js';
 import { buildContextualPrompt } from '../utils/buildContextualPrompt.js';
 import { generateFileTree } from '../utils/fileTree.js';
 import { log } from '../utils/log.js';
-import { PROMPT_LOG_PATH, SCAI_HOME, INDEX_DIR } from '../constants.js';
-const MAX_RELATED_FILES = 3;
-const MAX_SUMMARY_LINES = 12;
+import { PROMPT_LOG_PATH, SCAI_HOME, INDEX_DIR, RELATED_FILES_LIMIT, MAX_SUMMARY_LINES } from '../constants.js';
 export async function runAskCommand(query) {
     if (!query) {
         query = await promptOnce('๐ง Ask your question:\n> ');
@@ -23,7 +21,7 @@ export async function runAskCommand(query) {
     console.log(`๐ Searching for: "${query}"\n`);
     // ๐ฉ STEP 1: Semantic Search
     const start = Date.now();
-    const semanticResults = await searchFiles(query,
+    const semanticResults = await searchFiles(query, RELATED_FILES_LIMIT); // RankedFile[]
     const duration = Date.now() - start;
     console.log(`⏱️ searchFiles took ${duration}ms and returned ${semanticResults.length} result(s)`);
     semanticResults.forEach((file, i) => {
@@ -90,7 +88,7 @@ export async function runAskCommand(query) {
         console.warn(`⚠️ Failed to read or analyze top file (${filepath}):`, err);
     }
     // ๐ฉ STEP 5: Build relatedFiles with functions
-    const relatedFiles = combinedResults.slice(0,
+    const relatedFiles = combinedResults.slice(0, RELATED_FILES_LIMIT).map(file => {
         const fileId = file.id;
         let summary = file.summary || '(No summary available)';
         if (summary) {
package/dist/commands/DaemonCmd.js
CHANGED

@@ -1,4 +1,4 @@
-import fsSync from 'fs';
+import fsSync, { mkdirSync } from 'fs';
 import { LOG_PATH, PID_PATH } from '../constants.js';
 import { log } from '../utils/log.js';
 import { spawn } from 'child_process';
@@ -20,13 +20,23 @@ export async function startDaemon() {
     }
     log('๐ Starting summarizer daemon in background mode...');
     log(`๐ Logs will be saved to: ${LOG_PATH}`);
-    process.env.BACKGROUND_MODE = 'true';
     const __filename = fileURLToPath(import.meta.url);
     const __dirname = path.dirname(__filename);
     const daemonWorkerPath = path.join(__dirname, '../daemon/daemonWorker.js');
     const child = spawn(process.execPath, [daemonWorkerPath], {
         detached: true,
         stdio: ['ignore', 'ignore', 'ignore'],
+        env: {
+            ...process.env,
+            BACKGROUND_MODE: 'true',
+        }
     });
     child.unref();
+    try {
+        mkdirSync(path.dirname(PID_PATH), { recursive: true });
+        fsSync.writeFileSync(PID_PATH, String(child.pid));
+    }
+    catch (err) {
+        log(`❌ Failed to write PID file: ${err instanceof Error ? err.message : err}`);
+    }
 }
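Note on the new PID file: writing child.pid to PID_PATH is what lets a companion command find and stop the detached worker later. The stop logic is not part of this diff; the sketch below is only an assumption about how such a command could consume the file (stopDaemon is hypothetical):

import fsSync from 'fs';
import { PID_PATH } from '../constants.js';

// Hypothetical companion to startDaemon(): read the PID written above,
// signal the detached worker, then clean up the PID file.
export function stopDaemon() {
    const pid = Number(fsSync.readFileSync(PID_PATH, 'utf-8'));
    if (Number.isFinite(pid)) {
        process.kill(pid, 'SIGTERM'); // terminate the background worker
        fsSync.rmSync(PID_PATH, { force: true });
    }
}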
package/dist/commands/ResetDbCmd.js
CHANGED

@@ -43,5 +43,15 @@ export async function resetDatabase() {
     catch (err) {
         console.warn('⚠️ Could not ensure DB directory exists:', err instanceof Error ? err.message : err);
     }
+    const lockDir = `${DB_PATH}.lock`;
+    if (fs.existsSync(lockDir)) {
+        try {
+            fs.rmSync(lockDir, { recursive: true, force: true });
+            console.log('๐งน Removed leftover lock directory.');
+        }
+        catch (err) {
+            console.warn('⚠️ Failed to remove lock directory:', err instanceof Error ? err.message : err);
+        }
+    }
     console.log('✅ Database has been reset. You can now re-run: scai index');
 }
package/dist/constants.js
CHANGED

@@ -52,3 +52,15 @@ export function getIndexDir() {
  * Used by indexing logic (`scai index`) to determine what folder to scan.
  */
 export const INDEX_DIR = getIndexDir();
+/**
+ * Limit for number of related files included in model prompt.
+ */
+export const RELATED_FILES_LIMIT = 3;
+/**
+ * Limit for number of candidate files to score.
+ */
+export const CANDIDATE_LIMIT = 100;
+/**
+ * Limit number of summery lines
+ */
+export const MAX_SUMMARY_LINES = 12;
package/dist/daemon/daemonBatch.js
CHANGED

@@ -75,14 +75,19 @@ export async function runDaemonBatch() {
             log(`⚡ Skipped summary (up-to-date) for ${row.path}`);
         }
         // ๐งฉ Extract functions and update processing status
-        await indexFunctionsForFile(row.path, row.id);
-
-
-
-
-
-
-
+        const extracted = await indexFunctionsForFile(row.path, row.id);
+        if (extracted) {
+            db.prepare(`
+                UPDATE files
+                SET processing_status = 'extracted',
+                    functions_extracted_at = datetime('now')
+                WHERE id = @id
+            `).run({ id: row.id });
+            log(`✅ Function extraction complete for ${row.path}\n`);
+        }
+        else {
+            log(`ℹ️ No functions extracted for ${row.path}\n`);
+        }
     }
     catch (err) {
         log(`❌ Failed: ${row.path}: ${err instanceof Error ? err.message : String(err)}\n`);
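Note on the UPDATE above: @id is SQLite named-parameter syntax; with a better-sqlite3-style client (which this db wrapper resembles, though the diff never names the library) it binds to the id property of the object passed to run(). A minimal sketch under that assumption:

// Prepared once, reusable per processed row; @id binds to { id: ... }.
const markExtracted = db.prepare(`
    UPDATE files
    SET processing_status = 'extracted',
        functions_extracted_at = datetime('now')
    WHERE id = @id
`);
markExtracted.run({ id: row.id });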
package/dist/daemon/daemonWorker.js
CHANGED

@@ -1,45 +1,62 @@
 import { db } from '../db/client.js';
-import { runDaemonBatch } from './daemonBatch.js';
+import { runDaemonBatch } from './daemonBatch.js';
 import { log } from '../utils/log.js';
-
-const
-
-
+const SLEEP_MS = 2000;
+const IDLE_SLEEP_MS = 5000;
+// ๐จ Immediate signal that the worker even starts
+log('๐ ๏ธ daemonWorker.js loaded');
 async function isQueueEmpty() {
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+        const row = db.prepare(`
+            SELECT COUNT(*) AS count
+            FROM files
+            WHERE processing_status IN ('unprocessed')
+        `).get();
+        const castRow = row;
+        if (typeof castRow.count !== 'number') {
+            log('⚠️ Invalid count value in DB query result:', row);
+            return true;
+        }
+        log(`๐ฆ Queue size: ${castRow.count}`);
+        return castRow.count === 0;
+    }
+    catch (error) {
+        log('❌ Error checking if queue is empty:', error);
+        return true;
     }
-    // Return true if count is 0, otherwise false
-    return castRow.count === 0;
 }
 export async function daemonWorker() {
+    log('๐ Daemon worker starting up...');
     while (true) {
-
-
-
-
-
-
-
-
-
+        try {
+            log('๐ Running daemon batch...');
+            const didWork = await runDaemonBatch();
+            if (!didWork) {
+                const queueEmpty = await isQueueEmpty();
+                if (queueEmpty) {
+                    log('๐๏ธ No work found. Idling...');
+                    await sleep(IDLE_SLEEP_MS * 3);
+                    continue;
+                }
+                else {
+                    log('๐ก Work queue not empty, but no batch executed.');
+                }
             }
+            else {
+                log('✅ Batch executed, sleeping briefly...');
+            }
+            await sleep(didWork ? SLEEP_MS : IDLE_SLEEP_MS);
+        }
+        catch (err) {
+            log('๐ฅ Error in daemonWorker loop:', err);
+            await sleep(IDLE_SLEEP_MS * 2); // prevent tight error loop
         }
-        // Sleep for a set amount of time before checking again
-        await sleep(didWork ? SLEEP_MS : IDLE_SLEEP_MS);
     }
 }
-//
+// Run the daemon
+daemonWorker().catch(err => {
+    log('❌ daemonWorker failed to start:', err);
+});
 function sleep(ms) {
     return new Promise(resolve => setTimeout(resolve, ms));
 }
package/dist/db/fileIndex.js
CHANGED

@@ -5,6 +5,7 @@ import { generateEmbedding } from '../lib/generateEmbedding.js';
 import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
 import * as sqlTemplates from './sqlTemplates.js';
 import { stringSimilarity } from 'string-similarity-js';
+import { CANDIDATE_LIMIT } from '../constants.js';
 /**
  * ๐ Index a single file into the database.
  *
@@ -17,14 +18,26 @@ export function indexFile(filePath, summary, type) {
     const lastModified = stats.mtime.toISOString();
     const indexedAt = new Date().toISOString();
     const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
+    const fileName = path.basename(normalizedPath); // Extracting the filename
+    // Insert into files table
     db.prepare(sqlTemplates.upsertFileTemplate).run({
         path: normalizedPath,
+        filename: fileName, // Pass filename
         summary,
         type,
         lastModified,
         indexedAt,
         embedding: null
     });
+    // Insert into files_fts table for full-text search, including filename
+    db.prepare(`
+        INSERT OR REPLACE INTO files_fts (rowid, filename, summary, path)
+        VALUES ((SELECT id FROM files WHERE path = :path), :filename, :summary, :path)
+    `).run({
+        path: normalizedPath,
+        filename: fileName,
+        summary: summary,
+    });
     console.log(`๐ Indexed: ${normalizedPath}`);
 }
 /**
@@ -39,7 +52,7 @@ export function indexFile(filePath, summary, type) {
 export function queryFiles(safeQuery, limit = 10) {
     console.log(`Executing search query: ${safeQuery}`);
     const results = db.prepare(`
-        SELECT f.id, f.path, f.summary, f.type, f.last_modified, f.indexed_at
+        SELECT f.id, f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at
         FROM files f
         JOIN files_fts fts ON f.id = fts.rowid
         WHERE fts.files_fts MATCH ?
@@ -69,13 +82,13 @@ export async function searchFiles(query, topK = 5) {
     const safeQuery = sanitizeQueryForFts(query);
     console.log(`Executing search query in FTS5: ${safeQuery}`);
     const ftsResults = db.prepare(`
-        SELECT fts.rowid AS id, f.path, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
+        SELECT fts.rowid AS id, f.path, f.filename, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
        FROM files f
         JOIN files_fts fts ON f.id = fts.rowid
         WHERE fts.files_fts MATCH ?
         ORDER BY bm25Score ASC
         LIMIT ?
-    `).all(safeQuery,
+    `).all(safeQuery, CANDIDATE_LIMIT);
     console.log(`FTS search returned ${ftsResults.length} results`);
     if (ftsResults.length === 0) {
         return [];
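Note on the new files_fts insert above: schema.js (below) now declares files_fts with content='files', which makes it an external-content FTS5 table. SQLite does not populate such a table by itself, so the application has to write the matching row, which is what the INSERT OR REPLACE in indexFile does. Deletions need the same care via FTS5's special 'delete' command; the helper below is not part of this diff, only a sketch of how that cleanup could look:

// Hypothetical cleanup helper: an external-content FTS5 table must be given the
// old column values when a row is removed from the backing files table.
export function removeFileFromFts(fileId) {
    const row = db.prepare('SELECT filename, summary, path FROM files WHERE id = ?').get(fileId);
    if (!row) return;
    db.prepare(`
        INSERT INTO files_fts (files_fts, rowid, filename, summary, path)
        VALUES ('delete', ?, ?, ?, ?)
    `).run(fileId, row.filename, row.summary, row.path);
}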
package/dist/db/functionExtractors/extractFromJs.js
CHANGED

@@ -58,7 +58,7 @@ export async function extractFromJS(filePath, content, fileId) {
     });
     if (functions.length === 0) {
         log(`⚠️ No functions found in: ${filePath}`);
-        return;
+        return false;
     }
     log(`๐ Found ${functions.length} functions in ${filePath}`);
     for (const fn of functions) {
@@ -109,4 +109,5 @@ export async function extractFromJS(filePath, content, fileId) {
         WHERE id = @fileId
     `).run({ fileId });
     log(`✅ Marked functions as extracted for ${filePath}`);
+    return true;
 }
package/dist/db/functionExtractors/index.js
CHANGED

@@ -12,8 +12,7 @@ export async function extractFunctionsFromFile(filePath, content, fileId) {
     try {
         if (type === 'js' || type === 'ts' || type === 'javascript' || type === 'typescript') {
             log(`✅ Attempting to extract JS functions from ${filePath}\n`);
-            await extractFromJS(filePath, content, fileId);
-            return true;
+            return await extractFromJS(filePath, content, fileId);
         }
         if (type === 'java') {
             log(`❌ Nothing extracted for ${filePath} due to missing implementation`);
package/dist/db/functionIndex.js
CHANGED

@@ -3,9 +3,10 @@ import path from 'path';
 import { extractFunctionsFromFile } from './functionExtractors/index.js';
 /**
  * Extracts functions from file if language is supported.
+ * Returns true if functions were extracted, false otherwise.
  */
 export async function indexFunctionsForFile(filePath, fileId) {
     const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
     const content = fs.readFileSync(filePath, 'utf-8');
-    await extractFunctionsFromFile(normalizedPath, content, fileId);
+    return await extractFunctionsFromFile(normalizedPath, content, fileId);
 }
package/dist/db/schema.js
CHANGED

@@ -1,9 +1,11 @@
 import { db } from "./client.js";
 export function initSchema() {
     db.exec(`
+    -- Create the files table
     CREATE TABLE IF NOT EXISTS files (
       id INTEGER PRIMARY KEY AUTOINCREMENT,
       path TEXT UNIQUE,
+      filename TEXT, -- ✅ Store extracted filename here
       summary TEXT,
       type TEXT,
       indexed_at TEXT,
@@ -13,27 +15,12 @@ export function initSchema() {
       functions_extracted_at TEXT
     );

+    -- Create the full-text search table, auto-updated via content=files
     CREATE VIRTUAL TABLE IF NOT EXISTS files_fts
-    USING fts5(
-
-    -- FTS Triggers to keep files_fts in sync
-    CREATE TRIGGER IF NOT EXISTS files_ai AFTER INSERT ON files BEGIN
-      INSERT INTO files_fts(rowid, path, summary)
-      VALUES (new.id, new.path, new.summary);
-    END;
-
-    CREATE TRIGGER IF NOT EXISTS files_au AFTER UPDATE ON files BEGIN
-      UPDATE files_fts SET
-        path = new.path,
-        summary = new.summary
-      WHERE rowid = new.id;
-    END;
-
-    CREATE TRIGGER IF NOT EXISTS files_ad AFTER DELETE ON files BEGIN
-      DELETE FROM files_fts WHERE rowid = old.id;
-    END;
+    USING fts5(filename, summary, path, content='files', content_rowid='id');
     `);
-    console.log('✅ SQLite schema initialized with FTS5
+    console.log('✅ SQLite schema initialized with FTS5 auto-sync');
+    // Create additional tables for functions and function_calls
     db.exec(`
     CREATE TABLE IF NOT EXISTS functions (
       id INTEGER PRIMARY KEY AUTOINCREMENT,
package/dist/db/sqlTemplates.js
CHANGED

@@ -1,8 +1,9 @@
 // Upsert file metadata into `files`
 export const upsertFileTemplate = `
-INSERT INTO files (path, summary, type, last_modified, indexed_at, embedding)
-VALUES (:path, :summary, :type, :lastModified, :indexedAt, :embedding)
+INSERT INTO files (path, filename, summary, type, last_modified, indexed_at, embedding)
+VALUES (:path, :filename, :summary, :type, :lastModified, :indexedAt, :embedding)
 ON CONFLICT(path) DO UPDATE SET
+  filename = excluded.filename, -- Update filename when path conflicts
   summary = CASE
     WHEN excluded.summary IS NOT NULL AND excluded.summary != files.summary
     THEN excluded.summary
@@ -17,9 +18,10 @@ export const upsertFileTemplate = `
     ELSE files.embedding
   END
 `;
-//
+// ๐ CHANGE 1: Include `filename` in SELECT + weight `filename` highest in bm25
 export const fetchBm25ScoresTemplate = `
-SELECT f.path, f.summary, f.type,
+SELECT f.path, f.filename, f.summary, f.type,
+       bm25(files_fts, 10.0, 2.0, 1.0) AS bm25Score
 FROM files_fts
 JOIN files f ON files_fts.rowid = f.id
 WHERE files_fts MATCH :query
@@ -29,10 +31,10 @@ export const fetchBm25ScoresTemplate = `
 export const fetchEmbeddingTemplate = `
 SELECT embedding FROM files WHERE path = :path
 `;
-//
+// ๐ CHANGE 2: Also added weighted `bm25()` with explicit weights here
 export const rawQueryTemplate = `
-SELECT f.path, f.summary, f.type, f.last_modified, f.indexed_at,
-  bm25(files_fts) AS rank
+SELECT f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at,
+  bm25(files_fts, 10.0, 2.0, 1.0) AS rank
 FROM files_fts
 JOIN files f ON files_fts.rowid = f.id
 WHERE files_fts MATCH :query
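Note on bm25(files_fts, 10.0, 2.0, 1.0): FTS5's bm25() takes one weight per column, in the order the columns were declared on files_fts (filename, summary, path), so these templates rank filename matches highest; bm25() also returns numerically lower values for better matches, which is why the queries sort ascending. A small sketch of the mapping, assuming a better-sqlite3-style client as elsewhere in this package (the query text is illustrative):

import { db } from './client.js';

// Weight mapping for bm25(files_fts, 10.0, 2.0, 1.0):
//   10.0 -> filename, 2.0 -> summary, 1.0 -> path (FTS5 column declaration order).
// Lower bm25() values mean stronger matches, hence ORDER BY ... ASC.
const rows = db.prepare(`
    SELECT f.path, bm25(files_fts, 10.0, 2.0, 1.0) AS score
    FROM files_fts
    JOIN files f ON files_fts.rowid = f.id
    WHERE files_fts MATCH :query
    ORDER BY score ASC
    LIMIT 5
`).all({ query: 'daemon' });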
package/dist/pipeline/modules/summaryModule.js
CHANGED

@@ -28,7 +28,7 @@ Extension: ${ext}
 ${content}
 --- FILE CONTENT END ---
 `.trim();
-    const response = await generate({ content, filepath }, model);
+    const response = await generate({ content: prompt, filepath }, model);
     if (response.content) {
         response.summary = response.content;
         console.log('\n๐ Summary:\n');