scai 0.1.45 → 0.1.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,8 +7,7 @@ import { generate } from '../lib/generate.js';
  import { buildContextualPrompt } from '../utils/buildContextualPrompt.js';
  import { generateFileTree } from '../utils/fileTree.js';
  import { log } from '../utils/log.js';
- import { PROMPT_LOG_PATH, SCAI_HOME, INDEX_DIR } from '../constants.js';
- const MAX_RELATED_FILES = 5;
+ import { PROMPT_LOG_PATH, SCAI_HOME, INDEX_DIR, RELATED_FILES_LIMIT, MAX_SUMMARY_LINES } from '../constants.js';
  export async function runAskCommand(query) {
  if (!query) {
  query = await promptOnce('🧠 Ask your question:\n> ');
@@ -22,7 +21,7 @@ export async function runAskCommand(query) {
  console.log(`🔍 Searching for: "${query}"\n`);
  // 🟩 STEP 1: Semantic Search
  const start = Date.now();
- const semanticResults = await searchFiles(query, MAX_RELATED_FILES); // RankedFile[]
+ const semanticResults = await searchFiles(query, RELATED_FILES_LIMIT); // RankedFile[]
  const duration = Date.now() - start;
  console.log(`⏱️ searchFiles took ${duration}ms and returned ${semanticResults.length} result(s)`);
  semanticResults.forEach((file, i) => {
@@ -37,7 +36,6 @@ export async function runAskCommand(query) {
  // 🟩 STEP 2: Merge results (de-duplicate by full resolved path)
  const seen = new Set();
  const combinedResults = [];
- // Merging results now ensures all have `id`
  for (const file of semanticResults) {
  const resolved = path.resolve(file.path);
  seen.add(resolved);
@@ -48,7 +46,7 @@ export async function runAskCommand(query) {
  if (!seen.has(resolved)) {
  seen.add(resolved);
  combinedResults.push({
- id: file.id, // Ensure the id is included here
+ id: file.id,
  path: file.path,
  summary: file.summary || '',
  score: 0.0,
@@ -73,9 +71,12 @@ export async function runAskCommand(query) {
  let code = '';
  let topSummary = topFile.summary || '(No summary available)';
  let topFunctions = [];
- // Gather all file IDs from the combined results
+ // Truncate summary if needed
+ if (topSummary) {
+ topSummary = topSummary.split('\n').slice(0, MAX_SUMMARY_LINES).join('\n');
+ }
  const allFileIds = combinedResults
- .map(file => file.id) // Now file.id exists
+ .map(file => file.id)
  .filter((id) => typeof id === 'number');
  const allFunctionsMap = getFunctionsForFiles(allFileIds);
  try {
@@ -87,11 +88,15 @@ export async function runAskCommand(query) {
  console.warn(`⚠️ Failed to read or analyze top file (${filepath}):`, err);
  }
  // 🟩 STEP 5: Build relatedFiles with functions
- const relatedFiles = combinedResults.slice(0, MAX_RELATED_FILES).map(file => {
+ const relatedFiles = combinedResults.slice(0, RELATED_FILES_LIMIT).map(file => {
  const fileId = file.id;
+ let summary = file.summary || '(No summary available)';
+ if (summary) {
+ summary = summary.split('\n').slice(0, MAX_SUMMARY_LINES).join('\n');
+ }
  return {
  path: file.path,
- summary: file.summary || '(No summary available)',
+ summary,
  functions: allFunctionsMap[fileId]?.map(fn => fn.name) || [],
  };
  });
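The two new constants (`RELATED_FILES_LIMIT = 3`, `MAX_SUMMARY_LINES = 12`) bound how much context the ask command feeds into the model prompt, and the same split/slice/join truncation appears twice in the hunk above. A minimal standalone sketch of that logic; `truncateSummary` is a hypothetical helper name, since the package inlines the logic rather than extracting it:

```js
// Values from package/dist/constants.js in 0.1.47.
const RELATED_FILES_LIMIT = 3;
const MAX_SUMMARY_LINES = 12;

// Hypothetical helper: keep at most MAX_SUMMARY_LINES lines of a summary.
function truncateSummary(summary) {
  if (!summary) return '(No summary available)';
  return summary.split('\n').slice(0, MAX_SUMMARY_LINES).join('\n');
}

// Example input: four candidate files, one with a 20-line summary.
const combinedResults = [
  { path: 'src/a.js', summary: Array.from({ length: 20 }, (_, i) => `line ${i + 1}`).join('\n') },
  { path: 'src/b.js', summary: 'short summary' },
  { path: 'src/c.js', summary: null },
  { path: 'src/d.js', summary: 'dropped by the limit' },
];

const relatedFiles = combinedResults
  .slice(0, RELATED_FILES_LIMIT)                                        // at most 3 files
  .map(f => ({ path: f.path, summary: truncateSummary(f.summary) }));   // at most 12 lines each

console.log(relatedFiles.length);                          // 3
console.log(relatedFiles[0].summary.split('\n').length);   // 12
```

With the defaults above, the prompt carries at most 3 × 12 = 36 summary lines from related files.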
@@ -1,4 +1,4 @@
- import fsSync from 'fs';
+ import fsSync, { mkdirSync } from 'fs';
  import { LOG_PATH, PID_PATH } from '../constants.js';
  import { log } from '../utils/log.js';
  import { spawn } from 'child_process';
@@ -20,13 +20,23 @@ export async function startDaemon() {
  }
  log('🚀 Starting summarizer daemon in background mode...');
  log(`📝 Logs will be saved to: ${LOG_PATH}`);
- process.env.BACKGROUND_MODE = 'true';
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = path.dirname(__filename);
  const daemonWorkerPath = path.join(__dirname, '../daemon/daemonWorker.js');
  const child = spawn(process.execPath, [daemonWorkerPath], {
  detached: true,
  stdio: ['ignore', 'ignore', 'ignore'],
+ env: {
+ ...process.env,
+ BACKGROUND_MODE: 'true',
+ }
  });
  child.unref();
+ try {
+ mkdirSync(path.dirname(PID_PATH), { recursive: true });
+ fsSync.writeFileSync(PID_PATH, String(child.pid));
+ }
+ catch (err) {
+ log(`❌ Failed to write PID file: ${err instanceof Error ? err.message : err}`);
+ }
  }
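Two things stand out in this hunk: `spawn` now receives an explicit `env` object (Node replaces the child's environment entirely when `options.env` is set, hence the `...process.env` spread), and the daemon's PID is persisted to `PID_PATH`. A hedged sketch of how a PID file like this is typically consumed, for example by a hypothetical status check; this helper is not part of scai itself:

```js
import fsSync from 'fs';
// PID_PATH matches the constant imported in the hunk above.
import { PID_PATH } from '../constants.js';

// Hypothetical helper: report whether the daemon recorded in PID_PATH is alive.
function isDaemonRunning() {
  try {
    const pid = Number(fsSync.readFileSync(PID_PATH, 'utf-8').trim());
    if (!Number.isInteger(pid)) return false;
    process.kill(pid, 0); // signal 0 performs no action, only an existence/permission check
    return true;
  } catch {
    return false; // missing file, stale PID, or insufficient permissions
  }
}

console.log(isDaemonRunning() ? 'daemon running' : 'daemon not running');
```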
@@ -43,5 +43,15 @@ export async function resetDatabase() {
  catch (err) {
  console.warn('⚠️ Could not ensure DB directory exists:', err instanceof Error ? err.message : err);
  }
+ const lockDir = `${DB_PATH}.lock`;
+ if (fs.existsSync(lockDir)) {
+ try {
+ fs.rmSync(lockDir, { recursive: true, force: true });
+ console.log('🧹 Removed leftover lock directory.');
+ }
+ catch (err) {
+ console.warn('⚠️ Failed to remove lock directory:', err instanceof Error ? err.message : err);
+ }
+ }
  console.log('✅ Database has been reset. You can now re-run: scai index');
  }
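The `${DB_PATH}.lock` cleanup fits the behavior of `proper-lockfile` (a declared dependency), which implements locks as a `<file>.lock` directory next to the locked file; if a previous run crashed before releasing, that directory can survive and block a later `scai index`. A minimal sketch of the lock/release pattern with proper-lockfile, under the assumption that the package locks `DB_PATH` this way (the `withDbLock` wrapper is hypothetical):

```js
import lockfile from 'proper-lockfile';
import { DB_PATH } from '../constants.js'; // export assumed from the surrounding code

// Hypothetical wrapper: hold `${DB_PATH}.lock` for the duration of fn().
async function withDbLock(fn) {
  const release = await lockfile.lock(DB_PATH, { retries: 3 }); // creates the .lock directory
  try {
    return await fn();
  } finally {
    await release(); // removes the .lock directory again
  }
}

await withDbLock(async () => {
  // ...write to the SQLite database here...
});
```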
package/dist/constants.js CHANGED
@@ -52,3 +52,15 @@ export function getIndexDir() {
  * Used by indexing logic (`scai index`) to determine what folder to scan.
  */
  export const INDEX_DIR = getIndexDir();
+ /**
+ * Limit for number of related files included in model prompt.
+ */
+ export const RELATED_FILES_LIMIT = 3;
+ /**
+ * Limit for number of candidate files to score.
+ */
+ export const CANDIDATE_LIMIT = 100;
+ /**
+ * Limit for number of summary lines.
+ */
+ export const MAX_SUMMARY_LINES = 12;
@@ -75,14 +75,19 @@ export async function runDaemonBatch() {
  log(`⚡ Skipped summary (up-to-date) for ${row.path}`);
  }
  // 🧩 Extract functions and update processing status
- await indexFunctionsForFile(row.path, row.id);
- db.prepare(`
- UPDATE files
- SET processing_status = 'extracted',
- functions_extracted_at = datetime('now')
- WHERE id = @id
- `).run({ id: row.id });
- log(`✅ Function extraction complete for ${row.path}\n`);
+ const extracted = await indexFunctionsForFile(row.path, row.id);
+ if (extracted) {
+ db.prepare(`
+ UPDATE files
+ SET processing_status = 'extracted',
+ functions_extracted_at = datetime('now')
+ WHERE id = @id
+ `).run({ id: row.id });
+ log(`✅ Function extraction complete for ${row.path}\n`);
+ }
+ else {
+ log(`ℹ️ No functions extracted for ${row.path}\n`);
+ }
  }
  catch (err) {
  log(`❌ Failed: ${row.path}: ${err instanceof Error ? err.message : String(err)}\n`);
@@ -1,45 +1,62 @@
  import { db } from '../db/client.js';
- import { runDaemonBatch } from './daemonBatch.js'; // assuming this function is already defined
+ import { runDaemonBatch } from './daemonBatch.js';
  import { log } from '../utils/log.js';
- // Time between each batch in milliseconds
- const SLEEP_MS = 2000; // Adjust as needed
- const IDLE_SLEEP_MS = 5000; // Adjust as needed
- // Check if there are any files left to process
+ const SLEEP_MS = 2000;
+ const IDLE_SLEEP_MS = 5000;
+ // 🚨 Immediate signal that the worker even starts
+ log('🛠️ daemonWorker.js loaded');
  async function isQueueEmpty() {
- // Query the database for the count of files with certain processing statuses
- const row = db.prepare(`
- SELECT COUNT(*) AS count
- FROM files
- WHERE processing_status IN ('unprocessed')
- `).get();
- // Cast the row to an object that has a `count` property of type number
- const castRow = row;
- // Check if the casted `row` has a valid `count` property (number)
- if (typeof castRow.count !== 'number') {
- console.error('Error: Invalid count value in the database query result.');
- return true; // Assume queue is empty if the count is invalid
+ try {
+ const row = db.prepare(`
+ SELECT COUNT(*) AS count
+ FROM files
+ WHERE processing_status IN ('unprocessed')
+ `).get();
+ const castRow = row;
+ if (typeof castRow.count !== 'number') {
+ log('⚠️ Invalid count value in DB query result:', row);
+ return true;
+ }
+ log(`📦 Queue size: ${castRow.count}`);
+ return castRow.count === 0;
+ }
+ catch (error) {
+ log('❌ Error checking if queue is empty:', error);
+ return true;
  }
- // Return true if count is 0, otherwise false
- return castRow.count === 0;
  }
  export async function daemonWorker() {
+ log('🚀 Daemon worker starting up...');
  while (true) {
- // Execute a batch job
- const didWork = await runDaemonBatch();
- if (!didWork) {
- // Check if the queue is empty after a batch job
- const queueEmpty = await isQueueEmpty();
- if (queueEmpty) {
- // If no files are left to process, stop the daemon
- log("✅ No more work left. Stopping daemon.");
- break;
+ try {
+ log('🔄 Running daemon batch...');
+ const didWork = await runDaemonBatch();
+ if (!didWork) {
+ const queueEmpty = await isQueueEmpty();
+ if (queueEmpty) {
+ log('🕊️ No work found. Idling...');
+ await sleep(IDLE_SLEEP_MS * 3);
+ continue;
+ }
+ else {
+ log('🟡 Work queue not empty, but no batch executed.');
+ }
  }
+ else {
+ log('✅ Batch executed, sleeping briefly...');
+ }
+ await sleep(didWork ? SLEEP_MS : IDLE_SLEEP_MS);
+ }
+ catch (err) {
+ log('🔥 Error in daemonWorker loop:', err);
+ await sleep(IDLE_SLEEP_MS * 2); // prevent tight error loop
  }
- // Sleep for a set amount of time before checking again
- await sleep(didWork ? SLEEP_MS : IDLE_SLEEP_MS);
  }
  }
- // Sleep function to control how often the worker checks
+ // Run the daemon
+ daemonWorker().catch(err => {
+ log('❌ daemonWorker failed to start:', err);
+ });
  function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
  }
@@ -4,6 +4,8 @@ import path from 'path';
  import { generateEmbedding } from '../lib/generateEmbedding.js';
  import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
  import * as sqlTemplates from './sqlTemplates.js';
+ import { stringSimilarity } from 'string-similarity-js';
+ import { CANDIDATE_LIMIT } from '../constants.js';
  /**
  * 📄 Index a single file into the database.
  *
@@ -16,14 +18,26 @@ export function indexFile(filePath, summary, type) {
  const lastModified = stats.mtime.toISOString();
  const indexedAt = new Date().toISOString();
  const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
+ const fileName = path.basename(normalizedPath); // Extracting the filename
+ // Insert into files table
  db.prepare(sqlTemplates.upsertFileTemplate).run({
  path: normalizedPath,
+ filename: fileName, // Pass filename
  summary,
  type,
  lastModified,
  indexedAt,
  embedding: null
  });
+ // Insert into files_fts table for full-text search, including filename
+ db.prepare(`
+ INSERT OR REPLACE INTO files_fts (rowid, filename, summary, path)
+ VALUES ((SELECT id FROM files WHERE path = :path), :filename, :summary, :path)
+ `).run({
+ path: normalizedPath,
+ filename: fileName,
+ summary: summary,
+ });
  console.log(`📄 Indexed: ${normalizedPath}`);
  }
  /**
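For context on the explicit `files_fts` write above: with `content='files'` (see the schema change further down), FTS5 reads column values from the `files` table, but per SQLite's FTS5 documentation the full-text index itself is not maintained automatically; it has to be kept in sync by triggers or by explicit writes like the one `indexFile` now performs. A minimal better-sqlite3 sketch of the external-content pattern, including the `'rebuild'` command that repopulates the whole index from the content table (useful after migrating an existing database); the table contents here are made up for illustration:

```js
import Database from 'better-sqlite3';

const db = new Database(':memory:'); // throwaway example DB
db.exec(`
  CREATE TABLE files (id INTEGER PRIMARY KEY, path TEXT, filename TEXT, summary TEXT);
  CREATE VIRTUAL TABLE files_fts
  USING fts5(filename, summary, path, content='files', content_rowid='id');
`);

// Writing to the content table alone does NOT update the FTS index...
db.prepare(`INSERT INTO files (path, filename, summary) VALUES (?, ?, ?)`)
  .run('dist/commands/ask.js', 'ask.js', 'Implements the ask command');

// ...so either mirror the write into files_fts (what indexFile does above),
// or rebuild the index wholesale from the content table:
db.prepare(`INSERT INTO files_fts(files_fts) VALUES ('rebuild')`).run();

const hit = db.prepare(`SELECT path FROM files_fts WHERE files_fts MATCH ?`).get('ask');
console.log(hit); // { path: 'dist/commands/ask.js' }
```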
@@ -38,7 +52,7 @@ export function indexFile(filePath, summary, type) {
  export function queryFiles(safeQuery, limit = 10) {
  console.log(`Executing search query: ${safeQuery}`);
  const results = db.prepare(`
- SELECT f.id, f.path, f.summary, f.type, f.last_modified, f.indexed_at
+ SELECT f.id, f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at
  FROM files f
  JOIN files_fts fts ON f.id = fts.rowid
  WHERE fts.files_fts MATCH ?
@@ -67,43 +81,51 @@ export async function searchFiles(query, topK = 5) {
  }
  const safeQuery = sanitizeQueryForFts(query);
  console.log(`Executing search query in FTS5: ${safeQuery}`);
- // Step 1: Narrow candidate set using fast keyword match
  const ftsResults = db.prepare(`
- SELECT fts.rowid AS id, f.path, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
+ SELECT fts.rowid AS id, f.path, f.filename, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
  FROM files f
  JOIN files_fts fts ON f.id = fts.rowid
  WHERE fts.files_fts MATCH ?
  ORDER BY bm25Score ASC
  LIMIT ?
- `).all(safeQuery, 20);
+ `).all(safeQuery, CANDIDATE_LIMIT);
  console.log(`FTS search returned ${ftsResults.length} results`);
  if (ftsResults.length === 0) {
  return [];
  }
- // Step 2: Compute score based on embedding similarity + BM25 score
  const bm25Min = Math.min(...ftsResults.map(r => r.bm25Score));
  const bm25Max = Math.max(...ftsResults.map(r => r.bm25Score));
  const scored = ftsResults.map(result => {
- let finalScore = 0;
  let sim = 0;
+ let finalScore = 0;
+ const normalizedBm25 = 1 - ((result.bm25Score - bm25Min) / (bm25Max - bm25Min + 1e-5));
  if (result.embedding) {
  try {
  const vector = JSON.parse(result.embedding);
  sim = cosineSimilarity(embedding, vector);
- const normalizedBm25 = 1 - ((result.bm25Score - bm25Min) / (bm25Max - bm25Min + 1e-5));
- finalScore = 0.7 * sim + 0.3 * normalizedBm25;
  }
  catch (err) {
  console.error(`❌ Failed to parse embedding for ${result.path}:`, err);
- finalScore = 0;
  }
  }
- else {
- // Fallback to BM25-only score
- finalScore = 1 - ((result.bm25Score - bm25Min) / (bm25Max - bm25Min + 1e-5));
+ const terms = query.toLowerCase().split(/\s+/);
+ const path = result.path.toLowerCase();
+ const summary = (result.summary || '').toLowerCase();
+ let termMatches = 0;
+ for (const term of terms) {
+ if (path.includes(term) || summary.includes(term)) {
+ termMatches += 1;
+ }
  }
+ const matchRatio = termMatches / terms.length;
+ const termBoost = matchRatio >= 1 ? 1.0 : matchRatio >= 0.5 ? 0.5 : 0;
+ // 🧠 Final score with hybrid weighting (BM25 + Embedding + Term Boost)
+ finalScore = 0.4 * normalizedBm25 + 0.4 * sim + 0.2 * termBoost;
+ // ✅ Fuzzy score using string-similarity-js
+ const fuzzyScore = stringSimilarity(query.toLowerCase(), `${path} ${summary}`);
+ finalScore += fuzzyScore * 10;
  return {
- id: result.id, // Ensure the id is included here
+ id: result.id,
  path: result.path,
  summary: result.summary,
  score: finalScore,
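For reference, the reworked scoring always computes a normalized BM25 component now (previously BM25 alone was only a fallback when no embedding was stored), blends it with embedding cosine similarity and a keyword-coverage boost, and finally adds a fuzzy string-similarity bonus scaled by 10. A standalone sketch of that arithmetic with made-up inputs; `hybridScore` is a hypothetical name, while the 0.4 / 0.4 / 0.2 weights and the ×10 fuzzy factor come from the diff:

```js
// Hypothetical helper mirroring the weighting introduced in searchFiles().
function hybridScore({ normalizedBm25, cosineSim, matchRatio, fuzzyScore }) {
  const termBoost = matchRatio >= 1 ? 1.0 : matchRatio >= 0.5 ? 0.5 : 0;
  const blended = 0.4 * normalizedBm25 + 0.4 * cosineSim + 0.2 * termBoost;
  return blended + fuzzyScore * 10; // fuzzyScore is 0..1, so this term can dominate the blend
}

// Example: a candidate with a good BM25 rank, moderate embedding similarity,
// all query terms present, and a fuzzy similarity of 0.3.
console.log(hybridScore({
  normalizedBm25: 0.8,
  cosineSim: 0.6,
  matchRatio: 1,
  fuzzyScore: 0.3,
})); // 0.4*0.8 + 0.4*0.6 + 0.2*1 + 0.3*10 = 3.76
```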
@@ -58,7 +58,7 @@ export async function extractFromJS(filePath, content, fileId) {
  });
  if (functions.length === 0) {
  log(`⚠️ No functions found in: ${filePath}`);
- return;
+ return false;
  }
  log(`🔍 Found ${functions.length} functions in ${filePath}`);
  for (const fn of functions) {
@@ -109,4 +109,5 @@ export async function extractFromJS(filePath, content, fileId) {
  WHERE id = @fileId
  `).run({ fileId });
  log(`✅ Marked functions as extracted for ${filePath}`);
+ return true;
  }
@@ -12,8 +12,7 @@ export async function extractFunctionsFromFile(filePath, content, fileId) {
  try {
  if (type === 'js' || type === 'ts' || type === 'javascript' || type === 'typescript') {
  log(`✅ Attempting to extract JS functions from ${filePath}\n`);
- await extractFromJS(filePath, content, fileId);
- return true;
+ return await extractFromJS(filePath, content, fileId);
  }
  if (type === 'java') {
  log(`❌ Nothing extracted for ${filePath} due to missing implementation`);
@@ -3,9 +3,10 @@ import path from 'path';
  import { extractFunctionsFromFile } from './functionExtractors/index.js';
  /**
  * Extracts functions from file if language is supported.
+ * Returns true if functions were extracted, false otherwise.
  */
  export async function indexFunctionsForFile(filePath, fileId) {
  const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
  const content = fs.readFileSync(filePath, 'utf-8');
- await extractFunctionsFromFile(normalizedPath, content, fileId);
+ return await extractFunctionsFromFile(normalizedPath, content, fileId);
  }
package/dist/db/schema.js CHANGED
@@ -1,9 +1,11 @@
  import { db } from "./client.js";
  export function initSchema() {
  db.exec(`
+ -- Create the files table
  CREATE TABLE IF NOT EXISTS files (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  path TEXT UNIQUE,
+ filename TEXT, -- ✅ Store extracted filename here
  summary TEXT,
  type TEXT,
  indexed_at TEXT,
@@ -13,27 +15,12 @@ export function initSchema() {
  functions_extracted_at TEXT
  );

+ -- Create the full-text search table, auto-updated via content=files
  CREATE VIRTUAL TABLE IF NOT EXISTS files_fts
- USING fts5(path, summary, content='files', content_rowid='id');
-
- -- FTS Triggers to keep files_fts in sync
- CREATE TRIGGER IF NOT EXISTS files_ai AFTER INSERT ON files BEGIN
- INSERT INTO files_fts(rowid, path, summary)
- VALUES (new.id, new.path, new.summary);
- END;
-
- CREATE TRIGGER IF NOT EXISTS files_au AFTER UPDATE ON files BEGIN
- UPDATE files_fts SET
- path = new.path,
- summary = new.summary
- WHERE rowid = new.id;
- END;
-
- CREATE TRIGGER IF NOT EXISTS files_ad AFTER DELETE ON files BEGIN
- DELETE FROM files_fts WHERE rowid = old.id;
- END;
+ USING fts5(filename, summary, path, content='files', content_rowid='id');
  `);
- console.log('✅ SQLite schema initialized with FTS5 triggers');
+ console.log('✅ SQLite schema initialized with FTS5 auto-sync');
+ // Create additional tables for functions and function_calls
  db.exec(`
  CREATE TABLE IF NOT EXISTS functions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -1,8 +1,9 @@
  // Upsert file metadata into `files`
  export const upsertFileTemplate = `
- INSERT INTO files (path, summary, type, last_modified, indexed_at, embedding)
- VALUES (:path, :summary, :type, :lastModified, :indexedAt, :embedding)
+ INSERT INTO files (path, filename, summary, type, last_modified, indexed_at, embedding)
+ VALUES (:path, :filename, :summary, :type, :lastModified, :indexedAt, :embedding)
  ON CONFLICT(path) DO UPDATE SET
+ filename = excluded.filename, -- Update filename when path conflicts
  summary = CASE
  WHEN excluded.summary IS NOT NULL AND excluded.summary != files.summary
  THEN excluded.summary
@@ -17,9 +18,10 @@ export const upsertFileTemplate = `
  ELSE files.embedding
  END
  `;
- // Fetch search results with BM25 ranking
+ // 📌 CHANGE 1: Include `filename` in SELECT + weight `filename` highest in bm25
  export const fetchBm25ScoresTemplate = `
- SELECT f.path, f.summary, f.type, bm25(files_fts) AS bm25Score
+ SELECT f.path, f.filename, f.summary, f.type,
+ bm25(files_fts, 10.0, 2.0, 1.0) AS bm25Score
  FROM files_fts
  JOIN files f ON files_fts.rowid = f.id
  WHERE files_fts MATCH :query
@@ -29,10 +31,10 @@ export const fetchBm25ScoresTemplate = `
  export const fetchEmbeddingTemplate = `
  SELECT embedding FROM files WHERE path = :path
  `;
- // Used for non-embedding query in `queryFiles()`
+ // 📌 CHANGE 2: Also added weighted `bm25()` with explicit weights here
  export const rawQueryTemplate = `
- SELECT f.path, f.summary, f.type, f.last_modified, f.indexed_at,
- bm25(files_fts) AS rank
+ SELECT f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at,
+ bm25(files_fts, 10.0, 2.0, 1.0) AS rank
  FROM files_fts
  JOIN files f ON files_fts.rowid = f.id
  WHERE files_fts MATCH :query
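The optional arguments to `bm25()` are per-column weights, applied in the order the columns were declared on `files_fts` (`filename`, `summary`, `path`), so `bm25(files_fts, 10.0, 2.0, 1.0)` makes a filename hit count ten times as much as a path hit. SQLite's `bm25()` also returns more-negative values for better matches, which is why these queries sort ascending. A small self-contained sketch of the weighted ranking with throwaway data (a plain FTS5 table, not the package's external-content setup):

```js
import Database from 'better-sqlite3';

const db = new Database(':memory:');
db.exec(`
  CREATE VIRTUAL TABLE files_fts USING fts5(filename, summary, path);
  INSERT INTO files_fts (filename, summary, path) VALUES
    ('search.js', 'Hybrid search over indexed files', 'dist/db/search.js'),
    ('readme.md', 'Mentions search once',             'docs/readme.md');
`);

// Weights follow column declaration order: filename=10, summary=2, path=1.
const rows = db.prepare(`
  SELECT filename, bm25(files_fts, 10.0, 2.0, 1.0) AS score
  FROM files_fts
  WHERE files_fts MATCH 'search'
  ORDER BY score ASC    -- bm25() is negative; smaller = better match
`).all();

console.log(rows[0].filename); // 'search.js' ranks first: the term hits the heavily weighted filename column
```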
@@ -28,7 +28,7 @@ Extension: ${ext}
  ${content}
  --- FILE CONTENT END ---
  `.trim();
- const response = await generate({ content, filepath }, model);
+ const response = await generate({ content: prompt, filepath }, model);
  if (response.content) {
  response.summary = response.content;
  console.log('\n📝 Summary:\n');
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "scai",
- "version": "0.1.45",
+ "version": "0.1.47",
  "type": "module",
  "bin": {
  "scai": "./dist/index.js"
@@ -30,7 +30,8 @@
  "better-sqlite3": "^12.1.1",
  "commander": "^11.0.0",
  "fast-glob": "^3.3.3",
- "proper-lockfile": "^4.1.2"
+ "proper-lockfile": "^4.1.2",
+ "string-similarity-js": "^2.1.4"
  },
  "devDependencies": {
  "@types/better-sqlite3": "^7.6.13",