scai 0.1.114 → 0.1.116

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/CHANGELOG.md CHANGED
@@ -193,4 +193,10 @@ Type handling with the module pipeline
  
  ## 2025-09-13
  
- • Improve robustness of context update logic
+ • Improve robustness of context update logic
+
+ ## 2025-09-18
+
+ • Improved logic for handling PR approval and inline comments
+ • Add function call table to graph edge functionality
+ • Improve JSON validation and cleanup in pipeline module
@@ -75,5 +75,5 @@ export async function resetDatabase() {
              console.warn('⚠️ Failed to remove lock directory:', err instanceof Error ? err.message : err);
          }
      }
-     console.log('✅ Database has been reset.' + chalk.yellow('You can now re-run: scai index'));
+     console.log('✅ Database has been reset.' + chalk.yellow('You can now re-run: scai index start'));
  }
@@ -330,7 +330,7 @@ async function promptAIReviewSuggestions(aiOutput, chunkContent) {
      else if (trimmed === 'c') {
          selected = await promptCustomReview();
      }
-     else if (trimmed === 's') {
+     else if (trimmed === 's' || trimmed === '' || trimmed === ' ') {
          return "skip";
      }
      else if (trimmed === 'q') {
@@ -545,23 +545,25 @@ export async function reviewPullRequestCmd(branch = 'main', showAll = false) {
              });
              const shouldApprove = allApproved;
              const hasInlineComments = reviewComments.length > 0;
-             // We always submit comments first if any
-             const initialReviewState = shouldApprove ? 'APPROVE' : 'REQUEST_CHANGES';
-             const initialReviewBody = shouldApprove
-                 ? '✅ Reviewed.'
-                 : '⛔ Requested changes based on review.';
-             console.log(shouldApprove && !hasInlineComments
-                 ? chalk.green('✔️ All chunks approved. Submitting final PR approval.')
-                 : !shouldApprove
-                     ? chalk.red('❌ Not all chunks were approved. Changes requested.')
-                     : chalk.green('📝 Submitting inline comments before approval.') // ✅ NEW
-             );
-             // ✅ Submit review with inline comments or direct approval/request
-             await submitReview(pr.number, initialReviewBody, initialReviewState, reviewComments);
-             // ✅ Then submit separate approval if needed
-             if (shouldApprove && hasInlineComments) {
-                 await submitReview(pr.number, 'PR approved after inline comments.', 'APPROVE');
+             let reviewState;
+             let reviewBody;
+             if (shouldApprove) {
+                 reviewState = "APPROVE";
+                 reviewBody = hasInlineComments
+                     ? "PR approved after inline comments."
+                     : "✅ Reviewed.";
              }
+             else {
+                 reviewState = "REQUEST_CHANGES";
+                 reviewBody = "⛔ Requested changes based on review.";
+             }
+             console.log(shouldApprove
+                 ? hasInlineComments
+                     ? chalk.green("📝 Submitting inline comments with approval.")
+                     : chalk.green("✔️ All chunks approved. Submitting final PR approval.")
+                 : chalk.red("❌ Not all chunks were approved. Changes requested."));
+             // ✅ Only one submission, inline comments are preserved
+             await submitReview(pr.number, reviewBody, reviewState, reviewComments);
          }
      }
      catch (err) {
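
The new logic collapses the old two-step flow (comments first, then a separate approval) into a single review submission, so the verdict and any inline comments land together. As a rough illustration of how a `submitReview(pullNumber, body, event, comments)` helper with that signature could map onto GitHub's create-review endpoint — scai's actual implementation is not shown in this diff, and the Octokit usage and owner/repo values below are assumptions:

```js
// Hypothetical sketch, not scai's real submitReview. GitHub's "create a review"
// endpoint accepts the verdict and the inline comments in one call, which is what
// makes the single-submission flow above possible.
import { Octokit } from '@octokit/rest';

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function submitReview(pullNumber, body, event, comments = []) {
    // comments: [{ path, line, body }] — inline review comments anchored to diff lines
    await octokit.pulls.createReview({
        owner: 'example-owner',   // placeholder
        repo: 'example-repo',     // placeholder
        pull_number: pullNumber,
        body,
        event,                    // 'APPROVE' | 'REQUEST_CHANGES' | 'COMMENT'
        comments,
    });
}
```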
@@ -7,12 +7,11 @@ import lockfile from 'proper-lockfile';
  import { summaryModule } from '../pipeline/modules/summaryModule.js';
  import { classifyFile } from '../fileRules/classifyFile.js';
  import { getDbForRepo, getDbPathForRepo } from '../db/client.js';
- import { markFileAsSkippedByPath, selectUnprocessedFiles, updateFileWithSummaryAndEmbedding, } from '../db/sqlTemplates.js';
+ import { markFileAsSkippedByPath, selectUnprocessedFiles, updateFileWithSummaryAndEmbedding, insertGraphTagTemplate, selectGraphTagIdTemplate, insertGraphEntityTagTemplate, } from '../db/sqlTemplates.js';
  import { kgModule } from '../pipeline/modules/kgModule.js';
  const MAX_FILES_PER_BATCH = 5;
  /**
-  * Acquires a lock on the database to ensure that only one daemon batch
-  * can modify it at a time.
+  * Acquire a lock on the DB so only one daemon batch modifies it at a time
   */
  async function lockDb() {
      try {
@@ -24,12 +23,10 @@ async function lockDb() {
      }
  }
  /**
-  * Runs a daemon batch to process up to MAX_FILES_PER_BATCH unprocessed files.
-  * This includes:
-  * - Verifying file existence and validity
-  * - Generating summaries and embeddings if needed
-  * - Extracting functions from source files
-  * - Marking skipped files as necessary
+  * Run a daemon batch to process up to MAX_FILES_PER_BATCH files.
+  * - Generates summaries & embeddings
+  * - Extracts functions
+  * - Builds Knowledge Graph (persisting entities, tags, and edges)
   */
  export async function runDaemonBatch() {
      log('🟡 Starting daemon batch...');
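
The rewritten docstring frames `runDaemonBatch` as a single pass over up to `MAX_FILES_PER_BATCH` files (summaries and embeddings, function extraction, knowledge-graph persistence). As a minimal sketch of how a caller might drive repeated passes — the actual daemon wiring is outside this diff, and the import path and interval below are assumptions:

```js
// Illustrative only: run batches back to back with a pause between passes.
// The module path and the 5-second interval are assumptions, not scai's daemon code.
import { runDaemonBatch } from './daemon.js';

async function daemonLoop() {
    for (;;) {
        await runDaemonBatch(); // processes up to MAX_FILES_PER_BATCH files per pass
        await new Promise((resolve) => setTimeout(resolve, 5000));
    }
}

daemonLoop().catch((err) => console.error('Daemon loop crashed:', err));
```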
@@ -92,32 +89,22 @@ export async function runDaemonBatch() {
              const kgResult = await kgModule.run(kgInput, content);
              log(`✅ Knowledge Graph built for ${row.path}`);
              log(`Entities: ${kgResult.entities.length}, Edges: ${kgResult.edges.length}`);
-             // Persist KG entities + tags only if there are any
+             // Persist KG entities + tags
              if (kgResult.entities.length > 0) {
-                 const insertTag = db.prepare(`
-                     INSERT OR IGNORE INTO tags_master (name) VALUES (:name)
-                 `);
-                 const getTagId = db.prepare(`
-                     SELECT id FROM tags_master WHERE name = :name
-                 `);
-                 const insertEntityTag = db.prepare(`
-                     INSERT OR IGNORE INTO entity_tags (entity_type, entity_id, tag_id)
-                     VALUES (:entity_type, :entity_id, :tag_id)
-                 `);
+                 const insertTag = db.prepare(insertGraphTagTemplate);
+                 const getTagId = db.prepare(selectGraphTagIdTemplate);
+                 const insertEntityTag = db.prepare(insertGraphEntityTagTemplate);
                  for (const entity of kgResult.entities) {
-                     // Skip entity if type or tags are missing
                      if (!entity.type || !Array.isArray(entity.tags) || entity.tags.length === 0) {
                          console.warn(`⚠ Skipping entity due to missing type or tags:`, entity);
                          continue;
                      }
                      for (const tag of entity.tags) {
-                         // Skip empty or invalid tags
                          if (!tag || typeof tag !== 'string') {
                              console.warn(`⚠ Skipping invalid tag for entity ${entity.type}:`, tag);
                              continue;
                          }
                          try {
-                             // ✅ Use :name in SQL and plain key in object
                              insertTag.run({ name: tag });
                              const tagRow = getTagId.get({ name: tag });
                              if (!tagRow) {
@@ -126,7 +113,45 @@ export async function runDaemonBatch() {
                              }
                              insertEntityTag.run({
                                  entity_type: entity.type,
-                                 entity_id: row.id,
+                                 entity_unique_id: `${entity.name}@${row.path}`,
+                                 tag_id: tagRow.id,
+                             });
+                         }
+                         catch (err) {
+                             console.error(`❌ Failed to persist entity/tag:`, { entity, tag, error: err });
+                         }
+                     }
+                 }
+                 log(`✅ Persisted entities + tags for ${row.path}`);
+             }
+             else {
+                 log(`⚠️ No entities found for ${row.path}, skipping DB inserts`);
+             }
+             // Persist KG entities + tags (from LLM)
+             if (kgResult.entities.length > 0) {
+                 const insertTag = db.prepare(insertGraphTagTemplate);
+                 const getTagId = db.prepare(selectGraphTagIdTemplate);
+                 const insertEntityTag = db.prepare(insertGraphEntityTagTemplate);
+                 for (const entity of kgResult.entities) {
+                     if (!entity.type || !Array.isArray(entity.tags) || entity.tags.length === 0) {
+                         console.warn(`⚠ Skipping entity due to missing type or tags:`, entity);
+                         continue;
+                     }
+                     for (const tag of entity.tags) {
+                         if (!tag || typeof tag !== 'string') {
+                             console.warn(`⚠ Skipping invalid tag for entity ${entity.type}:`, tag);
+                             continue;
+                         }
+                         try {
+                             insertTag.run({ name: tag });
+                             const tagRow = getTagId.get({ name: tag });
+                             if (!tagRow) {
+                                 console.warn(`⚠ Could not find tag ID for: ${tag}`);
+                                 continue;
+                             }
+                             insertEntityTag.run({
+                                 entity_type: entity.type,
+                                 entity_unique_id: `${entity.name}@${row.path}`,
                                  tag_id: tagRow.id,
                              });
                          }
@@ -152,7 +177,8 @@ export async function runDaemonBatch() {
          catch (err) {
              log(`❌ Failed: ${row.path}: ${err instanceof Error ? err.message : String(err)}\n`);
          }
-         await new Promise(resolve => setTimeout(resolve, 200));
+         // Short delay between files
+         await new Promise((resolve) => setTimeout(resolve, 200));
      }
      await release();
      log('✅ Finished daemon batch.\n\n');
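
The batch now prepares its tag statements from named templates rather than inline SQL. Based on the statements removed above and on the parameters the new call sites bind (`{ name }` and `{ entity_type, entity_unique_id, tag_id }`), the templates plausibly look like the sketch below; the actual exports in `db/sqlTemplates.js` are not shown in this diff and may differ.

```js
// Plausible contents for the new exports in db/sqlTemplates.js, inferred from the
// inline SQL removed in this diff and the named parameters passed at the call sites.
export const insertGraphTagTemplate = `
  INSERT OR IGNORE INTO tags_master (name) VALUES (:name)
`;

export const selectGraphTagIdTemplate = `
  SELECT id FROM tags_master WHERE name = :name
`;

// The old entity_tags insert bound :entity_id; the new call sites bind
// :entity_unique_id (e.g. `myFunction@src/foo.js`), so the table is presumably
// keyed by that string now.
export const insertGraphEntityTagTemplate = `
  INSERT OR IGNORE INTO entity_tags (entity_type, entity_unique_id, tag_id)
  VALUES (:entity_type, :entity_unique_id, :tag_id)
`;
```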
@@ -5,7 +5,7 @@ import { sanitizeQueryForFts } from '../utils/sanitizeQuery.js';
  import * as sqlTemplates from './sqlTemplates.js';
  import { CANDIDATE_LIMIT } from '../constants.js';
  import { getDbForRepo } from './client.js';
- import { scoreFiles } from '../fileRules/scoreFiles.js'; // 👈 NEW
+ import { scoreFiles } from '../fileRules/scoreFiles.js';
  import chalk from 'chalk';
  export function indexFile(filePath, summary, type) {
      const stats = fs.statSync(filePath);
@@ -23,10 +23,7 @@ export function indexFile(filePath, summary, type) {
          indexedAt,
          embedding: null
      });
-     db.prepare(`
-         INSERT OR REPLACE INTO files_fts (rowid, filename, summary, path)
-         VALUES ((SELECT id FROM files WHERE path = :path), :filename, :summary, :path)
-     `).run({
+     db.prepare(sqlTemplates.upsertFileFtsTemplate).run({
          path: normalizedPath,
          filename: fileName,
          summary,
@@ -36,13 +33,7 @@ export function indexFile(filePath, summary, type) {
  export function queryFiles(safeQuery, limit = 10) {
      console.log(`Executing search query: ${safeQuery}`);
      const db = getDbForRepo();
-     return db.prepare(`
-         SELECT f.id, f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at
-         FROM files f
-         JOIN files_fts fts ON f.id = fts.rowid
-         WHERE fts.files_fts MATCH ?
-         LIMIT ?
-     `).all(safeQuery, limit);
+     return db.prepare(sqlTemplates.queryFilesTemplate).all(safeQuery, limit);
  }
  export async function searchFiles(query, topK = 5) {
      console.log(chalk.yellow(`🧠 Searching for query: "${query}"`));
@@ -54,14 +45,7 @@ export async function searchFiles(query, topK = 5) {
      const safeQuery = sanitizeQueryForFts(query);
      console.log(`Executing search query in FTS5: ${safeQuery}`);
      const db = getDbForRepo();
-     const ftsResults = db.prepare(`
-         SELECT fts.rowid AS id, f.path, f.filename, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
-         FROM files f
-         JOIN files_fts fts ON f.id = fts.rowid
-         WHERE fts.files_fts MATCH ?
-         ORDER BY bm25Score ASC
-         LIMIT ?
-     `).all(safeQuery, CANDIDATE_LIMIT);
+     const ftsResults = db.prepare(sqlTemplates.searchFilesTemplate).all(safeQuery, CANDIDATE_LIMIT);
      console.log(`FTS search returned ${ftsResults.length} results`);
      if (ftsResults.length === 0)
          return [];
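
The same template extraction happens in the search layer: `indexFile`, `queryFiles`, and `searchFiles` now reference `sqlTemplates.*` constants instead of embedding SQL. The removed statements show what those constants would contain, roughly as below; the real exports are not part of this diff.

```js
// Reconstructed from the inline SQL removed in this diff; the names come from the new
// call sites (sqlTemplates.upsertFileFtsTemplate, .queryFilesTemplate, .searchFilesTemplate).
export const upsertFileFtsTemplate = `
  INSERT OR REPLACE INTO files_fts (rowid, filename, summary, path)
  VALUES ((SELECT id FROM files WHERE path = :path), :filename, :summary, :path)
`;

export const queryFilesTemplate = `
  SELECT f.id, f.path, f.filename, f.summary, f.type, f.last_modified, f.indexed_at
  FROM files f
  JOIN files_fts fts ON f.id = fts.rowid
  WHERE fts.files_fts MATCH ?
  LIMIT ?
`;

export const searchFilesTemplate = `
  SELECT fts.rowid AS id, f.path, f.filename, f.summary, f.type, bm25(files_fts) AS bm25Score, f.embedding
  FROM files f
  JOIN files_fts fts ON f.id = fts.rowid
  WHERE fts.files_fts MATCH ?
  ORDER BY bm25Score ASC
  LIMIT ?
`;
```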
@@ -3,8 +3,9 @@ import { ancestor as walkAncestor } from 'acorn-walk';
  import { generateEmbedding } from '../../lib/generateEmbedding.js';
  import path from 'path';
  import { log } from '../../utils/log.js';
- import { markFileAsSkippedTemplate, markFileAsExtractedTemplate, markFileAsFailedTemplate } from '../sqlTemplates.js';
+ import { markFileAsSkippedTemplate, markFileAsExtractedTemplate, markFileAsFailedTemplate, insertFunctionTemplate, insertGraphClassTemplate, insertEdgeTemplate, insertGraphEntityTagTemplate, insertGraphTagTemplate, selectGraphTagIdTemplate, } from '../sqlTemplates.js';
  import { getDbForRepo } from '../client.js';
+ import { kgModule } from '../../pipeline/modules/kgModule.js';
  function getFunctionName(node, parent, fileName) {
      if (node.id?.name)
          return node.id.name;
@@ -16,69 +17,99 @@ function getFunctionName(node, parent, fileName) {
          return parent.left.name;
      if (parent?.type === 'MethodDefinition' && parent.key?.name)
          return parent.key.name;
-     return `${fileName}:<anon>`;
+     return '<anon>';
  }
  export async function extractFromJS(filePath, content, fileId) {
      const db = getDbForRepo();
+     const normalizedPath = path.normalize(filePath).replace(/\\/g, '/');
      try {
-         console.log(`[Debug] Attempting to parse: ${filePath}`);
-         console.log(`[Debug] First 3 lines:\n${content.split('\n').slice(0, 3).join('\n')}`);
-         const ast = parse(content, {
-             ecmaVersion: 'latest',
-             sourceType: 'module',
-             locations: true,
-         });
+         const ast = parse(content, { ecmaVersion: 'latest', sourceType: 'module', locations: true });
          const functions = [];
          const classes = [];
+         const imports = [];
+         const exports = [];
+         // --- Traverse AST ---
          walkAncestor(ast, {
+             ImportDeclaration(node) {
+                 if (node.source?.value)
+                     imports.push(node.source.value);
+             },
+             ExportNamedDeclaration(node) {
+                 if (node.source?.value)
+                     exports.push(node.source.value);
+             },
+             ExportAllDeclaration(node) {
+                 if (node.source?.value)
+                     exports.push(node.source.value);
+             },
              FunctionDeclaration(node, ancestors) {
                  const parent = ancestors[ancestors.length - 2];
                  const name = getFunctionName(node, parent, path.basename(filePath));
+                 const uniqueId = name !== '<anon>'
+                     ? `${name}@${normalizedPath}`
+                     : `${path.basename(filePath)}:<anon>@${normalizedPath}:${node.loc?.start.line}`;
                  functions.push({
                      name,
                      start_line: node.loc?.start.line ?? -1,
                      end_line: node.loc?.end.line ?? -1,
                      content: content.slice(node.start, node.end),
+                     uniqueId,
                  });
              },
              FunctionExpression(node, ancestors) {
                  const parent = ancestors[ancestors.length - 2];
                  const name = getFunctionName(node, parent, path.basename(filePath));
+                 const uniqueId = name !== '<anon>'
+                     ? `${name}@${normalizedPath}`
+                     : `${path.basename(filePath)}:<anon>@${normalizedPath}:${node.loc?.start.line}`;
                  functions.push({
                      name,
                      start_line: node.loc?.start.line ?? -1,
                      end_line: node.loc?.end.line ?? -1,
                      content: content.slice(node.start, node.end),
+                     uniqueId,
                  });
              },
              ArrowFunctionExpression(node, ancestors) {
                  const parent = ancestors[ancestors.length - 2];
                  const name = getFunctionName(node, parent, path.basename(filePath));
+                 const uniqueId = name !== '<anon>'
+                     ? `${name}@${normalizedPath}`
+                     : `${path.basename(filePath)}:<anon>@${normalizedPath}:${node.loc?.start.line}`;
                  functions.push({
                      name,
                      start_line: node.loc?.start.line ?? -1,
                      end_line: node.loc?.end.line ?? -1,
                      content: content.slice(node.start, node.end),
+                     uniqueId,
                  });
              },
              ClassDeclaration(node) {
-                 const className = node.id?.name || `${path.basename(filePath)}:<anon-class>`;
+                 const className = node.id?.name || '<anon-class>';
+                 const uniqueId = className !== '<anon-class>'
+                     ? `${className}@${normalizedPath}`
+                     : `${path.basename(filePath)}:<anon-class>@${normalizedPath}:${node.loc?.start.line}`;
                  classes.push({
                      name: className,
                      start_line: node.loc?.start.line ?? -1,
                      end_line: node.loc?.end.line ?? -1,
                      content: content.slice(node.start, node.end),
                      superClass: node.superClass?.name ?? null,
+                     uniqueId,
                  });
              },
              ClassExpression(node) {
-                 const className = node.id?.name || `${path.basename(filePath)}:<anon-class>`;
+                 const className = node.id?.name || '<anon-class>';
+                 const uniqueId = className !== '<anon-class>'
+                     ? `${className}@${normalizedPath}`
+                     : `${path.basename(filePath)}:<anon-class>@${normalizedPath}:${node.loc?.start.line}`;
                  classes.push({
                      name: className,
                      start_line: node.loc?.start.line ?? -1,
                      end_line: node.loc?.end.line ?? -1,
                      content: content.slice(node.start, node.end),
                      superClass: node.superClass?.name ?? null,
+                     uniqueId,
                  });
              },
          });
@@ -88,109 +119,144 @@ export async function extractFromJS(filePath, content, fileId) {
              return false;
          }
          log(`🔍 Found ${functions.length} functions and ${classes.length} classes in ${filePath}`);
-         // Insert functions
-         for (const fn of functions) {
-             const embedding = await generateEmbedding(fn.content);
-             const result = db
-                 .prepare(`
-                     INSERT INTO functions (
-                         file_id, name, start_line, end_line, content, embedding, lang
-                     ) VALUES (
-                         @file_id, @name, @start_line, @end_line, @content, @embedding, @lang
-                     )
-                 `)
-                 .run({
-                     file_id: fileId,
-                     name: fn.name,
-                     start_line: fn.start_line,
-                     end_line: fn.end_line,
-                     content: fn.content,
-                     embedding: JSON.stringify(embedding),
-                     lang: 'js',
-                 });
-             const functionId = result.lastInsertRowid;
-             // file → function edge
-             db.prepare(`INSERT INTO edges (source_type, source_id, target_type, target_id, relation)
-                 VALUES (@source_type, @source_id, @target_type, @target_id, 'contains')`).run({
-                 source_type: 'file',
-                 source_id: fileId,
-                 target_type: 'function',
-                 target_id: functionId,
-             });
-             // Walk inside function to find calls
-             const fnAst = parse(fn.content, {
-                 ecmaVersion: 'latest',
-                 sourceType: 'module',
-                 locations: true,
-             });
-             const calls = [];
-             walkAncestor(fnAst, {
-                 CallExpression(node) {
-                     if (node.callee?.type === 'Identifier' && node.callee.name) {
-                         calls.push({ calleeName: node.callee.name });
+         // --- KG tagging ---
+         try {
+             const kgInput = { fileId, filepath: filePath, summary: undefined };
+             const kgResult = await kgModule.run(kgInput, content);
+             if (kgResult.entities?.length > 0) {
+                 const insertTag = db.prepare(insertGraphTagTemplate);
+                 const getTagId = db.prepare(selectGraphTagIdTemplate);
+                 const insertEntityTag = db.prepare(insertGraphEntityTagTemplate);
+                 for (const entity of kgResult.entities) {
+                     if (!entity.type || !Array.isArray(entity.tags) || entity.tags.length === 0)
+                         continue;
+                     for (const tag of entity.tags) {
+                         if (!tag || typeof tag !== 'string')
+                             continue;
+                         try {
+                             insertTag.run({ name: tag });
+                             const tagRow = getTagId.get({ name: tag });
+                             if (!tagRow)
+                                 continue;
+                             const matchedUniqueId = functions.find(f => f.name === entity.name)?.uniqueId ||
+                                 classes.find(c => c.name === entity.name)?.uniqueId ||
+                                 `${entity.name}@${filePath}`;
+                             insertEntityTag.run({
+                                 entity_type: entity.type,
+                                 entity_unique_id: matchedUniqueId,
+                                 tag_id: tagRow.id,
+                             });
+                         }
+                         catch (err) {
+                             console.error('❌ Failed to persist entity/tag', { entity, tag, error: err });
+                         }
                      }
-                 },
-             });
-             for (const call of calls) {
-                 // Store name for later resolution
-                 db.prepare(`INSERT INTO function_calls (caller_id, callee_name) VALUES (@caller_id, @callee_name)`).run({ caller_id: functionId, callee_name: call.calleeName });
-                 // Optional unresolved edge
-                 db.prepare(`INSERT INTO edges (source_type, source_id, target_type, target_id, relation)
-                     VALUES (@source_type, @source_id, @target_type, @target_id, 'calls')`).run({
-                     source_type: 'function',
-                     source_id: functionId,
+                 }
+                 log(`🏷 Persisted LLM-generated tags for ${filePath}`);
+             }
+         }
+         catch (kgErr) {
+             console.warn(`⚠️ KG tagging failed for ${filePath}:`, kgErr instanceof Error ? kgErr.message : kgErr);
+         }
+         // --- Insert functions + edges ---
+         for (const fn of functions) {
+             try {
+                 const embedding = await generateEmbedding(fn.content);
+                 db.prepare(insertFunctionTemplate).run({
+                     file_id: fileId,
+                     name: fn.name,
+                     start_line: fn.start_line,
+                     end_line: fn.end_line,
+                     content: fn.content,
+                     embedding: JSON.stringify(embedding),
+                     lang: 'js',
+                     unique_id: fn.uniqueId,
+                 });
+                 db.prepare(insertEdgeTemplate).run({
+                     source_type: 'file',
+                     source_unique_id: normalizedPath,
                      target_type: 'function',
-                     target_id: 0, // unresolved callee
+                     target_unique_id: fn.uniqueId,
+                     relation: 'contains',
+                 });
+                 const fnAst = parse(fn.content, { ecmaVersion: 'latest', sourceType: 'module', locations: true });
+                 walkAncestor(fnAst, {
+                     CallExpression(node) {
+                         const calleeName = node.callee?.name || 'unresolved';
+                         const targetUniqueId = `${calleeName}@${normalizedPath}`;
+                         db.prepare(insertEdgeTemplate).run({
+                             source_type: 'function',
+                             source_unique_id: fn.uniqueId,
+                             target_type: 'function',
+                             target_unique_id: targetUniqueId,
+                             relation: 'calls',
+                         });
+                     },
                  });
+                 log(`📌 Indexed JS function: ${fn.name}`);
+             }
+             catch (err) {
+                 console.error('❌ Failed to insert function or call edges', { fn, error: err });
              }
-             log(`📌 Indexed function: ${fn.name} with ${calls.length} calls`);
          }
-         // Insert classes
+         // --- Insert classes + edges ---
          for (const cls of classes) {
-             const embedding = await generateEmbedding(cls.content);
-             const result = db
-                 .prepare(`
-                     INSERT INTO classes (
-                         file_id, name, start_line, end_line, content, embedding, lang
-                     ) VALUES (
-                         @file_id, @name, @start_line, @end_line, @content, @embedding, @lang
-                     )
-                 `)
-                 .run({
-                     file_id: fileId,
-                     name: cls.name,
-                     start_line: cls.start_line,
-                     end_line: cls.end_line,
-                     content: cls.content,
-                     embedding: JSON.stringify(embedding),
-                     lang: 'js',
-                 });
-             const classId = result.lastInsertRowid;
-             // file → class edge
-             db.prepare(`INSERT INTO edges (source_type, source_id, target_type, target_id, relation)
-                 VALUES (@source_type, @source_id, @target_type, @target_id, 'contains')`).run({
-                 source_type: 'file',
-                 source_id: fileId,
-                 target_type: 'class',
-                 target_id: classId,
-             });
-             // superclass → store unresolved reference
-             if (cls.superClass) {
-                 db.prepare(`INSERT INTO edges (source_type, source_id, target_type, target_id, relation)
-                     VALUES (@source_type, @source_id, @target_type, @target_id, 'inherits')`).run({
-                     source_type: 'class',
-                     source_id: classId,
+             try {
+                 const embedding = await generateEmbedding(cls.content);
+                 db.prepare(insertGraphClassTemplate).run({
+                     file_id: fileId,
+                     name: cls.name,
+                     start_line: cls.start_line,
+                     end_line: cls.end_line,
+                     content: cls.content,
+                     embedding: JSON.stringify(embedding),
+                     lang: 'js',
+                     unique_id: cls.uniqueId,
+                 });
+                 db.prepare(insertEdgeTemplate).run({
+                     source_type: 'file',
+                     source_unique_id: normalizedPath,
                      target_type: 'class',
-                     target_id: 0, // unresolved superclass
+                     target_unique_id: cls.uniqueId,
+                     relation: 'contains',
                  });
-                 console.log(`🔗 Class ${cls.name} inherits ${cls.superClass} (edge stored for later resolution)`);
+                 if (cls.superClass) {
+                     db.prepare(insertEdgeTemplate).run({
+                         source_type: 'class',
+                         source_unique_id: cls.uniqueId,
+                         target_type: `unresolved:${cls.superClass}`,
+                         relation: 'inherits',
+                     });
+                     log(`🔗 Class ${cls.name} extends ${cls.superClass}`);
+                 }
+                 log(`🏷 Indexed JS class: ${cls.name}`);
              }
-             console.log(`🏷 Indexed class: ${cls.name} (id=${classId})`);
+             catch (err) {
+                 console.error('❌ Failed to insert class or edges', { cls, error: err });
+             }
+         }
+         // --- Imports / Exports edges ---
+         for (const imp of imports) {
+             db.prepare(insertEdgeTemplate).run({
+                 source_type: 'file',
+                 source_unique_id: normalizedPath,
+                 target_type: 'file',
+                 target_unique_id: `file@${imp}`,
+                 relation: 'imports',
+             });
+         }
+         for (const exp of exports) {
+             db.prepare(insertEdgeTemplate).run({
+                 source_type: 'file',
+                 source_unique_id: normalizedPath,
+                 target_type: 'file',
+                 target_unique_id: `file@${exp}`,
+                 relation: 'exports',
+             });
          }
-         // Optional summary after extraction
-         console.log(`📊 Extraction summary for ${filePath}: ${functions.length} functions, ${classes.length} classes`);
+         log(`📊 Extraction summary for ${filePath}: ${functions.length} functions, ${classes.length} classes, ${imports.length} imports, ${exports.length} exports`);
          db.prepare(markFileAsExtractedTemplate).run({ id: fileId });
-         log(`✅ Marked functions/classes as extracted for ${filePath}`);
+         log(`✅ Marked JS functions/classes as extracted for ${filePath}`);
          return true;
      }
      catch (err) {
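
Throughout the extractor, graph rows are now keyed by string unique IDs of the form `name@normalized/path.js` (with a line-number suffix for anonymous functions and classes) rather than by integer rowids, so edges can be written before their targets exist and resolved later. Below is a minimal sketch of an edge template and a hypothetical resolution query consistent with the parameters bound above; the package's real schema and template text are not shown in this diff.

```js
// Sketch only: an edges table keyed by unique IDs, matching the parameters the new
// call sites bind (source_type, source_unique_id, target_type, target_unique_id, relation).
// scai's actual insertEdgeTemplate and schema may differ.
export const insertEdgeTemplate = `
  INSERT INTO edges (source_type, source_unique_id, target_type, target_unique_id, relation)
  VALUES (:source_type, :source_unique_id, :target_type, :target_unique_id, :relation)
`;

// Because a 'calls' edge stores `callee@path` as a string, it can be joined back to a
// concrete function row once the callee has been indexed (hypothetical query name):
export const resolveCallEdgesTemplate = `
  SELECT e.source_unique_id AS caller, f.id AS callee_id, f.name AS callee
  FROM edges e
  JOIN functions f ON f.unique_id = e.target_unique_id
  WHERE e.relation = 'calls'
`;
```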