@zuvia-software-solutions/code-mapper 1.4.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/ai-context.js +1 -1
- package/dist/cli/analyze.d.ts +1 -0
- package/dist/cli/analyze.js +73 -82
- package/dist/cli/augment.js +0 -2
- package/dist/cli/eval-server.d.ts +2 -2
- package/dist/cli/eval-server.js +6 -6
- package/dist/cli/index.js +6 -10
- package/dist/cli/mcp.d.ts +1 -3
- package/dist/cli/mcp.js +3 -3
- package/dist/cli/refresh.d.ts +2 -2
- package/dist/cli/refresh.js +24 -29
- package/dist/cli/status.js +4 -13
- package/dist/cli/tool.d.ts +5 -4
- package/dist/cli/tool.js +8 -10
- package/dist/config/ignore-service.js +14 -34
- package/dist/core/augmentation/engine.js +53 -83
- package/dist/core/db/adapter.d.ts +99 -0
- package/dist/core/db/adapter.js +402 -0
- package/dist/core/db/graph-loader.d.ts +27 -0
- package/dist/core/db/graph-loader.js +148 -0
- package/dist/core/db/queries.d.ts +160 -0
- package/dist/core/db/queries.js +441 -0
- package/dist/core/db/schema.d.ts +108 -0
- package/dist/core/db/schema.js +136 -0
- package/dist/core/embeddings/embedder.d.ts +21 -12
- package/dist/core/embeddings/embedder.js +104 -50
- package/dist/core/embeddings/embedding-pipeline.d.ts +48 -22
- package/dist/core/embeddings/embedding-pipeline.js +220 -262
- package/dist/core/embeddings/text-generator.js +4 -19
- package/dist/core/embeddings/types.d.ts +1 -1
- package/dist/core/graph/graph.d.ts +1 -1
- package/dist/core/graph/graph.js +1 -0
- package/dist/core/graph/types.d.ts +11 -9
- package/dist/core/graph/types.js +4 -1
- package/dist/core/incremental/refresh.d.ts +46 -0
- package/dist/core/incremental/refresh.js +464 -0
- package/dist/core/incremental/types.d.ts +2 -1
- package/dist/core/incremental/types.js +42 -44
- package/dist/core/ingestion/ast-cache.js +1 -0
- package/dist/core/ingestion/call-processor.d.ts +15 -3
- package/dist/core/ingestion/call-processor.js +448 -60
- package/dist/core/ingestion/cluster-enricher.d.ts +1 -1
- package/dist/core/ingestion/cluster-enricher.js +2 -0
- package/dist/core/ingestion/community-processor.d.ts +1 -1
- package/dist/core/ingestion/community-processor.js +8 -3
- package/dist/core/ingestion/export-detection.d.ts +1 -1
- package/dist/core/ingestion/export-detection.js +1 -1
- package/dist/core/ingestion/filesystem-walker.js +1 -1
- package/dist/core/ingestion/heritage-processor.d.ts +2 -2
- package/dist/core/ingestion/heritage-processor.js +22 -11
- package/dist/core/ingestion/import-processor.d.ts +2 -2
- package/dist/core/ingestion/import-processor.js +24 -9
- package/dist/core/ingestion/language-config.js +7 -4
- package/dist/core/ingestion/mro-processor.d.ts +1 -1
- package/dist/core/ingestion/mro-processor.js +23 -11
- package/dist/core/ingestion/named-binding-extraction.js +5 -5
- package/dist/core/ingestion/parsing-processor.d.ts +4 -4
- package/dist/core/ingestion/parsing-processor.js +26 -18
- package/dist/core/ingestion/pipeline.d.ts +4 -2
- package/dist/core/ingestion/pipeline.js +50 -20
- package/dist/core/ingestion/process-processor.d.ts +2 -2
- package/dist/core/ingestion/process-processor.js +28 -14
- package/dist/core/ingestion/resolution-context.d.ts +1 -1
- package/dist/core/ingestion/resolution-context.js +14 -4
- package/dist/core/ingestion/resolvers/csharp.js +4 -3
- package/dist/core/ingestion/resolvers/go.js +3 -1
- package/dist/core/ingestion/resolvers/jvm.js +13 -4
- package/dist/core/ingestion/resolvers/standard.js +2 -2
- package/dist/core/ingestion/resolvers/utils.js +6 -2
- package/dist/core/ingestion/route-stitcher.d.ts +15 -0
- package/dist/core/ingestion/route-stitcher.js +92 -0
- package/dist/core/ingestion/structure-processor.d.ts +1 -1
- package/dist/core/ingestion/structure-processor.js +3 -2
- package/dist/core/ingestion/symbol-table.d.ts +2 -0
- package/dist/core/ingestion/symbol-table.js +5 -1
- package/dist/core/ingestion/tree-sitter-queries.d.ts +2 -2
- package/dist/core/ingestion/tree-sitter-queries.js +177 -0
- package/dist/core/ingestion/type-env.js +20 -0
- package/dist/core/ingestion/type-extractors/csharp.js +4 -3
- package/dist/core/ingestion/type-extractors/go.js +23 -12
- package/dist/core/ingestion/type-extractors/php.js +18 -10
- package/dist/core/ingestion/type-extractors/ruby.js +15 -3
- package/dist/core/ingestion/type-extractors/rust.js +3 -2
- package/dist/core/ingestion/type-extractors/shared.js +3 -2
- package/dist/core/ingestion/type-extractors/typescript.js +11 -5
- package/dist/core/ingestion/utils.d.ts +27 -4
- package/dist/core/ingestion/utils.js +145 -100
- package/dist/core/ingestion/workers/parse-worker.d.ts +1 -0
- package/dist/core/ingestion/workers/parse-worker.js +97 -29
- package/dist/core/ingestion/workers/worker-pool.js +3 -0
- package/dist/core/search/bm25-index.d.ts +15 -8
- package/dist/core/search/bm25-index.js +48 -98
- package/dist/core/search/hybrid-search.d.ts +9 -3
- package/dist/core/search/hybrid-search.js +30 -25
- package/dist/core/search/reranker.js +9 -7
- package/dist/core/search/types.d.ts +0 -4
- package/dist/core/semantic/tsgo-service.d.ts +5 -1
- package/dist/core/semantic/tsgo-service.js +161 -66
- package/dist/lib/tsgo-test.d.ts +2 -0
- package/dist/lib/tsgo-test.js +6 -0
- package/dist/lib/type-utils.d.ts +25 -0
- package/dist/lib/type-utils.js +22 -0
- package/dist/lib/utils.d.ts +3 -2
- package/dist/lib/utils.js +3 -2
- package/dist/mcp/compatible-stdio-transport.js +1 -1
- package/dist/mcp/local/local-backend.d.ts +29 -56
- package/dist/mcp/local/local-backend.js +808 -1118
- package/dist/mcp/resources.js +35 -25
- package/dist/mcp/server.d.ts +1 -1
- package/dist/mcp/server.js +5 -5
- package/dist/mcp/tools.js +24 -25
- package/dist/storage/repo-manager.d.ts +2 -12
- package/dist/storage/repo-manager.js +1 -47
- package/dist/types/pipeline.d.ts +8 -5
- package/dist/types/pipeline.js +5 -0
- package/package.json +18 -11
- package/dist/cli/serve.d.ts +0 -5
- package/dist/cli/serve.js +0 -8
- package/dist/core/incremental/child-process.d.ts +0 -8
- package/dist/core/incremental/child-process.js +0 -649
- package/dist/core/incremental/refresh-coordinator.d.ts +0 -32
- package/dist/core/incremental/refresh-coordinator.js +0 -147
- package/dist/core/lbug/csv-generator.d.ts +0 -28
- package/dist/core/lbug/csv-generator.js +0 -355
- package/dist/core/lbug/lbug-adapter.d.ts +0 -96
- package/dist/core/lbug/lbug-adapter.js +0 -753
- package/dist/core/lbug/schema.d.ts +0 -46
- package/dist/core/lbug/schema.js +0 -402
- package/dist/mcp/core/embedder.d.ts +0 -24
- package/dist/mcp/core/embedder.js +0 -168
- package/dist/mcp/core/lbug-adapter.d.ts +0 -29
- package/dist/mcp/core/lbug-adapter.js +0 -330
- package/dist/server/api.d.ts +0 -5
- package/dist/server/api.js +0 -340
- package/dist/server/mcp-http.d.ts +0 -7
- package/dist/server/mcp-http.js +0 -95
- package/models/mlx-embedder.py +0 -185
|
@@ -1,753 +0,0 @@
|
|
|
1
|
-
// code-mapper/src/core/lbug/lbug-adapter.ts
|
|
2
|
-
/**
|
|
3
|
-
* @file lbug-adapter.ts
|
|
4
|
-
* @description Core LadybugDB adapter — manages singleton DB connection, schema
|
|
5
|
-
* creation, bulk CSV import, FTS indexing, and Cypher query execution
|
|
6
|
-
*/
|
|
7
|
-
import fs from 'fs/promises';
|
|
8
|
-
import { createReadStream } from 'fs';
|
|
9
|
-
import { createInterface } from 'readline';
|
|
10
|
-
import path from 'path';
|
|
11
|
-
import lbug from '@ladybugdb/core';
|
|
12
|
-
import { NODE_TABLES, REL_TABLE_NAME, SCHEMA_QUERIES, EMBEDDING_TABLE_NAME, } from './schema.js';
|
|
13
|
-
import { streamAllCSVsToDisk } from './csv-generator.js';
|
|
14
|
-
// Singleton LadybugDB handles — one database/connection pair per process.
let db = null;
let conn = null;
// Path of the database currently backing `conn` (null when nothing is open).
let currentDbPath = null;
// Whether the FTS extension has been loaded for the current connection.
let ftsLoaded = false;
|
|
18
|
-
// Session lock — serializes operations so the singleton connection is never
// switched to a different database while another operation is in flight.
let sessionLock = Promise.resolve();
const runWithSessionLock = async (operation) => {
    // Chain behind the current tail of the queue, then install ourselves as
    // the new tail *before* awaiting, so concurrent callers line up behind us.
    const waitFor = sessionLock;
    let unlock;
    sessionLock = new Promise((resolve) => {
        unlock = resolve;
    });
    await waitFor;
    try {
        return await operation();
    }
    finally {
        // Release the lock even when the operation threw.
        if (unlock) unlock();
    }
};
|
|
34
|
-
// COPY statements want forward slashes even for Windows paths.
const normalizeCopyPath = (filePath) => filePath.split('\\').join('/');
|
|
35
|
-
/** Initialize (or reuse) the singleton LadybugDB connection for `dbPath`. */
export const initLbug = async (dbPath) => runWithSessionLock(() => ensureLbugInitialized(dbPath));
|
|
38
|
-
/**
 * Run `operation` against the repo database at `dbPath` while holding the
 * session lock, so no other caller can swap the singleton connection mid-way.
 */
export const withLbugDb = async (dbPath, operation) => {
    const locked = async () => {
        await ensureLbugInitialized(dbPath);
        return operation();
    };
    return runWithSessionLock(locked);
};
|
|
45
|
-
// Reuse the live connection when it already points at `dbPath`; otherwise
// (re)open the database. Returns the singleton {db, conn} pair either way.
const ensureLbugInitialized = async (dbPath) => {
    const alreadyOpen = Boolean(conn) && currentDbPath === dbPath;
    if (!alreadyOpen) {
        await doInitLbug(dbPath);
    }
    return { db, conn };
};
|
|
52
|
-
/**
 * Open (or re-open) the singleton database at `dbPath` and apply the schema.
 * Closes any previously open database first. Returns the {db, conn} pair.
 * Callers are expected to hold the session lock (see runWithSessionLock).
 */
const doInitLbug = async (dbPath) => {
    // Close old connection if switching databases; closes are best-effort.
    if (conn || db) {
        try {
            if (conn)
                await conn.close();
        }
        catch { }
        try {
            if (db)
                await db.close();
        }
        catch { }
        conn = null;
        db = null;
        currentDbPath = null;
        ftsLoaded = false;
    }
    // LadybugDB uses a single file — remove stale directories/symlinks left
    // behind by older versions that stored the database as a directory.
    try {
        // lstat (not stat) so a symlink is reported as a link, never followed.
        const stat = await fs.lstat(dbPath);
        if (stat.isSymbolicLink()) {
            // Never follow symlinks — remove the link itself
            await fs.unlink(dbPath);
        }
        else if (stat.isDirectory()) {
            // Verify path is within expected storage directory before deleting
            const realPath = await fs.realpath(dbPath);
            const parentDir = path.dirname(dbPath);
            const realParent = await fs.realpath(parentDir);
            if (!realPath.startsWith(realParent + path.sep) && realPath !== realParent) {
                // NOTE(review): this throw is swallowed by the enclosing catch
                // below, so the refusal never reaches the caller — it only
                // prevents the rm. Confirm whether it should propagate.
                throw new Error(`Refusing to delete ${dbPath}: resolved path ${realPath} is outside storage directory`);
            }
            // Old-style directory database — remove it
            await fs.rm(dbPath, { recursive: true, force: true });
        }
        // If it's a file, assume it's an existing LadybugDB database
    }
    catch {
        // Path doesn't exist — LadybugDB will create a new database
        // (also reached when the safety check above throws; see NOTE).
    }
    // Ensure parent directory exists
    const parentDir = path.dirname(dbPath);
    await fs.mkdir(parentDir, { recursive: true });
    db = new lbug.Database(dbPath);
    conn = new lbug.Connection(db);
    // Apply the schema idempotently; "already exists" is expected on reopen.
    for (const schemaQuery of SCHEMA_QUERIES) {
        try {
            await conn.query(schemaQuery);
        }
        catch (err) {
            // Only ignore "already exists" errors — log everything else
            const msg = err instanceof Error ? err.message : String(err);
            if (!msg.includes('already exists')) {
                console.warn(`⚠️ Schema creation warning: ${msg.slice(0, 120)}`);
            }
        }
    }
    currentDbPath = dbPath;
    return { db, conn };
};
|
|
113
|
-
/**
 * Bulk-load an in-memory graph into the current LadybugDB database via CSV
 * COPY statements. Requires initLbug to have been called first.
 *
 * @param graph - Graph to persist (nodes + relationships)
 * @param repoPath - Repository root (forwarded to CSV generation)
 * @param storagePath - Directory that receives the temporary `csv/` folder
 * @param onProgress - Optional progress callback, receives a message string
 * @returns { success, insertedRels, skippedRels, warnings }
 * @throws when not initialized, or when a node COPY fails even after the
 *   IGNORE_ERRORS retry
 */
export const loadGraphToLbug = async (graph, repoPath, storagePath, onProgress) => {
    if (!conn) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    const log = onProgress || (() => { });
    const csvDir = path.join(storagePath, 'csv');
    log('Streaming CSVs to disk...');
    const csvResult = await streamAllCSVsToDisk(graph, repoPath, csvDir);
    const validTables = new Set(NODE_TABLES);
    // Map a node id to its table label: 'comm_'/'proc_' prefixes are special;
    // all other ids encode the label before the first ':'.
    const getNodeLabel = (nodeId) => {
        if (nodeId.startsWith('comm_'))
            return 'Community';
        if (nodeId.startsWith('proc_'))
            return 'Process';
        return nodeId.split(':')[0];
    };
    // Bulk COPY node CSVs (sequential — LadybugDB allows one write txn at a time)
    const nodeFiles = [...csvResult.nodeFiles.entries()];
    const totalSteps = nodeFiles.length + 1; // +1 for relationships
    let stepsDone = 0;
    for (const [table, { csvPath, rows }] of nodeFiles) {
        stepsDone++;
        log(`Loading nodes ${stepsDone}/${totalSteps}: ${table} (${rows.toLocaleString()} rows)`);
        const normalizedPath = normalizeCopyPath(csvPath);
        const copyQuery = getCopyQuery(table, normalizedPath);
        try {
            await conn.query(copyQuery);
        }
        catch (err) {
            // Retry once with IGNORE_ERRORS so a few malformed rows don't
            // abort the whole table; a second failure is fatal.
            try {
                const retryQuery = copyQuery.replace('auto_detect=false)', 'auto_detect=false, IGNORE_ERRORS=true)');
                await conn.query(retryQuery);
            }
            catch (retryErr) {
                const retryMsg = retryErr instanceof Error ? retryErr.message : String(retryErr);
                throw new Error(`COPY failed for ${table}: ${retryMsg.slice(0, 200)}`);
            }
        }
    }
    // Bulk COPY relationships — split by FROM→TO label pair
    // Stream-read line by line to avoid exceeding V8 max string length
    let relHeader = '';
    const relsByPair = new Map();
    let skippedRels = 0;
    let totalValidRels = 0;
    await new Promise((resolve, reject) => {
        const rl = createInterface({ input: createReadStream(csvResult.relCsvPath, 'utf-8'), crlfDelay: Infinity });
        let isFirst = true;
        rl.on('line', (line) => {
            if (isFirst) {
                // First line is the CSV header — reused for each pair file.
                relHeader = line;
                isFirst = false;
                return;
            }
            if (!line.trim())
                return;
            // Only the first two quoted fields (fromId, toId) are needed here.
            const match = line.match(/"([^"]*)","([^"]*)"/);
            if (!match) {
                skippedRels++;
                return;
            }
            const fromLabel = getNodeLabel(match[1]);
            const toLabel = getNodeLabel(match[2]);
            if (!validTables.has(fromLabel) || !validTables.has(toLabel)) {
                skippedRels++;
                return;
            }
            const pairKey = `${fromLabel}|${toLabel}`;
            let list = relsByPair.get(pairKey);
            if (!list) {
                list = [];
                relsByPair.set(pairKey, list);
            }
            list.push(line);
            totalValidRels++;
        });
        rl.on('close', resolve);
        rl.on('error', reject);
    });
    // NOTE(review): insertedRels counts rows routed to COPY/fallback, not
    // confirmed inserts — pairs that fail even the fallback stay in the total.
    const insertedRels = totalValidRels;
    const warnings = [];
    if (insertedRels > 0) {
        log(`Loading edges: ${insertedRels.toLocaleString()} across ${relsByPair.size} types`);
        let pairIdx = 0;
        let failedPairEdges = 0;
        const failedPairLines = [];
        for (const [pairKey, lines] of relsByPair) {
            pairIdx++;
            const [fromLabel, toLabel] = pairKey.split('|');
            // Write a temporary per-pair CSV so COPY can target one
            // FROM→TO label combination at a time.
            const pairCsvPath = path.join(csvDir, `rel_${fromLabel}_${toLabel}.csv`);
            await fs.writeFile(pairCsvPath, relHeader + '\n' + lines.join('\n'), 'utf-8');
            const normalizedPath = normalizeCopyPath(pairCsvPath);
            const copyQuery = `COPY ${REL_TABLE_NAME} FROM "${normalizedPath}" (from="${fromLabel}", to="${toLabel}", HEADER=true, ESCAPE='"', DELIM=',', QUOTE='"', PARALLEL=false, auto_detect=false)`;
            if (pairIdx % 5 === 0 || lines.length > 1000) {
                log(`Loading edges: ${pairIdx}/${relsByPair.size} types (${fromLabel} -> ${toLabel})`);
            }
            try {
                await conn.query(copyQuery);
            }
            catch (err) {
                try {
                    const retryQuery = copyQuery.replace('auto_detect=false)', 'auto_detect=false, IGNORE_ERRORS=true)');
                    await conn.query(retryQuery);
                }
                catch (retryErr) {
                    // Edge COPY failures are not fatal — collect the lines for
                    // one-by-one fallback inserts and record a warning.
                    const retryMsg = retryErr instanceof Error ? retryErr.message : String(retryErr);
                    warnings.push(`${fromLabel}->${toLabel} (${lines.length} edges): ${retryMsg.slice(0, 80)}`);
                    failedPairEdges += lines.length;
                    failedPairLines.push(...lines);
                }
            }
            try {
                await fs.unlink(pairCsvPath);
            }
            catch { }
        }
        if (failedPairLines.length > 0) {
            log(`Inserting ${failedPairEdges} edges individually (missing schema pairs)`);
            await fallbackRelationshipInserts([relHeader, ...failedPairLines], validTables, getNodeLabel);
        }
    }
    // Clean up all CSV files (best-effort throughout)
    try {
        await fs.unlink(csvResult.relCsvPath);
    }
    catch { }
    for (const [, { csvPath }] of csvResult.nodeFiles) {
        try {
            await fs.unlink(csvPath);
        }
        catch { }
    }
    try {
        const remaining = await fs.readdir(csvDir);
        for (const f of remaining) {
            try {
                await fs.unlink(path.join(csvDir, f));
            }
            catch { }
        }
    }
    catch { }
    try {
        await fs.rmdir(csvDir);
    }
    catch { }
    return { success: true, insertedRels, skippedRels, warnings };
};
|
|
261
|
-
// COPY options: force RFC 4180 quoting (ESCAPE='"') and turn auto_detect off.
// Source-code content is full of backslashes, which break LadybugDB's
// default '\' escape character.
const COPY_CSV_OPTS = `(HEADER=true, ESCAPE='"', DELIM=',', QUOTE='"', PARALLEL=false, auto_detect=false)`;
// Multi-language tables were created with backtick-quoted names, so every
// query that touches them must backtick-quote the name as well.
const BACKTICK_TABLES = new Set([
    'Struct',
    'Enum',
    'Macro',
    'Typedef',
    'Union',
    'Namespace',
    'Trait',
    'Impl',
    'TypeAlias',
    'Const',
    'Static',
    'Property',
    'Record',
    'Delegate',
    'Annotation',
    'Constructor',
    'Template',
    'Module',
]);
// Backtick-quote a table name when (and only when) it requires quoting.
const escapeTableName = (table) => (BACKTICK_TABLES.has(table) ? '`' + table + '`' : table);
|
|
273
|
-
/**
 * Fallback: insert relationship rows one-by-one when a bulk COPY failed
 * (typically because the FROM→TO label pair is missing from the rel schema).
 * Each failed row is skipped silently — best-effort by design.
 *
 * @param validRelLines - CSV lines; index 0 is the header and is skipped
 * @param validTables - Set of node table names present in the schema
 * @param getNodeLabel - Maps a node id to its table/label name
 */
const fallbackRelationshipInserts = async (validRelLines, validTables, getNodeLabel) => {
    if (!conn)
        return;
    // Hoisted out of the loop — pattern and escaper are loop-invariant.
    // Columns: fromId, toId, type, confidence, reason, step
    const ROW_PATTERN = /"([^"]*)","([^"]*)","([^"]*)",([0-9.]+),"([^"]*)",([0-9-]+)/;
    const esc = (s) => s.replace(/'/g, "''").replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/\r/g, '\\r');
    for (let i = 1; i < validRelLines.length; i++) {
        const line = validRelLines[i];
        try {
            const match = line.match(ROW_PATTERN);
            if (!match)
                continue;
            const [, fromId, toId, relType, confidenceStr, reason, stepStr] = match;
            const fromLabel = getNodeLabel(fromId);
            const toLabel = getNodeLabel(toId);
            if (!validTables.has(fromLabel) || !validTables.has(toLabel))
                continue;
            const confidence = Number.parseFloat(confidenceStr) || 1.0;
            // Always pass the radix; stepStr may carry a leading '-'.
            const step = Number.parseInt(stepStr, 10) || 0;
            // Reuse the module-level table-name escaper instead of a local copy.
            await conn.query(`
      MATCH (a:${escapeTableName(fromLabel)} {id: '${esc(fromId)}' }),
            (b:${escapeTableName(toLabel)} {id: '${esc(toId)}' })
      CREATE (a)-[:${REL_TABLE_NAME} {type: '${esc(relType)}', confidence: ${confidence}, reason: '${esc(reason)}', step: ${step}}]->(b)
    `);
        }
        catch {
            // skip — a single failed edge must not abort the rest
        }
    }
};
|
|
305
|
-
// Tables whose schema includes an isExported column (TS/JS-native node types).
const TABLES_WITH_EXPORTED = new Set(['Function', 'Class', 'Interface', 'Method', 'CodeElement']);
// Build the COPY statement for one node table, choosing the column list that
// matches that table's schema.
const getCopyQuery = (table, filePath) => {
    const columnsFor = () => {
        switch (table) {
            case 'File':
                return 'id, name, filePath, content';
            case 'Folder':
                return 'id, name, filePath';
            case 'Community':
                return 'id, label, heuristicLabel, keywords, description, enrichedBy, cohesion, symbolCount';
            case 'Process':
                return 'id, label, heuristicLabel, processType, stepCount, communities, entryPointId, terminalId';
            case 'Method':
                return 'id, name, filePath, startLine, endLine, isExported, content, description, parameterCount, returnType';
            default:
                // TS/JS tables carry isExported; multi-language tables
                // (Struct, Impl, Trait, Macro, …) do not.
                return TABLES_WITH_EXPORTED.has(table)
                    ? 'id, name, filePath, startLine, endLine, isExported, content, description'
                    : 'id, name, filePath, startLine, endLine, content, description';
        }
    };
    return `COPY ${escapeTableName(table)}(${columnsFor()}) FROM "${filePath}" ${COPY_CSV_OPTS}`;
};
|
|
331
|
-
/**
 * Insert a single node into LadybugDB.
 *
 * @param label - Node type (File, Function, Class, etc)
 * @param properties - Node properties (id/name/filePath expected; rest optional)
 * @param dbPath - Path to LadybugDB database. When given, a short-lived
 *   connection is opened for just this insert (avoids lock conflicts with the
 *   singleton); otherwise the persistent connection is used.
 * @returns true when the insert ran; false when no connection was available
 *   or the insert failed (e.g. the node already exists)
 * @throws when neither dbPath nor an initialized database is available
 */
export const insertNodeToLbug = async (label, properties, dbPath) => {
    // Original computed `dbPath || (db ? undefined : null)`, which reduces to
    // using dbPath directly for every decision below.
    if (!dbPath && !db) {
        throw new Error('LadybugDB not initialized. Provide dbPath or call initLbug first.');
    }
    try {
        // Cypher string-literal escaping: backslashes first (Windows paths),
        // then single quotes and newlines.
        const escapeValue = (v) => {
            if (v === null || v === undefined)
                return 'NULL';
            if (typeof v === 'number')
                return String(v);
            return `'${String(v).replace(/\\/g, '\\\\').replace(/'/g, "''").replace(/\n/g, '\\n').replace(/\r/g, '\\r')}'`;
        };
        // Build the CREATE query for the node's schema shape.
        const t = escapeTableName(label);
        let query;
        if (label === 'File') {
            query = `CREATE (n:File {id: ${escapeValue(properties.id)}, name: ${escapeValue(properties.name)}, filePath: ${escapeValue(properties.filePath)}, content: ${escapeValue(properties.content || '')}})`;
        }
        else if (label === 'Folder') {
            query = `CREATE (n:Folder {id: ${escapeValue(properties.id)}, name: ${escapeValue(properties.name)}, filePath: ${escapeValue(properties.filePath)}})`;
        }
        else if (TABLES_WITH_EXPORTED.has(label)) {
            const descPart = properties.description ? `, description: ${escapeValue(properties.description)}` : '';
            query = `CREATE (n:${t} {id: ${escapeValue(properties.id)}, name: ${escapeValue(properties.name)}, filePath: ${escapeValue(properties.filePath)}, startLine: ${properties.startLine || 0}, endLine: ${properties.endLine || 0}, isExported: ${!!properties.isExported}, content: ${escapeValue(properties.content || '')}${descPart}})`;
        }
        else {
            // Multi-language tables — no isExported column
            const descPart = properties.description ? `, description: ${escapeValue(properties.description)}` : '';
            query = `CREATE (n:${t} {id: ${escapeValue(properties.id)}, name: ${escapeValue(properties.name)}, filePath: ${escapeValue(properties.filePath)}, startLine: ${properties.startLine || 0}, endLine: ${properties.endLine || 0}, content: ${escapeValue(properties.content || '')}${descPart}})`;
        }
        if (dbPath) {
            // Per-query connection to avoid lock conflicts
            const tempDb = new lbug.Database(dbPath);
            const tempConn = new lbug.Connection(tempDb);
            try {
                await tempConn.query(query);
                return true;
            }
            finally {
                try {
                    await tempConn.close();
                }
                catch { }
                try {
                    await tempDb.close();
                }
                catch { }
            }
        }
        if (conn) {
            // Use existing persistent connection
            await conn.query(query);
            return true;
        }
        return false;
    }
    catch (e) {
        // Node may already exist. Guard message extraction: the thrown value
        // is not guaranteed to be an Error instance.
        const msg = e instanceof Error ? e.message : String(e);
        console.error(`Failed to insert ${label} node:`, msg);
        return false;
    }
};
|
|
402
|
-
/**
 * Batch insert multiple nodes using a single short-lived connection.
 * Uses MERGE (upsert) so re-inserting an existing node updates it in place.
 *
 * @param nodes - Array of {label, properties} to insert
 * @param dbPath - Path to LadybugDB database (a temporary connection is
 *   opened against it and closed when done)
 * @returns { inserted, failed } counts; individual failures do not abort
 *   the batch
 */
export const batchInsertNodesToLbug = async (nodes, dbPath) => {
    if (nodes.length === 0)
        return { inserted: 0, failed: 0 };
    // Escape backslashes first, then single quotes and newlines, for safe
    // embedding in Cypher single-quoted string literals.
    const escapeValue = (v) => {
        if (v === null || v === undefined)
            return 'NULL';
        if (typeof v === 'number')
            return String(v);
        // Escape backslashes, single quotes, and newlines
        return `'${String(v).replace(/\\/g, '\\\\').replace(/'/g, "''").replace(/\n/g, '\\n').replace(/\r/g, '\\r')}'`;
    };
    // Single connection for all inserts
    const tempDb = new lbug.Database(dbPath);
    const tempConn = new lbug.Connection(tempDb);
    let inserted = 0;
    let failed = 0;
    try {
        for (const { label, properties } of nodes) {
            try {
                let query;
                // MERGE for upsert behavior (handles duplicates)
                const t = escapeTableName(label);
                if (label === 'File') {
                    query = `MERGE (n:File {id: ${escapeValue(properties.id)}}) SET n.name = ${escapeValue(properties.name)}, n.filePath = ${escapeValue(properties.filePath)}, n.content = ${escapeValue(properties.content || '')}`;
                }
                else if (label === 'Folder') {
                    query = `MERGE (n:Folder {id: ${escapeValue(properties.id)}}) SET n.name = ${escapeValue(properties.name)}, n.filePath = ${escapeValue(properties.filePath)}`;
                }
                else if (TABLES_WITH_EXPORTED.has(label)) {
                    // TS/JS-native tables carry an isExported column.
                    const descPart = properties.description ? `, n.description = ${escapeValue(properties.description)}` : '';
                    query = `MERGE (n:${t} {id: ${escapeValue(properties.id)}}) SET n.name = ${escapeValue(properties.name)}, n.filePath = ${escapeValue(properties.filePath)}, n.startLine = ${properties.startLine || 0}, n.endLine = ${properties.endLine || 0}, n.isExported = ${!!properties.isExported}, n.content = ${escapeValue(properties.content || '')}${descPart}`;
                }
                else {
                    // Multi-language tables — same shape minus isExported.
                    const descPart = properties.description ? `, n.description = ${escapeValue(properties.description)}` : '';
                    query = `MERGE (n:${t} {id: ${escapeValue(properties.id)}}) SET n.name = ${escapeValue(properties.name)}, n.filePath = ${escapeValue(properties.filePath)}, n.startLine = ${properties.startLine || 0}, n.endLine = ${properties.endLine || 0}, n.content = ${escapeValue(properties.content || '')}${descPart}`;
                }
                await tempConn.query(query);
                inserted++;
            }
            catch (e) {
                // Don't log to stderr — corrupts MCP JSON-RPC
                failed++;
            }
        }
    }
    finally {
        // Best-effort teardown of the temporary connection/database.
        try {
            await tempConn.close();
        }
        catch { }
        try {
            await tempDb.close();
        }
        catch { }
    }
    return { inserted, failed };
};
|
|
464
|
-
/**
 * Execute a raw Cypher query on the persistent connection and return all rows.
 * Multi-statement queries yield an array of QueryResults; only the first
 * statement's rows are returned in that case.
 * @throws when initLbug has not been called yet
 */
export const executeQuery = async (cypher) => {
    if (!conn) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    const raw = await conn.query(cypher);
    const first = Array.isArray(raw) ? raw[0] : raw;
    return first.getAll();
};
|
|
474
|
-
/**
 * Execute one parametrized Cypher statement for every params object in
 * `paramsList`, in sub-batches of 4.
 *
 * NOTE(review): the statement is re-prepared for every sub-batch rather than
 * once up front — presumably a workaround for prepared-statement state
 * issues; confirm before consolidating into a single prepare.
 *
 * @throws when initLbug has not been called, or when prepare fails
 */
export const executeWithReusedStatement = async (cypher, paramsList) => {
    if (!conn) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    if (paramsList.length === 0)
        return;
    const SUB_BATCH_SIZE = 4;
    let offset = 0;
    while (offset < paramsList.length) {
        const chunk = paramsList.slice(offset, offset + SUB_BATCH_SIZE);
        offset += SUB_BATCH_SIZE;
        const stmt = await conn.prepare(cypher);
        if (!stmt.isSuccess()) {
            const errMsg = await stmt.getErrorMessage();
            throw new Error(`Prepare failed: ${errMsg}`);
        }
        try {
            for (const params of chunk) {
                await conn.execute(stmt, params);
            }
        }
        catch (e) {
            // Warn and continue with the next sub-batch.
            console.warn('Batch execution error:', e);
        }
        // LadybugDB PreparedStatement needs no explicit close().
    }
};
|
|
500
|
-
/**
 * Count all nodes and relationships in the current database.
 * Returns zeros when no connection is open; tables that fail to count
 * (e.g. absent from this schema version) are silently skipped.
 */
export const getLbugStats = async () => {
    if (!conn)
        return { nodes: 0, edges: 0 };
    // Run one count query and extract the single `cnt` cell.
    const countFrom = async (cypher) => {
        const queryResult = await conn.query(cypher);
        const result = Array.isArray(queryResult) ? queryResult[0] : queryResult;
        const rows = await result.getAll();
        if (rows.length === 0)
            return 0;
        return Number(rows[0]?.cnt ?? rows[0]?.[0] ?? 0);
    };
    let nodes = 0;
    for (const tableName of NODE_TABLES) {
        try {
            nodes += await countFrom(`MATCH (n:${escapeTableName(tableName)}) RETURN count(n) AS cnt`);
        }
        catch {
            // ignore — table may not exist
        }
    }
    let edges = 0;
    try {
        edges = await countFrom(`MATCH ()-[r:${REL_TABLE_NAME}]->() RETURN count(r) AS cnt`);
    }
    catch {
        // ignore — relationship table may not exist
    }
    return { nodes, edges };
};
|
|
531
|
-
/**
 * Snapshot all cached embedding vectors before a rebuild.
 *
 * Returns every stored vector so it can be re-inserted after the graph
 * reloads, avoiding expensive re-embedding of unchanged nodes.
 * @returns { embeddingNodeIds: Set<string>, embeddings: {nodeId, embedding}[] }
 */
export const loadCachedEmbeddings = async () => {
    const embeddingNodeIds = new Set();
    const embeddings = [];
    if (!conn) {
        return { embeddingNodeIds, embeddings };
    }
    try {
        const queryResult = await conn.query(`MATCH (e:${EMBEDDING_TABLE_NAME}) RETURN e.nodeId AS nodeId, e.embedding AS embedding`);
        const result = Array.isArray(queryResult) ? queryResult[0] : queryResult;
        const allRows = await result.getAll();
        for (const row of allRows) {
            const nodeId = String(row.nodeId ?? row[0] ?? '');
            if (!nodeId)
                continue;
            embeddingNodeIds.add(nodeId);
            const vector = row.embedding ?? row[1];
            if (!vector)
                continue;
            // Normalize typed arrays / lists into a plain number[].
            const asNumbers = Array.isArray(vector) ? vector.map(Number) : Array.from(vector).map(Number);
            embeddings.push({ nodeId, embedding: asNumbers });
        }
    }
    catch {
        // Embedding table may not exist yet — treat as an empty cache.
    }
    return { embeddingNodeIds, embeddings };
};
|
|
563
|
-
/**
 * Close the LadybugDB connection and database, then reset all module
 * state. Close failures are swallowed — teardown is best-effort.
 */
export const closeLbug = async () => {
    if (conn !== null) {
        try {
            await conn.close();
        }
        catch {
            // best-effort close
        }
        conn = null;
    }
    if (db !== null) {
        try {
            await db.close();
        }
        catch {
            // best-effort close
        }
        db = null;
    }
    currentDbPath = null;
    ftsLoaded = false;
};
|
|
581
|
-
/** True when both the database handle and its connection are open. */
export const isLbugReady = () => db !== null && conn !== null;
|
|
582
|
-
/**
 * Delete all nodes (and their relationships) for a specific file.
 *
 * For every node table that carries a filePath property this counts the
 * matching nodes, then DETACH DELETEs them (removing their relationships
 * too). Finally it removes cached embeddings whose nodeId begins with the
 * file path. Per-table failures are skipped so one bad table cannot abort
 * the whole cleanup.
 *
 * @param filePath - File path to delete nodes for
 * @param dbPath - Optional path; when given, a dedicated per-query
 *   connection is opened (and always closed in `finally`) instead of the
 *   shared module connection
 * @returns {Promise<{deletedNodes: number}>} count of nodes removed
 * @throws when neither dbPath nor an initialized shared connection exists
 */
export const deleteNodesForFile = async (filePath, dbPath) => {
    const usePerQuery = !!dbPath;
    // Use existing connection or create per-query one
    let tempDb = null;
    let tempConn = null;
    let targetConn = conn;
    if (usePerQuery) {
        tempDb = new lbug.Database(dbPath);
        tempConn = new lbug.Connection(tempDb);
        targetConn = tempConn;
    }
    else if (!conn) {
        throw new Error('LadybugDB not initialized. Provide dbPath or call initLbug first.');
    }
    try {
        let deletedNodes = 0;
        // Double single quotes so the path cannot break out of the Cypher
        // string literal.
        const escapedPath = filePath.replace(/'/g, "''");
        // DETACH DELETE from each table with filePath
        for (const tableName of NODE_TABLES) {
            // Community and Process don't have filePath
            if (tableName === 'Community' || tableName === 'Process')
                continue;
            try {
                // Count first so the caller can be told how many were removed.
                const tn = escapeTableName(tableName);
                const countResult = await targetConn.query(`MATCH (n:${tn}) WHERE n.filePath = '${escapedPath}' RETURN count(n) AS cnt`);
                const result = Array.isArray(countResult) ? countResult[0] : countResult;
                const rows = await result.getAll();
                const count = Number(rows[0]?.cnt ?? rows[0]?.[0] ?? 0);
                if (count > 0) {
                    // DETACH DELETE removes node + all relationships
                    await targetConn.query(`MATCH (n:${tn}) WHERE n.filePath = '${escapedPath}' DETACH DELETE n`);
                    deletedNodes += count;
                }
            }
            catch {
                // Table may not support this query — skip
            }
        }
        // Delete embeddings for nodes in this file. Assumes nodeIds are
        // prefixed with the file path — TODO confirm against the writer side.
        try {
            await targetConn.query(`MATCH (e:${EMBEDDING_TABLE_NAME}) WHERE e.nodeId STARTS WITH '${escapedPath}' DELETE e`);
        }
        catch {
            // Embedding table may not exist
        }
        return { deletedNodes };
    }
    finally {
        // Close per-query connection/database if we opened one.
        if (tempConn) {
            try {
                await tempConn.close();
            }
            catch { }
        }
        if (tempDb) {
            try {
                await tempDb.close();
            }
            catch { }
        }
    }
};
|
|
651
|
-
/** Name of the embedding node table, for callers building their own queries. */
export const getEmbeddingTableName = () => {
    return EMBEDDING_TABLE_NAME;
};
|
|
652
|
-
// Full-Text Search (FTS) functions
/** Load the FTS extension (idempotent — tracks loaded state) */
export const loadFTSExtension = async () => {
    if (ftsLoaded) {
        return;
    }
    if (!conn) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    try {
        await conn.query('INSTALL fts');
        await conn.query('LOAD EXTENSION fts');
        ftsLoaded = true;
    }
    catch (err) {
        const msg = err?.message || '';
        // Any "already ..." error means the extension is in fact usable.
        const markers = ['already loaded', 'already installed', 'already exists'];
        if (markers.some((marker) => msg.includes(marker))) {
            ftsLoaded = true;
        }
        else {
            console.error('Code Mapper: FTS extension load failed:', msg);
        }
    }
};
|
|
675
|
-
/**
 * Create a full-text search index on a table.
 * @param tableName - Node table name (e.g. 'File', 'Function')
 * @param indexName - FTS index name
 * @param properties - Properties to index (e.g. ['name', 'content'])
 * @param stemmer - Stemming algorithm (default: 'porter')
 * @throws if LadybugDB is uninitialized, or on any creation error other
 *   than the index already existing
 */
export const createFTSIndex = async (tableName, indexName, properties, stemmer = 'porter') => {
    if (!conn) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    await loadFTSExtension();
    // Double single quotes so no argument can break out of its Cypher
    // string literal (injection hardening; a no-op for normal names).
    const esc = (s) => String(s).replace(/'/g, "''");
    const propList = properties.map(p => `'${esc(p)}'`).join(', ');
    const query = `CALL CREATE_FTS_INDEX('${esc(tableName)}', '${esc(indexName)}', [${propList}], stemmer := '${esc(stemmer)}')`;
    try {
        await conn.query(query);
    }
    catch (e) {
        // Creating an existing index is fine — rethrow anything else.
        if (!e.message?.includes('already exists')) {
            throw e;
        }
    }
};
|
|
698
|
-
/**
 * Query a full-text search index.
 * @param tableName - Node table name
 * @param indexName - FTS index name
 * @param query - Search query string
 * @param limit - Max results (coerced to a positive integer; default 20)
 * @param conjunctive - If true, all terms must match (AND); false = any term (OR)
 * @returns array of {nodeId, name, filePath, score, ...nodeProps} sorted
 *   by descending score; empty array when the index does not exist
 * @throws on any query error other than a missing index
 */
export const queryFTS = async (tableName, indexName, query, limit = 20, conjunctive = false) => {
    if (!conn) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    // Escape to prevent Cypher injection
    const escapedQuery = query.replace(/\\/g, '\\\\').replace(/'/g, "''");
    // Sanitize every other interpolated value as well: names cannot carry
    // quote characters, limit is forced to a positive integer, and
    // conjunctive is rendered as a strict boolean.
    const esc = (s) => String(s).replace(/'/g, "''");
    const safeLimit = Math.max(1, Math.trunc(Number(limit) || 20));
    const cypher = `
    CALL QUERY_FTS_INDEX('${esc(tableName)}', '${esc(indexName)}', '${escapedQuery}', conjunctive := ${conjunctive ? true : false})
    RETURN node, score
    ORDER BY score DESC
    LIMIT ${safeLimit}
  `;
    try {
        const queryResult = await conn.query(cypher);
        const result = Array.isArray(queryResult) ? queryResult[0] : queryResult;
        const rows = await result.getAll();
        return rows.map((row) => {
            // Tolerate both named-column and positional row shapes.
            const node = row.node || row[0] || {};
            const score = row.score ?? row[1] ?? 0;
            return {
                nodeId: node.nodeId || node.id || '',
                name: node.name || '',
                filePath: node.filePath || '',
                score: typeof score === 'number' ? score : parseFloat(score) || 0,
                ...node,
            };
        });
    }
    catch (e) {
        // Return empty if index doesn't exist
        if (e.message?.includes('does not exist')) {
            return [];
        }
        throw e;
    }
};
|
|
742
|
-
/**
 * Drop an FTS index. Best-effort: a missing index is silently ignored.
 */
export const dropFTSIndex = async (tableName, indexName) => {
    if (conn === null) {
        throw new Error('LadybugDB not initialized. Call initLbug first.');
    }
    const dropCall = `CALL DROP_FTS_INDEX('${tableName}', '${indexName}')`;
    try {
        await conn.query(dropCall);
    }
    catch {
        // Index may not exist — ignore
    }
};
|