@zuvia-software-solutions/code-mapper 1.4.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/ai-context.js +1 -1
- package/dist/cli/analyze.d.ts +1 -0
- package/dist/cli/analyze.js +73 -82
- package/dist/cli/augment.js +0 -2
- package/dist/cli/eval-server.d.ts +2 -2
- package/dist/cli/eval-server.js +6 -6
- package/dist/cli/index.js +6 -10
- package/dist/cli/mcp.d.ts +1 -3
- package/dist/cli/mcp.js +3 -3
- package/dist/cli/refresh.d.ts +2 -2
- package/dist/cli/refresh.js +24 -29
- package/dist/cli/status.js +4 -13
- package/dist/cli/tool.d.ts +5 -4
- package/dist/cli/tool.js +8 -10
- package/dist/config/ignore-service.js +14 -34
- package/dist/core/augmentation/engine.js +53 -83
- package/dist/core/db/adapter.d.ts +99 -0
- package/dist/core/db/adapter.js +402 -0
- package/dist/core/db/graph-loader.d.ts +27 -0
- package/dist/core/db/graph-loader.js +148 -0
- package/dist/core/db/queries.d.ts +160 -0
- package/dist/core/db/queries.js +441 -0
- package/dist/core/db/schema.d.ts +108 -0
- package/dist/core/db/schema.js +136 -0
- package/dist/core/embeddings/embedder.d.ts +21 -12
- package/dist/core/embeddings/embedder.js +104 -50
- package/dist/core/embeddings/embedding-pipeline.d.ts +48 -22
- package/dist/core/embeddings/embedding-pipeline.js +220 -262
- package/dist/core/embeddings/text-generator.js +4 -19
- package/dist/core/embeddings/types.d.ts +1 -1
- package/dist/core/graph/graph.d.ts +1 -1
- package/dist/core/graph/graph.js +1 -0
- package/dist/core/graph/types.d.ts +11 -9
- package/dist/core/graph/types.js +4 -1
- package/dist/core/incremental/refresh.d.ts +46 -0
- package/dist/core/incremental/refresh.js +503 -0
- package/dist/core/incremental/types.d.ts +2 -1
- package/dist/core/incremental/types.js +42 -44
- package/dist/core/ingestion/ast-cache.js +1 -0
- package/dist/core/ingestion/call-processor.d.ts +15 -3
- package/dist/core/ingestion/call-processor.js +448 -60
- package/dist/core/ingestion/cluster-enricher.d.ts +1 -1
- package/dist/core/ingestion/cluster-enricher.js +2 -0
- package/dist/core/ingestion/community-processor.d.ts +1 -1
- package/dist/core/ingestion/community-processor.js +8 -3
- package/dist/core/ingestion/export-detection.d.ts +1 -1
- package/dist/core/ingestion/export-detection.js +1 -1
- package/dist/core/ingestion/filesystem-walker.js +1 -1
- package/dist/core/ingestion/heritage-processor.d.ts +2 -2
- package/dist/core/ingestion/heritage-processor.js +22 -11
- package/dist/core/ingestion/import-processor.d.ts +2 -2
- package/dist/core/ingestion/import-processor.js +24 -9
- package/dist/core/ingestion/language-config.js +7 -4
- package/dist/core/ingestion/mro-processor.d.ts +1 -1
- package/dist/core/ingestion/mro-processor.js +23 -11
- package/dist/core/ingestion/named-binding-extraction.js +5 -5
- package/dist/core/ingestion/parsing-processor.d.ts +4 -4
- package/dist/core/ingestion/parsing-processor.js +26 -18
- package/dist/core/ingestion/pipeline.d.ts +4 -2
- package/dist/core/ingestion/pipeline.js +50 -20
- package/dist/core/ingestion/process-processor.d.ts +2 -2
- package/dist/core/ingestion/process-processor.js +28 -14
- package/dist/core/ingestion/resolution-context.d.ts +1 -1
- package/dist/core/ingestion/resolution-context.js +14 -4
- package/dist/core/ingestion/resolvers/csharp.js +4 -3
- package/dist/core/ingestion/resolvers/go.js +3 -1
- package/dist/core/ingestion/resolvers/jvm.js +13 -4
- package/dist/core/ingestion/resolvers/standard.js +2 -2
- package/dist/core/ingestion/resolvers/utils.js +6 -2
- package/dist/core/ingestion/route-stitcher.d.ts +15 -0
- package/dist/core/ingestion/route-stitcher.js +92 -0
- package/dist/core/ingestion/structure-processor.d.ts +1 -1
- package/dist/core/ingestion/structure-processor.js +3 -2
- package/dist/core/ingestion/symbol-table.d.ts +2 -0
- package/dist/core/ingestion/symbol-table.js +5 -1
- package/dist/core/ingestion/tree-sitter-queries.d.ts +2 -2
- package/dist/core/ingestion/tree-sitter-queries.js +177 -0
- package/dist/core/ingestion/type-env.js +20 -0
- package/dist/core/ingestion/type-extractors/csharp.js +4 -3
- package/dist/core/ingestion/type-extractors/go.js +23 -12
- package/dist/core/ingestion/type-extractors/php.js +18 -10
- package/dist/core/ingestion/type-extractors/ruby.js +15 -3
- package/dist/core/ingestion/type-extractors/rust.js +3 -2
- package/dist/core/ingestion/type-extractors/shared.js +3 -2
- package/dist/core/ingestion/type-extractors/typescript.js +11 -5
- package/dist/core/ingestion/utils.d.ts +27 -4
- package/dist/core/ingestion/utils.js +145 -100
- package/dist/core/ingestion/workers/parse-worker.d.ts +1 -0
- package/dist/core/ingestion/workers/parse-worker.js +97 -29
- package/dist/core/ingestion/workers/worker-pool.js +3 -0
- package/dist/core/search/bm25-index.d.ts +15 -8
- package/dist/core/search/bm25-index.js +48 -98
- package/dist/core/search/hybrid-search.d.ts +9 -3
- package/dist/core/search/hybrid-search.js +30 -25
- package/dist/core/search/reranker.js +9 -7
- package/dist/core/search/types.d.ts +0 -4
- package/dist/core/semantic/tsgo-service.d.ts +7 -1
- package/dist/core/semantic/tsgo-service.js +165 -66
- package/dist/lib/tsgo-test.d.ts +2 -0
- package/dist/lib/tsgo-test.js +6 -0
- package/dist/lib/type-utils.d.ts +25 -0
- package/dist/lib/type-utils.js +22 -0
- package/dist/lib/utils.d.ts +3 -2
- package/dist/lib/utils.js +3 -2
- package/dist/mcp/compatible-stdio-transport.js +1 -1
- package/dist/mcp/local/local-backend.d.ts +29 -56
- package/dist/mcp/local/local-backend.js +808 -1118
- package/dist/mcp/resources.js +35 -25
- package/dist/mcp/server.d.ts +1 -1
- package/dist/mcp/server.js +5 -5
- package/dist/mcp/tools.js +24 -25
- package/dist/storage/repo-manager.d.ts +2 -12
- package/dist/storage/repo-manager.js +1 -47
- package/dist/types/pipeline.d.ts +8 -5
- package/dist/types/pipeline.js +5 -0
- package/package.json +18 -11
- package/dist/cli/serve.d.ts +0 -5
- package/dist/cli/serve.js +0 -8
- package/dist/core/incremental/child-process.d.ts +0 -8
- package/dist/core/incremental/child-process.js +0 -649
- package/dist/core/incremental/refresh-coordinator.d.ts +0 -32
- package/dist/core/incremental/refresh-coordinator.js +0 -147
- package/dist/core/lbug/csv-generator.d.ts +0 -28
- package/dist/core/lbug/csv-generator.js +0 -355
- package/dist/core/lbug/lbug-adapter.d.ts +0 -96
- package/dist/core/lbug/lbug-adapter.js +0 -753
- package/dist/core/lbug/schema.d.ts +0 -46
- package/dist/core/lbug/schema.js +0 -402
- package/dist/mcp/core/embedder.d.ts +0 -24
- package/dist/mcp/core/embedder.js +0 -168
- package/dist/mcp/core/lbug-adapter.d.ts +0 -29
- package/dist/mcp/core/lbug-adapter.js +0 -330
- package/dist/server/api.d.ts +0 -5
- package/dist/server/api.js +0 -340
- package/dist/server/mcp-http.d.ts +0 -7
- package/dist/server/mcp-http.js +0 -95
- package/models/mlx-embedder.py +0 -185
|
@@ -1,649 +0,0 @@
|
|
|
1
|
-
// code-mapper/src/core/incremental/child-process.ts
|
|
2
|
-
/**
|
|
3
|
-
* @file child-process.ts
|
|
4
|
-
* @description Forked child process entry point for incremental DB updates. Opens
|
|
5
|
-
* LadybugDB in READ-WRITE mode, deletes stale nodes for changed files, re-parses
|
|
6
|
-
* them with tree-sitter, inserts new nodes/edges, resolves imports/calls/heritage,
|
|
7
|
-
* and rebuilds FTS indexes. Communicates with the parent via structured IPC messages
|
|
8
|
-
*/
|
|
9
|
-
import fs from 'fs/promises';
|
|
10
|
-
import path from 'path';
|
|
11
|
-
import Parser from 'tree-sitter';
|
|
12
|
-
import lbug from '@ladybugdb/core';
|
|
13
|
-
import { parseParentMessage, } from './types.js';
|
|
14
|
-
import { NODE_TABLES, REL_TABLE_NAME } from '../lbug/schema.js';
|
|
15
|
-
import { LANGUAGE_QUERIES } from '../ingestion/tree-sitter-queries.js';
|
|
16
|
-
import { getLanguageFromFilename, getDefinitionNodeFromCaptures } from '../ingestion/utils.js';
|
|
17
|
-
import { loadParser, loadLanguage, isLanguageAvailable } from '../tree-sitter/parser-loader.js';
|
|
18
|
-
import { getTreeSitterBufferSize, TREE_SITTER_MAX_BUFFER } from '../ingestion/constants.js';
|
|
19
|
-
import { generateId } from '../../lib/utils.js';
|
|
20
|
-
import { FTS_TABLES } from '../search/types.js';
|
|
21
|
-
// ---------------------------------------------------------------------------
|
|
22
|
-
// Constants
|
|
23
|
-
// ---------------------------------------------------------------------------
|
|
24
|
-
/** Tables that require backtick-quoting in Cypher because they collide with reserved words */
|
|
25
|
-
const BACKTICK_TABLES = new Set([
|
|
26
|
-
'Struct', 'Enum', 'Macro', 'Typedef', 'Union', 'Namespace', 'Trait', 'Impl',
|
|
27
|
-
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation',
|
|
28
|
-
'Constructor', 'Template', 'Module',
|
|
29
|
-
]);
|
|
30
|
-
/** Tables that are global metadata and should NOT be deleted per-file */
|
|
31
|
-
const SKIP_DELETE_TABLES = new Set(['Community', 'Process']);
|
|
32
|
-
// ---------------------------------------------------------------------------
|
|
33
|
-
// IPC helpers — all outbound messages go through sendMessage()
|
|
34
|
-
// ---------------------------------------------------------------------------
|
|
35
|
-
function sendMessage(msg) {
|
|
36
|
-
if (typeof process.send === 'function') {
|
|
37
|
-
process.send(msg);
|
|
38
|
-
}
|
|
39
|
-
}
|
|
40
|
-
function sendSuccess(payload) {
|
|
41
|
-
sendMessage({ kind: 'success', payload });
|
|
42
|
-
}
|
|
43
|
-
function sendError(err) {
|
|
44
|
-
const message = err instanceof Error ? err.message : String(err);
|
|
45
|
-
const stack = err instanceof Error ? err.stack : undefined;
|
|
46
|
-
sendMessage({ kind: 'error', message, stack });
|
|
47
|
-
}
|
|
48
|
-
// Typed progress sender to avoid inline casting
|
|
49
|
-
function progress(phase, detail) {
|
|
50
|
-
sendMessage({ kind: 'progress', phase, detail });
|
|
51
|
-
}
|
|
52
|
-
/** Log a non-fatal warning to stderr so it's visible in MCP server logs */
|
|
53
|
-
function warn(context, err) {
|
|
54
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
55
|
-
process.stderr.write(`[incremental-child] ${context}: ${msg}\n`);
|
|
56
|
-
}
|
|
57
|
-
// ---------------------------------------------------------------------------
|
|
58
|
-
// Cypher helpers
|
|
59
|
-
// ---------------------------------------------------------------------------
|
|
60
|
-
/** Quote a table name for Cypher if it requires backticks */
|
|
61
|
-
function quoteTable(table) {
|
|
62
|
-
return BACKTICK_TABLES.has(table) ? `\`${table}\`` : table;
|
|
63
|
-
}
|
|
64
|
-
/** Escape a string value for use inside a Cypher single-quoted literal */
|
|
65
|
-
function escapeCypher(value) {
|
|
66
|
-
return value.replace(/\\/g, '\\\\').replace(/'/g, "\\'");
|
|
67
|
-
}
|
|
68
|
-
// ---------------------------------------------------------------------------
|
|
69
|
-
// Existing file paths (passed from parent via env)
|
|
70
|
-
// ---------------------------------------------------------------------------
|
|
71
|
-
function loadExistingPaths() {
|
|
72
|
-
const raw = process.env.CODE_MAPPER_EXISTING_PATHS;
|
|
73
|
-
if (!raw)
|
|
74
|
-
return new Set();
|
|
75
|
-
try {
|
|
76
|
-
const parsed = JSON.parse(raw);
|
|
77
|
-
if (!Array.isArray(parsed))
|
|
78
|
-
return new Set();
|
|
79
|
-
return new Set(parsed.filter((p) => typeof p === 'string'));
|
|
80
|
-
}
|
|
81
|
-
catch {
|
|
82
|
-
return new Set();
|
|
83
|
-
}
|
|
84
|
-
}
|
|
85
|
-
// ---------------------------------------------------------------------------
|
|
86
|
-
// Core refresh pipeline
|
|
87
|
-
// ---------------------------------------------------------------------------
|
|
88
|
-
async function runRefresh(payload) {
|
|
89
|
-
const t0 = Date.now();
|
|
90
|
-
const { repoRoot, dbPath, dirtyFiles } = payload;
|
|
91
|
-
const existingPaths = loadExistingPaths();
|
|
92
|
-
let nodesDeleted = 0;
|
|
93
|
-
let nodesInserted = 0;
|
|
94
|
-
let edgesInserted = 0;
|
|
95
|
-
let filesSkipped = 0;
|
|
96
|
-
// -- Open DB in READ-WRITE mode -----------------------------------------
|
|
97
|
-
// If this fails with a lock error, the parent process didn't release its
|
|
98
|
-
// read-only handle before forking. This is a bug in the coordinator flow.
|
|
99
|
-
let db;
|
|
100
|
-
let conn;
|
|
101
|
-
try {
|
|
102
|
-
db = new lbug.Database(dbPath);
|
|
103
|
-
conn = new lbug.Connection(db);
|
|
104
|
-
}
|
|
105
|
-
catch (err) {
|
|
106
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
107
|
-
throw new Error(`Failed to open DB for write: ${msg} (dbPath=${dbPath})`);
|
|
108
|
-
}
|
|
109
|
-
try {
|
|
110
|
-
// =====================================================================
|
|
111
|
-
// Phase 1 — Delete old nodes for all dirty files
|
|
112
|
-
// =====================================================================
|
|
113
|
-
progress('deleting-old-nodes', `Deleting nodes for ${dirtyFiles.length} file(s)`);
|
|
114
|
-
for (const entry of dirtyFiles) {
|
|
115
|
-
const escaped = escapeCypher(entry.relativePath);
|
|
116
|
-
for (const table of NODE_TABLES) {
|
|
117
|
-
if (SKIP_DELETE_TABLES.has(table))
|
|
118
|
-
continue;
|
|
119
|
-
const quoted = quoteTable(table);
|
|
120
|
-
try {
|
|
121
|
-
await conn.query(`MATCH (n:${quoted}) WHERE n.filePath = '${escaped}' DETACH DELETE n`);
|
|
122
|
-
nodesDeleted += 1; // Approximate — one query per table per file
|
|
123
|
-
}
|
|
124
|
-
catch (err) {
|
|
125
|
-
warn(`DELETE ${table} for ${entry.relativePath}`, err);
|
|
126
|
-
}
|
|
127
|
-
}
|
|
128
|
-
}
|
|
129
|
-
// =====================================================================
|
|
130
|
-
// Phase 2 — Parse changed/created files with tree-sitter
|
|
131
|
-
// =====================================================================
|
|
132
|
-
progress('parsing', 'Parsing modified and created files');
|
|
133
|
-
const parser = await loadParser();
|
|
134
|
-
const allDefinitions = [];
|
|
135
|
-
const allImports = [];
|
|
136
|
-
const allCalls = [];
|
|
137
|
-
const allHeritage = [];
|
|
138
|
-
// Track File nodes we insert so we know which files are in the DB
|
|
139
|
-
const insertedFilePaths = new Set();
|
|
140
|
-
const filesToProcess = dirtyFiles.filter((f) => f.changeKind === 'modified' || f.changeKind === 'created');
|
|
141
|
-
for (const entry of filesToProcess) {
|
|
142
|
-
const relPath = entry.relativePath;
|
|
143
|
-
const absPath = path.resolve(repoRoot, relPath);
|
|
144
|
-
// Determine language
|
|
145
|
-
const language = getLanguageFromFilename(relPath);
|
|
146
|
-
if (!language) {
|
|
147
|
-
filesSkipped += 1;
|
|
148
|
-
continue;
|
|
149
|
-
}
|
|
150
|
-
if (!isLanguageAvailable(language)) {
|
|
151
|
-
filesSkipped += 1;
|
|
152
|
-
continue;
|
|
153
|
-
}
|
|
154
|
-
// Read file content from disk
|
|
155
|
-
let content;
|
|
156
|
-
try {
|
|
157
|
-
content = await fs.readFile(absPath, 'utf-8');
|
|
158
|
-
}
|
|
159
|
-
catch {
|
|
160
|
-
// File may have been deleted between detection and processing
|
|
161
|
-
filesSkipped += 1;
|
|
162
|
-
continue;
|
|
163
|
-
}
|
|
164
|
-
// Skip files exceeding tree-sitter buffer limits
|
|
165
|
-
if (content.length > TREE_SITTER_MAX_BUFFER) {
|
|
166
|
-
filesSkipped += 1;
|
|
167
|
-
continue;
|
|
168
|
-
}
|
|
169
|
-
// Load language and parse
|
|
170
|
-
try {
|
|
171
|
-
await loadLanguage(language, relPath);
|
|
172
|
-
}
|
|
173
|
-
catch {
|
|
174
|
-
filesSkipped += 1;
|
|
175
|
-
continue;
|
|
176
|
-
}
|
|
177
|
-
let tree;
|
|
178
|
-
try {
|
|
179
|
-
tree = parser.parse(content, undefined, {
|
|
180
|
-
bufferSize: getTreeSitterBufferSize(content.length),
|
|
181
|
-
});
|
|
182
|
-
}
|
|
183
|
-
catch {
|
|
184
|
-
filesSkipped += 1;
|
|
185
|
-
continue;
|
|
186
|
-
}
|
|
187
|
-
// Run tree-sitter queries to extract definitions, imports, calls, heritage
|
|
188
|
-
const queryString = LANGUAGE_QUERIES[language];
|
|
189
|
-
if (!queryString) {
|
|
190
|
-
filesSkipped += 1;
|
|
191
|
-
continue;
|
|
192
|
-
}
|
|
193
|
-
let query;
|
|
194
|
-
let matches;
|
|
195
|
-
try {
|
|
196
|
-
const tsLanguage = parser.getLanguage();
|
|
197
|
-
query = new Parser.Query(tsLanguage, queryString);
|
|
198
|
-
matches = query.matches(tree.rootNode);
|
|
199
|
-
}
|
|
200
|
-
catch {
|
|
201
|
-
filesSkipped += 1;
|
|
202
|
-
continue;
|
|
203
|
-
}
|
|
204
|
-
// Track this file for File node insertion
|
|
205
|
-
insertedFilePaths.add(relPath);
|
|
206
|
-
// Process each match
|
|
207
|
-
for (const match of matches) {
|
|
208
|
-
const captureMap = {};
|
|
209
|
-
for (const c of match.captures) {
|
|
210
|
-
captureMap[c.name] = c.node;
|
|
211
|
-
}
|
|
212
|
-
// -- Extract imports ------------------------------------------------
|
|
213
|
-
if (captureMap['import'] || captureMap['import.source']) {
|
|
214
|
-
const sourceNode = captureMap['import.source'];
|
|
215
|
-
if (sourceNode) {
|
|
216
|
-
// Strip surrounding quotes from import source
|
|
217
|
-
const raw = sourceNode.text;
|
|
218
|
-
const source = raw.replace(/^['"`]|['"`]$/g, '');
|
|
219
|
-
allImports.push({ filePath: relPath, source });
|
|
220
|
-
}
|
|
221
|
-
continue;
|
|
222
|
-
}
|
|
223
|
-
// -- Extract calls --------------------------------------------------
|
|
224
|
-
if (captureMap['call'] || captureMap['call.name']) {
|
|
225
|
-
const callNameNode = captureMap['call.name'];
|
|
226
|
-
if (callNameNode) {
|
|
227
|
-
allCalls.push({
|
|
228
|
-
filePath: relPath,
|
|
229
|
-
callerNodeId: null, // Simplified — we don't track the caller node
|
|
230
|
-
calleeName: callNameNode.text,
|
|
231
|
-
});
|
|
232
|
-
}
|
|
233
|
-
continue;
|
|
234
|
-
}
|
|
235
|
-
// -- Extract heritage -----------------------------------------------
|
|
236
|
-
if (captureMap['heritage'] || captureMap['heritage.impl']) {
|
|
237
|
-
const classNameNode = captureMap['heritage.class'];
|
|
238
|
-
const extendsNode = captureMap['heritage.extends'];
|
|
239
|
-
const implementsNode = captureMap['heritage.implements'];
|
|
240
|
-
if (classNameNode && extendsNode) {
|
|
241
|
-
const className = classNameNode.text;
|
|
242
|
-
allHeritage.push({
|
|
243
|
-
filePath: relPath,
|
|
244
|
-
className,
|
|
245
|
-
classNodeId: generateId('Class', `${relPath}:${className}`),
|
|
246
|
-
parentName: extendsNode.text,
|
|
247
|
-
kind: 'extends',
|
|
248
|
-
});
|
|
249
|
-
}
|
|
250
|
-
if (classNameNode && implementsNode) {
|
|
251
|
-
const className = classNameNode.text;
|
|
252
|
-
allHeritage.push({
|
|
253
|
-
filePath: relPath,
|
|
254
|
-
className,
|
|
255
|
-
classNodeId: generateId('Class', `${relPath}:${className}`),
|
|
256
|
-
parentName: implementsNode.text,
|
|
257
|
-
kind: 'implements',
|
|
258
|
-
});
|
|
259
|
-
}
|
|
260
|
-
continue;
|
|
261
|
-
}
|
|
262
|
-
// -- Extract definitions --------------------------------------------
|
|
263
|
-
const nameNode = captureMap['name'];
|
|
264
|
-
if (!nameNode && !captureMap['definition.constructor'])
|
|
265
|
-
continue;
|
|
266
|
-
const nodeName = nameNode ? nameNode.text : 'init';
|
|
267
|
-
let nodeLabel = 'CodeElement';
|
|
268
|
-
if (captureMap['definition.function'])
|
|
269
|
-
nodeLabel = 'Function';
|
|
270
|
-
else if (captureMap['definition.class'])
|
|
271
|
-
nodeLabel = 'Class';
|
|
272
|
-
else if (captureMap['definition.interface'])
|
|
273
|
-
nodeLabel = 'Interface';
|
|
274
|
-
else if (captureMap['definition.method'])
|
|
275
|
-
nodeLabel = 'Method';
|
|
276
|
-
else if (captureMap['definition.struct'])
|
|
277
|
-
nodeLabel = 'Struct';
|
|
278
|
-
else if (captureMap['definition.enum'])
|
|
279
|
-
nodeLabel = 'Enum';
|
|
280
|
-
else if (captureMap['definition.namespace'])
|
|
281
|
-
nodeLabel = 'Namespace';
|
|
282
|
-
else if (captureMap['definition.module'])
|
|
283
|
-
nodeLabel = 'Module';
|
|
284
|
-
else if (captureMap['definition.trait'])
|
|
285
|
-
nodeLabel = 'Trait';
|
|
286
|
-
else if (captureMap['definition.impl'])
|
|
287
|
-
nodeLabel = 'Impl';
|
|
288
|
-
else if (captureMap['definition.type'])
|
|
289
|
-
nodeLabel = 'TypeAlias';
|
|
290
|
-
else if (captureMap['definition.const'])
|
|
291
|
-
nodeLabel = 'Const';
|
|
292
|
-
else if (captureMap['definition.static'])
|
|
293
|
-
nodeLabel = 'Static';
|
|
294
|
-
else if (captureMap['definition.typedef'])
|
|
295
|
-
nodeLabel = 'Typedef';
|
|
296
|
-
else if (captureMap['definition.macro'])
|
|
297
|
-
nodeLabel = 'Macro';
|
|
298
|
-
else if (captureMap['definition.union'])
|
|
299
|
-
nodeLabel = 'Union';
|
|
300
|
-
else if (captureMap['definition.property'])
|
|
301
|
-
nodeLabel = 'Property';
|
|
302
|
-
else if (captureMap['definition.record'])
|
|
303
|
-
nodeLabel = 'Record';
|
|
304
|
-
else if (captureMap['definition.delegate'])
|
|
305
|
-
nodeLabel = 'Delegate';
|
|
306
|
-
else if (captureMap['definition.annotation'])
|
|
307
|
-
nodeLabel = 'Annotation';
|
|
308
|
-
else if (captureMap['definition.constructor'])
|
|
309
|
-
nodeLabel = 'Constructor';
|
|
310
|
-
else if (captureMap['definition.template'])
|
|
311
|
-
nodeLabel = 'Template';
|
|
312
|
-
const definitionNode = getDefinitionNodeFromCaptures(captureMap);
|
|
313
|
-
const startLine = definitionNode
|
|
314
|
-
? definitionNode.startPosition.row
|
|
315
|
-
: (nameNode ? nameNode.startPosition.row : 0);
|
|
316
|
-
const endLine = definitionNode
|
|
317
|
-
? definitionNode.endPosition.row
|
|
318
|
-
: startLine;
|
|
319
|
-
const nodeId = generateId(nodeLabel, `${relPath}:${nodeName}`);
|
|
320
|
-
// Truncate content — aligned with csv-generator MAX_SNIPPET (50K)
|
|
321
|
-
const nodeContent = definitionNode
|
|
322
|
-
? (definitionNode.text || '').slice(0, 50_000)
|
|
323
|
-
: '';
|
|
324
|
-
allDefinitions.push({
|
|
325
|
-
nodeId,
|
|
326
|
-
name: nodeName,
|
|
327
|
-
label: nodeLabel,
|
|
328
|
-
filePath: relPath,
|
|
329
|
-
startLine,
|
|
330
|
-
endLine,
|
|
331
|
-
content: nodeContent,
|
|
332
|
-
});
|
|
333
|
-
}
|
|
334
|
-
}
|
|
335
|
-
// =====================================================================
|
|
336
|
-
// Phase 3 — Insert File nodes + symbol nodes into DB
|
|
337
|
-
// =====================================================================
|
|
338
|
-
progress('inserting-nodes', `Inserting ${allDefinitions.length} symbol(s) across ${insertedFilePaths.size} file(s)`);
|
|
339
|
-
// Insert File nodes
|
|
340
|
-
for (const filePath of insertedFilePaths) {
|
|
341
|
-
const fileId = generateId('File', filePath);
|
|
342
|
-
const fileName = path.basename(filePath);
|
|
343
|
-
const escapedPath = escapeCypher(filePath);
|
|
344
|
-
const escapedName = escapeCypher(fileName);
|
|
345
|
-
try {
|
|
346
|
-
await conn.query(`CREATE (n:File {id: '${escapeCypher(fileId)}', name: '${escapedName}', filePath: '${escapedPath}', content: ''})`);
|
|
347
|
-
nodesInserted += 1;
|
|
348
|
-
}
|
|
349
|
-
catch (err) {
|
|
350
|
-
warn(`CREATE File node ${filePath}`, err);
|
|
351
|
-
}
|
|
352
|
-
}
|
|
353
|
-
// Insert symbol (definition) nodes
|
|
354
|
-
for (const def of allDefinitions) {
|
|
355
|
-
const quoted = quoteTable(def.label);
|
|
356
|
-
const escapedId = escapeCypher(def.nodeId);
|
|
357
|
-
const escapedName = escapeCypher(def.name);
|
|
358
|
-
const escapedPath = escapeCypher(def.filePath);
|
|
359
|
-
const escapedContent = escapeCypher(def.content);
|
|
360
|
-
try {
|
|
361
|
-
await conn.query(`CREATE (n:${quoted} {id: '${escapedId}', name: '${escapedName}', filePath: '${escapedPath}', startLine: ${def.startLine}, endLine: ${def.endLine}, content: '${escapedContent}', description: ''})`);
|
|
362
|
-
nodesInserted += 1;
|
|
363
|
-
}
|
|
364
|
-
catch (err) {
|
|
365
|
-
warn(`CREATE ${def.label} node ${def.name} in ${def.filePath}`, err);
|
|
366
|
-
}
|
|
367
|
-
// Create DEFINES edge from File to symbol
|
|
368
|
-
const fileId = generateId('File', def.filePath);
|
|
369
|
-
try {
|
|
370
|
-
await conn.query(`MATCH (a:File), (b:${quoted}) WHERE a.id = '${escapeCypher(fileId)}' AND b.id = '${escapedId}' CREATE (a)-[:${REL_TABLE_NAME} {type: 'DEFINES', confidence: 1.0, reason: '', step: 0}]->(b)`);
|
|
371
|
-
edgesInserted += 1;
|
|
372
|
-
}
|
|
373
|
-
catch (err) {
|
|
374
|
-
warn(`CREATE DEFINES edge File→${def.label} for ${def.name}`, err);
|
|
375
|
-
}
|
|
376
|
-
}
|
|
377
|
-
// =====================================================================
|
|
378
|
-
// Phase 4 — Resolve imports
|
|
379
|
-
// =====================================================================
|
|
380
|
-
progress('resolving-imports', `Resolving ${allImports.length} import(s)`);
|
|
381
|
-
// Build a set of all known file paths (existing + newly inserted)
|
|
382
|
-
const allKnownPaths = new Set(existingPaths);
|
|
383
|
-
for (const fp of insertedFilePaths) {
|
|
384
|
-
allKnownPaths.add(fp);
|
|
385
|
-
}
|
|
386
|
-
for (const imp of allImports) {
|
|
387
|
-
const targetPath = resolveImportSource(imp.source, imp.filePath, allKnownPaths);
|
|
388
|
-
if (!targetPath)
|
|
389
|
-
continue;
|
|
390
|
-
const fromFileId = generateId('File', imp.filePath);
|
|
391
|
-
const toFileId = generateId('File', targetPath);
|
|
392
|
-
try {
|
|
393
|
-
await conn.query(`MATCH (a:File), (b:File) WHERE a.id = '${escapeCypher(fromFileId)}' AND b.id = '${escapeCypher(toFileId)}' CREATE (a)-[:${REL_TABLE_NAME} {type: 'IMPORTS', confidence: 0.9, reason: 'incremental-import-resolution', step: 0}]->(b)`);
|
|
394
|
-
edgesInserted += 1;
|
|
395
|
-
}
|
|
396
|
-
catch (err) {
|
|
397
|
-
warn(`CREATE IMPORTS edge ${imp.filePath}→${targetPath}`, err);
|
|
398
|
-
}
|
|
399
|
-
}
|
|
400
|
-
// =====================================================================
|
|
401
|
-
// Phase 5 — Resolve calls
|
|
402
|
-
// =====================================================================
|
|
403
|
-
progress('resolving-calls', `Resolving ${allCalls.length} call site(s)`);
|
|
404
|
-
// Build a set of files that the current file imports (for preference scoring)
|
|
405
|
-
const fileImportTargets = new Map();
|
|
406
|
-
for (const imp of allImports) {
|
|
407
|
-
const targetPath = resolveImportSource(imp.source, imp.filePath, allKnownPaths);
|
|
408
|
-
if (targetPath) {
|
|
409
|
-
let targets = fileImportTargets.get(imp.filePath);
|
|
410
|
-
if (!targets) {
|
|
411
|
-
targets = new Set();
|
|
412
|
-
fileImportTargets.set(imp.filePath, targets);
|
|
413
|
-
}
|
|
414
|
-
targets.add(targetPath);
|
|
415
|
-
}
|
|
416
|
-
}
|
|
417
|
-
for (const call of allCalls) {
|
|
418
|
-
const calleeName = call.calleeName;
|
|
419
|
-
const escapedCalleeName = escapeCypher(calleeName);
|
|
420
|
-
// Query DB for candidate targets
|
|
421
|
-
let candidates;
|
|
422
|
-
try {
|
|
423
|
-
const result = await conn.query(`MATCH (n) WHERE n.name = '${escapedCalleeName}' AND (n:Function OR n:Method OR n:Class OR n:Constructor) RETURN n.id AS id, n.filePath AS filePath LIMIT 5`);
|
|
424
|
-
const resultObj = Array.isArray(result) ? result[0] : result;
|
|
425
|
-
const rows = await resultObj.getAll();
|
|
426
|
-
candidates = rows.map((row) => ({
|
|
427
|
-
id: String(row.id ?? ''),
|
|
428
|
-
filePath: String(row.filePath ?? ''),
|
|
429
|
-
}));
|
|
430
|
-
}
|
|
431
|
-
catch (err) {
|
|
432
|
-
warn(`query call candidates for ${calleeName}`, err);
|
|
433
|
-
continue;
|
|
434
|
-
}
|
|
435
|
-
if (candidates.length === 0)
|
|
436
|
-
continue;
|
|
437
|
-
// Pick the best candidate: prefer same-file, then imported file, then any
|
|
438
|
-
const importedFiles = fileImportTargets.get(call.filePath);
|
|
439
|
-
let best = candidates[0];
|
|
440
|
-
for (const c of candidates) {
|
|
441
|
-
if (c.filePath === call.filePath) {
|
|
442
|
-
best = c;
|
|
443
|
-
break;
|
|
444
|
-
}
|
|
445
|
-
if (importedFiles?.has(c.filePath)) {
|
|
446
|
-
best = c;
|
|
447
|
-
// Don't break — same-file is still better
|
|
448
|
-
}
|
|
449
|
-
}
|
|
450
|
-
// Determine confidence based on match quality
|
|
451
|
-
let confidence = 0.5;
|
|
452
|
-
if (best.filePath === call.filePath)
|
|
453
|
-
confidence = 0.9;
|
|
454
|
-
else if (importedFiles?.has(best.filePath))
|
|
455
|
-
confidence = 0.8;
|
|
456
|
-
// Determine caller node — use the first definition in this file, or the file itself
|
|
457
|
-
const callerFileId = generateId('File', call.filePath);
|
|
458
|
-
const escapedCallerId = escapeCypher(callerFileId);
|
|
459
|
-
const escapedTargetId = escapeCypher(best.id);
|
|
460
|
-
try {
|
|
461
|
-
await conn.query(`MATCH (a:File), (b) WHERE a.id = '${escapedCallerId}' AND b.id = '${escapedTargetId}' CREATE (a)-[:${REL_TABLE_NAME} {type: 'CALLS', confidence: ${confidence}, reason: 'incremental-call-resolution', step: 0}]->(b)`);
|
|
462
|
-
edgesInserted += 1;
|
|
463
|
-
}
|
|
464
|
-
catch (err) {
|
|
465
|
-
warn(`CREATE CALLS edge for ${call.calleeName}`, err);
|
|
466
|
-
}
|
|
467
|
-
}
|
|
468
|
-
// =====================================================================
|
|
469
|
-
// Phase 6 — Resolve heritage (extends/implements)
|
|
470
|
-
// =====================================================================
|
|
471
|
-
progress('resolving-heritage', `Resolving ${allHeritage.length} heritage relationship(s)`);
|
|
472
|
-
for (const h of allHeritage) {
|
|
473
|
-
const escapedParentName = escapeCypher(h.parentName);
|
|
474
|
-
const relType = h.kind === 'extends' ? 'EXTENDS' : 'IMPLEMENTS';
|
|
475
|
-
// Query DB for the parent class/interface
|
|
476
|
-
let parentId = null;
|
|
477
|
-
try {
|
|
478
|
-
const result = await conn.query(`MATCH (n) WHERE n.name = '${escapedParentName}' AND (n:Class OR n:Interface OR n:\`Struct\` OR n:\`Trait\`) RETURN n.id AS id LIMIT 1`);
|
|
479
|
-
const resultObj = Array.isArray(result) ? result[0] : result;
|
|
480
|
-
const rows = await resultObj.getAll();
|
|
481
|
-
if (rows.length > 0) {
|
|
482
|
-
parentId = String(rows[0].id ?? '');
|
|
483
|
-
}
|
|
484
|
-
}
|
|
485
|
-
catch (err) {
|
|
486
|
-
warn(`query heritage parent ${h.parentName}`, err);
|
|
487
|
-
continue;
|
|
488
|
-
}
|
|
489
|
-
if (!parentId)
|
|
490
|
-
continue;
|
|
491
|
-
const escapedClassId = escapeCypher(h.classNodeId);
|
|
492
|
-
const escapedParentId = escapeCypher(parentId);
|
|
493
|
-
try {
|
|
494
|
-
await conn.query(`MATCH (a), (b) WHERE a.id = '${escapedClassId}' AND b.id = '${escapedParentId}' CREATE (a)-[:${REL_TABLE_NAME} {type: '${relType}', confidence: 1.0, reason: 'incremental-heritage-resolution', step: 0}]->(b)`);
|
|
495
|
-
edgesInserted += 1;
|
|
496
|
-
}
|
|
497
|
-
catch (err) {
|
|
498
|
-
warn(`CREATE ${relType} edge ${h.className}→${h.parentName}`, err);
|
|
499
|
-
}
|
|
500
|
-
}
|
|
501
|
-
// =====================================================================
|
|
502
|
-
// Phase 7 — Rebuild FTS indexes
|
|
503
|
-
// =====================================================================
|
|
504
|
-
progress('rebuilding-fts', 'Rebuilding full-text search indexes');
|
|
505
|
-
// Load FTS extension
|
|
506
|
-
try {
|
|
507
|
-
await conn.query('INSTALL fts');
|
|
508
|
-
await conn.query('LOAD EXTENSION fts');
|
|
509
|
-
}
|
|
510
|
-
catch (err) {
|
|
511
|
-
const msg = err instanceof Error ? err.message : '';
|
|
512
|
-
if (!msg.includes('already loaded') && !msg.includes('already installed') && !msg.includes('already exists')) {
|
|
513
|
-
// FTS extension unavailable — skip FTS rebuild, not fatal
|
|
514
|
-
progress('rebuilding-fts', 'FTS extension unavailable, skipping');
|
|
515
|
-
}
|
|
516
|
-
}
|
|
517
|
-
// Drop and recreate each FTS index
|
|
518
|
-
for (const { table, index } of FTS_TABLES) {
|
|
519
|
-
try {
|
|
520
|
-
await conn.query(`CALL DROP_FTS_INDEX('${table}', '${index}')`);
|
|
521
|
-
}
|
|
522
|
-
catch {
|
|
523
|
-
// Index may not exist — ignore
|
|
524
|
-
}
|
|
525
|
-
try {
|
|
526
|
-
await conn.query(`CALL CREATE_FTS_INDEX('${table}', '${index}', ['name', 'content'], stemmer := 'porter')`);
|
|
527
|
-
}
|
|
528
|
-
catch (err) {
|
|
529
|
-
const msg = err instanceof Error ? err.message : '';
|
|
530
|
-
if (!msg.includes('already exists')) {
|
|
531
|
-
// Non-fatal — log but continue
|
|
532
|
-
}
|
|
533
|
-
}
|
|
534
|
-
}
|
|
535
|
-
// =====================================================================
|
|
536
|
-
// Done
|
|
537
|
-
// =====================================================================
|
|
538
|
-
progress('done', 'Refresh complete');
|
|
539
|
-
const result = {
|
|
540
|
-
filesProcessed: filesToProcess.length,
|
|
541
|
-
filesSkipped,
|
|
542
|
-
nodesDeleted,
|
|
543
|
-
nodesInserted,
|
|
544
|
-
edgesInserted,
|
|
545
|
-
durationMs: Date.now() - t0,
|
|
546
|
-
};
|
|
547
|
-
return result;
|
|
548
|
-
}
|
|
549
|
-
finally {
|
|
550
|
-
// Always close DB connection
|
|
551
|
-
try {
|
|
552
|
-
await conn.close();
|
|
553
|
-
}
|
|
554
|
-
catch { }
|
|
555
|
-
try {
|
|
556
|
-
await db.close();
|
|
557
|
-
}
|
|
558
|
-
catch { }
|
|
559
|
-
}
|
|
560
|
-
}
|
// ---------------------------------------------------------------------------
// Import resolution helper
// ---------------------------------------------------------------------------
/**
 * Attempt to resolve an import source string to a known file path.
 *
 * Only relative specifiers ('./foo', '../bar') are considered. The specifier
 * is joined onto the importing file's directory and matched against the set
 * of known repo-relative paths: first verbatim, then with a list of common
 * source-file extensions (and directory index files), and finally with the
 * specifier's own extension stripped before retrying (e.g. './foo.js' can
 * resolve to 'foo.ts').
 *
 * @param source        Raw import specifier as written in the importing file.
 * @param importingFile Path of the file that contains the import.
 * @param knownPaths    Set of known file paths to match against.
 * @returns The matching file path, or null when the import cannot be resolved.
 */
function resolveImportSource(source, importingFile, knownPaths) {
    // Bare specifiers (packages, path aliases) are out of scope for now.
    if (!source.startsWith('.')) {
        return null;
    }
    const base = path.posix.normalize(path.posix.join(path.dirname(importingFile), source));
    // Verbatim hit — the specifier already names a known file.
    if (knownPaths.has(base)) {
        return base;
    }
    // Common source extensions plus directory index files.
    const suffixes = [
        '.ts', '.tsx', '.js', '.jsx', '.py', '.java', '.go', '.rs',
        '.kt', '.cs', '.cpp', '.c', '.h', '.hpp', '.php', '.rb', '.swift',
        '/index.ts', '/index.tsx', '/index.js', '/index.jsx',
    ];
    const direct = suffixes.find((suffix) => knownPaths.has(base + suffix));
    if (direct !== undefined) {
        return base + direct;
    }
    // Retry with the written extension removed (e.g. './foo.js' -> './foo.ts').
    const stem = base.replace(/\.[^/.]+$/, '');
    if (stem !== base) {
        const retried = suffixes.find((suffix) => knownPaths.has(stem + suffix));
        if (retried !== undefined) {
            return stem + retried;
        }
    }
    return null;
}
|
// ---------------------------------------------------------------------------
// IPC message listener — entry point
// ---------------------------------------------------------------------------
process.on('message', async (raw) => {
    // Decode the incoming IPC payload; a malformed message is fatal for the child.
    let parsed;
    try {
        parsed = parseParentMessage(raw);
    }
    catch (err) {
        sendError(new Error(`Failed to parse parent message: ${err instanceof Error ? err.message : String(err)}`));
        process.exit(1);
        return; // unreachable but satisfies control flow analysis
    }
    if (parsed.kind === 'shutdown') {
        // Parent asked us to stop — exit cleanly.
        process.exit(0);
    }
    else if (parsed.kind === 'refresh') {
        // Run the refresh and report the outcome; failure terminates the child.
        try {
            sendSuccess(await runRefresh(parsed.payload));
        }
        catch (err) {
            sendError(err instanceof Error ? err : new Error(String(err)));
            process.exit(1);
        }
    }
    else {
        // Unknown message kind — fail loudly rather than hang the parent.
        throw new Error(`Unhandled parent message kind: ${JSON.stringify(parsed)}`);
    }
});
|
// ---------------------------------------------------------------------------
// Safety nets — ensure the child never crashes silently
// ---------------------------------------------------------------------------
/** Report a fatal error to the parent over IPC, then terminate with failure. */
const reportFatalAndExit = (error) => {
    sendError(error);
    process.exit(1);
};
process.on('uncaughtException', (err) => {
    reportFatalAndExit(new Error(`Uncaught exception in child: ${err.message}`));
});
process.on('unhandledRejection', (reason) => {
    const message = reason instanceof Error ? reason.message : String(reason);
    reportFatalAndExit(new Error(`Unhandled rejection in child: ${message}`));
});
// If the parent disconnects (closes the IPC channel), exit gracefully
process.on('disconnect', () => {
    process.exit(0);
});
|
|
@@ -1,32 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @file refresh-coordinator.ts
|
|
3
|
-
* @description Parent-side orchestrator that manages child process lifecycle for
|
|
4
|
-
* incremental DB refreshes — forks the worker, sends dirty files over IPC,
|
|
5
|
-
* and resolves/rejects the caller's promise based on the child's response
|
|
6
|
-
*/
|
|
7
|
-
import { type RepoId, type RepoRoot, type DbPath, type DirtyFileEntry, type RefreshResult } from './types.js';
|
|
8
/** Identifies a repo and its storage locations for a refresh operation */
export interface RefreshTarget {
    /** Stable identifier for the repository (branded type from ./types.js). */
    readonly id: RepoId;
    /** Root path of the repository working tree (branded type from ./types.js). */
    readonly repoRoot: RepoRoot;
    /** Location of the repo's graph database (branded type from ./types.js). */
    readonly dbPath: DbPath;
    // presumably the directory for auxiliary artifacts (embeddings/indexes) — TODO confirm against callers
    readonly storagePath: string;
}
|
/**
 * Parent-side orchestrator for incremental DB refreshes. Manages the child
 * worker's lifecycle and deduplicates concurrent refresh requests per repo.
 */
export declare class RefreshCoordinator {
    /** In-flight refreshes keyed by repo ID — ensures one refresh per repo at a time */
    private readonly inFlight;
    /**
     * Trigger an incremental refresh for the given repo
     *
     * If a refresh for this repo is already in progress the existing promise is
     * returned (deduplication). Otherwise a child process is forked, the dirty
     * file list is sent over IPC, and the returned promise settles once the child
     * reports success or failure
     *
     * @param repo Identity and storage locations of the repo to refresh
     * @param dirtyFiles Changed files to send to the child worker over IPC
     * @param existingPaths File paths already known to the DB — NOTE(review): exact use (deletion vs. import resolution) is decided by the implementation; confirm there
     * @param timeoutMs Optional time limit for the refresh — behavior on expiry is defined by the implementation
     * @returns Resolves with the child's refresh summary; rejects on child failure
     */
    refresh(repo: RefreshTarget, dirtyFiles: readonly DirtyFileEntry[], existingPaths: readonly string[], timeoutMs?: number): Promise<RefreshResult>;
    /**
     * Core refresh logic — queries existing file paths from the DB, forks the
     * child process, and wires up IPC + timeout handling
     */
    private doRefresh;
}
|