@veewo/gitnexus 1.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +234 -0
- package/dist/benchmark/agent-context/evaluators.d.ts +9 -0
- package/dist/benchmark/agent-context/evaluators.js +196 -0
- package/dist/benchmark/agent-context/evaluators.test.d.ts +1 -0
- package/dist/benchmark/agent-context/evaluators.test.js +39 -0
- package/dist/benchmark/agent-context/io.d.ts +2 -0
- package/dist/benchmark/agent-context/io.js +23 -0
- package/dist/benchmark/agent-context/io.test.d.ts +1 -0
- package/dist/benchmark/agent-context/io.test.js +19 -0
- package/dist/benchmark/agent-context/report.d.ts +2 -0
- package/dist/benchmark/agent-context/report.js +59 -0
- package/dist/benchmark/agent-context/report.test.d.ts +1 -0
- package/dist/benchmark/agent-context/report.test.js +85 -0
- package/dist/benchmark/agent-context/runner.d.ts +46 -0
- package/dist/benchmark/agent-context/runner.js +111 -0
- package/dist/benchmark/agent-context/runner.test.d.ts +1 -0
- package/dist/benchmark/agent-context/runner.test.js +79 -0
- package/dist/benchmark/agent-context/tool-runner.d.ts +7 -0
- package/dist/benchmark/agent-context/tool-runner.js +18 -0
- package/dist/benchmark/agent-context/tool-runner.test.d.ts +1 -0
- package/dist/benchmark/agent-context/tool-runner.test.js +11 -0
- package/dist/benchmark/agent-context/types.d.ts +40 -0
- package/dist/benchmark/agent-context/types.js +1 -0
- package/dist/benchmark/analyze-runner.d.ts +16 -0
- package/dist/benchmark/analyze-runner.js +51 -0
- package/dist/benchmark/analyze-runner.test.d.ts +1 -0
- package/dist/benchmark/analyze-runner.test.js +37 -0
- package/dist/benchmark/evaluators.d.ts +6 -0
- package/dist/benchmark/evaluators.js +10 -0
- package/dist/benchmark/evaluators.test.d.ts +1 -0
- package/dist/benchmark/evaluators.test.js +12 -0
- package/dist/benchmark/io.d.ts +7 -0
- package/dist/benchmark/io.js +25 -0
- package/dist/benchmark/io.test.d.ts +1 -0
- package/dist/benchmark/io.test.js +35 -0
- package/dist/benchmark/neonspark-candidates.d.ts +19 -0
- package/dist/benchmark/neonspark-candidates.js +94 -0
- package/dist/benchmark/neonspark-candidates.test.d.ts +1 -0
- package/dist/benchmark/neonspark-candidates.test.js +43 -0
- package/dist/benchmark/neonspark-materialize.d.ts +19 -0
- package/dist/benchmark/neonspark-materialize.js +111 -0
- package/dist/benchmark/neonspark-materialize.test.d.ts +1 -0
- package/dist/benchmark/neonspark-materialize.test.js +124 -0
- package/dist/benchmark/neonspark-sync.d.ts +3 -0
- package/dist/benchmark/neonspark-sync.js +53 -0
- package/dist/benchmark/neonspark-sync.test.d.ts +1 -0
- package/dist/benchmark/neonspark-sync.test.js +20 -0
- package/dist/benchmark/report.d.ts +1 -0
- package/dist/benchmark/report.js +7 -0
- package/dist/benchmark/runner.d.ts +48 -0
- package/dist/benchmark/runner.js +302 -0
- package/dist/benchmark/runner.test.d.ts +1 -0
- package/dist/benchmark/runner.test.js +50 -0
- package/dist/benchmark/scoring.d.ts +16 -0
- package/dist/benchmark/scoring.js +27 -0
- package/dist/benchmark/scoring.test.d.ts +1 -0
- package/dist/benchmark/scoring.test.js +24 -0
- package/dist/benchmark/tool-runner.d.ts +6 -0
- package/dist/benchmark/tool-runner.js +17 -0
- package/dist/benchmark/types.d.ts +36 -0
- package/dist/benchmark/types.js +1 -0
- package/dist/cli/ai-context.d.ts +22 -0
- package/dist/cli/ai-context.js +184 -0
- package/dist/cli/ai-context.test.d.ts +1 -0
- package/dist/cli/ai-context.test.js +30 -0
- package/dist/cli/analyze-multi-scope-regression.test.d.ts +1 -0
- package/dist/cli/analyze-multi-scope-regression.test.js +22 -0
- package/dist/cli/analyze-options.d.ts +7 -0
- package/dist/cli/analyze-options.js +56 -0
- package/dist/cli/analyze-options.test.d.ts +1 -0
- package/dist/cli/analyze-options.test.js +36 -0
- package/dist/cli/analyze.d.ts +14 -0
- package/dist/cli/analyze.js +384 -0
- package/dist/cli/augment.d.ts +13 -0
- package/dist/cli/augment.js +33 -0
- package/dist/cli/benchmark-agent-context.d.ts +29 -0
- package/dist/cli/benchmark-agent-context.js +61 -0
- package/dist/cli/benchmark-agent-context.test.d.ts +1 -0
- package/dist/cli/benchmark-agent-context.test.js +80 -0
- package/dist/cli/benchmark-unity.d.ts +15 -0
- package/dist/cli/benchmark-unity.js +31 -0
- package/dist/cli/benchmark-unity.test.d.ts +1 -0
- package/dist/cli/benchmark-unity.test.js +18 -0
- package/dist/cli/claude-hooks.d.ts +22 -0
- package/dist/cli/claude-hooks.js +97 -0
- package/dist/cli/clean.d.ts +10 -0
- package/dist/cli/clean.js +60 -0
- package/dist/cli/eval-server.d.ts +30 -0
- package/dist/cli/eval-server.js +372 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +182 -0
- package/dist/cli/list.d.ts +6 -0
- package/dist/cli/list.js +33 -0
- package/dist/cli/mcp.d.ts +8 -0
- package/dist/cli/mcp.js +34 -0
- package/dist/cli/repo-manager-alias.test.d.ts +1 -0
- package/dist/cli/repo-manager-alias.test.js +40 -0
- package/dist/cli/scope-filter.test.d.ts +1 -0
- package/dist/cli/scope-filter.test.js +49 -0
- package/dist/cli/serve.d.ts +4 -0
- package/dist/cli/serve.js +6 -0
- package/dist/cli/setup.d.ts +8 -0
- package/dist/cli/setup.js +311 -0
- package/dist/cli/setup.test.d.ts +1 -0
- package/dist/cli/setup.test.js +31 -0
- package/dist/cli/status.d.ts +6 -0
- package/dist/cli/status.js +27 -0
- package/dist/cli/tool.d.ts +40 -0
- package/dist/cli/tool.js +94 -0
- package/dist/cli/version.test.d.ts +1 -0
- package/dist/cli/version.test.js +19 -0
- package/dist/cli/wiki.d.ts +15 -0
- package/dist/cli/wiki.js +361 -0
- package/dist/config/ignore-service.d.ts +1 -0
- package/dist/config/ignore-service.js +210 -0
- package/dist/config/supported-languages.d.ts +12 -0
- package/dist/config/supported-languages.js +15 -0
- package/dist/core/augmentation/engine.d.ts +26 -0
- package/dist/core/augmentation/engine.js +213 -0
- package/dist/core/embeddings/embedder.d.ts +60 -0
- package/dist/core/embeddings/embedder.js +251 -0
- package/dist/core/embeddings/embedding-pipeline.d.ts +51 -0
- package/dist/core/embeddings/embedding-pipeline.js +329 -0
- package/dist/core/embeddings/index.d.ts +9 -0
- package/dist/core/embeddings/index.js +9 -0
- package/dist/core/embeddings/text-generator.d.ts +24 -0
- package/dist/core/embeddings/text-generator.js +182 -0
- package/dist/core/embeddings/types.d.ts +87 -0
- package/dist/core/embeddings/types.js +32 -0
- package/dist/core/graph/graph.d.ts +2 -0
- package/dist/core/graph/graph.js +66 -0
- package/dist/core/graph/types.d.ts +61 -0
- package/dist/core/graph/types.js +1 -0
- package/dist/core/ingestion/ast-cache.d.ts +11 -0
- package/dist/core/ingestion/ast-cache.js +34 -0
- package/dist/core/ingestion/call-processor.d.ts +15 -0
- package/dist/core/ingestion/call-processor.js +327 -0
- package/dist/core/ingestion/cluster-enricher.d.ts +38 -0
- package/dist/core/ingestion/cluster-enricher.js +170 -0
- package/dist/core/ingestion/community-processor.d.ts +39 -0
- package/dist/core/ingestion/community-processor.js +312 -0
- package/dist/core/ingestion/entry-point-scoring.d.ts +39 -0
- package/dist/core/ingestion/entry-point-scoring.js +260 -0
- package/dist/core/ingestion/filesystem-walker.d.ts +28 -0
- package/dist/core/ingestion/filesystem-walker.js +80 -0
- package/dist/core/ingestion/framework-detection.d.ts +39 -0
- package/dist/core/ingestion/framework-detection.js +235 -0
- package/dist/core/ingestion/heritage-processor.d.ts +20 -0
- package/dist/core/ingestion/heritage-processor.js +197 -0
- package/dist/core/ingestion/import-processor.d.ts +38 -0
- package/dist/core/ingestion/import-processor.js +778 -0
- package/dist/core/ingestion/parsing-processor.d.ts +15 -0
- package/dist/core/ingestion/parsing-processor.js +291 -0
- package/dist/core/ingestion/pipeline.d.ts +5 -0
- package/dist/core/ingestion/pipeline.js +323 -0
- package/dist/core/ingestion/process-processor.d.ts +51 -0
- package/dist/core/ingestion/process-processor.js +309 -0
- package/dist/core/ingestion/scope-filter.d.ts +25 -0
- package/dist/core/ingestion/scope-filter.js +100 -0
- package/dist/core/ingestion/structure-processor.d.ts +2 -0
- package/dist/core/ingestion/structure-processor.js +36 -0
- package/dist/core/ingestion/symbol-table.d.ts +33 -0
- package/dist/core/ingestion/symbol-table.js +38 -0
- package/dist/core/ingestion/tree-sitter-queries.d.ts +12 -0
- package/dist/core/ingestion/tree-sitter-queries.js +398 -0
- package/dist/core/ingestion/utils.d.ts +10 -0
- package/dist/core/ingestion/utils.js +50 -0
- package/dist/core/ingestion/workers/parse-worker.d.ts +59 -0
- package/dist/core/ingestion/workers/parse-worker.js +672 -0
- package/dist/core/ingestion/workers/worker-pool.d.ts +16 -0
- package/dist/core/ingestion/workers/worker-pool.js +120 -0
- package/dist/core/kuzu/csv-generator.d.ts +29 -0
- package/dist/core/kuzu/csv-generator.js +336 -0
- package/dist/core/kuzu/kuzu-adapter.d.ts +101 -0
- package/dist/core/kuzu/kuzu-adapter.js +753 -0
- package/dist/core/kuzu/schema.d.ts +53 -0
- package/dist/core/kuzu/schema.js +407 -0
- package/dist/core/search/bm25-index.d.ts +23 -0
- package/dist/core/search/bm25-index.js +95 -0
- package/dist/core/search/hybrid-search.d.ts +49 -0
- package/dist/core/search/hybrid-search.js +118 -0
- package/dist/core/tree-sitter/parser-loader.d.ts +4 -0
- package/dist/core/tree-sitter/parser-loader.js +44 -0
- package/dist/core/wiki/generator.d.ts +110 -0
- package/dist/core/wiki/generator.js +786 -0
- package/dist/core/wiki/graph-queries.d.ts +80 -0
- package/dist/core/wiki/graph-queries.js +238 -0
- package/dist/core/wiki/html-viewer.d.ts +10 -0
- package/dist/core/wiki/html-viewer.js +297 -0
- package/dist/core/wiki/llm-client.d.ts +40 -0
- package/dist/core/wiki/llm-client.js +162 -0
- package/dist/core/wiki/prompts.d.ts +53 -0
- package/dist/core/wiki/prompts.js +174 -0
- package/dist/lib/utils.d.ts +1 -0
- package/dist/lib/utils.js +3 -0
- package/dist/mcp/core/embedder.d.ts +27 -0
- package/dist/mcp/core/embedder.js +108 -0
- package/dist/mcp/core/kuzu-adapter.d.ts +34 -0
- package/dist/mcp/core/kuzu-adapter.js +231 -0
- package/dist/mcp/local/local-backend.d.ts +160 -0
- package/dist/mcp/local/local-backend.js +1646 -0
- package/dist/mcp/resources.d.ts +31 -0
- package/dist/mcp/resources.js +407 -0
- package/dist/mcp/server.d.ts +23 -0
- package/dist/mcp/server.js +251 -0
- package/dist/mcp/staleness.d.ts +15 -0
- package/dist/mcp/staleness.js +29 -0
- package/dist/mcp/tools.d.ts +24 -0
- package/dist/mcp/tools.js +195 -0
- package/dist/server/api.d.ts +10 -0
- package/dist/server/api.js +344 -0
- package/dist/server/mcp-http.d.ts +13 -0
- package/dist/server/mcp-http.js +100 -0
- package/dist/storage/git.d.ts +6 -0
- package/dist/storage/git.js +32 -0
- package/dist/storage/repo-manager.d.ts +125 -0
- package/dist/storage/repo-manager.js +257 -0
- package/dist/types/pipeline.d.ts +34 -0
- package/dist/types/pipeline.js +18 -0
- package/hooks/claude/gitnexus-hook.cjs +135 -0
- package/hooks/claude/pre-tool-use.sh +78 -0
- package/hooks/claude/session-start.sh +42 -0
- package/package.json +92 -0
- package/skills/gitnexus-cli.md +82 -0
- package/skills/gitnexus-debugging.md +89 -0
- package/skills/gitnexus-exploring.md +78 -0
- package/skills/gitnexus-guide.md +64 -0
- package/skills/gitnexus-impact-analysis.md +97 -0
- package/skills/gitnexus-refactoring.md +121 -0
- package/vendor/leiden/index.cjs +355 -0
- package/vendor/leiden/utils.cjs +392 -0
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * A fixed-size pool of Node.js worker threads that fans work out in chunks
 * and gathers per-worker results back in submission order.
 */
export interface WorkerPool {
    /**
     * Dispatch items across workers. Items are split into chunks (one per worker),
     * each worker processes its chunk via sub-batches to limit peak memory,
     * and results are concatenated back in order.
     *
     * @param items      Work items; their concrete shape is defined by the worker script.
     * @param onProgress Optional callback invoked with the cumulative number of
     *                   files processed across all workers so far.
     */
    dispatch<TInput, TResult>(items: TInput[], onProgress?: (filesProcessed: number) => void): Promise<TResult[]>;
    /** Terminate all workers. Must be called when done. */
    terminate(): Promise<void>;
    /** Number of workers in the pool */
    readonly size: number;
}
/**
 * Create a pool of worker threads.
 *
 * @param workerUrl URL of the worker script each thread runs.
 * @param poolSize  Worker count; when omitted an implementation-chosen default is used.
 */
export declare const createWorkerPool: (workerUrl: URL, poolSize?: number) => WorkerPool;
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { Worker } from 'node:worker_threads';
import os from 'node:os';
/**
 * Max files to send to a worker in a single postMessage.
 * Keeps structured-clone memory bounded per sub-batch.
 */
const SUB_BATCH_SIZE = 1500;
/** Per sub-batch timeout. If a single sub-batch takes longer than this,
 * likely a pathological file (e.g. minified 50MB JS). Fail fast. */
const SUB_BATCH_TIMEOUT_MS = 30_000;
/**
 * Create a pool of worker threads.
 *
 * Message protocol with each worker, as exercised below:
 *  - main -> worker: { type: 'sub-batch', files }          process one slice of items
 *  - main -> worker: { type: 'flush' }                     no more slices; emit final result
 *  - worker -> main: { type: 'progress', filesProcessed }  cumulative count for this worker
 *  - worker -> main: { type: 'sub-batch-done' }            ready for the next slice
 *  - worker -> main: { type: 'error', error }              fatal; rejects this worker's promise
 *  - worker -> main: { type: 'result', data }              final payload after 'flush'
 * Any other message shape is resolved as-is (compatibility with workers that
 * reply with a bare result).
 */
export const createWorkerPool = (workerUrl, poolSize) => {
    // Default: leave one CPU for the main thread, capped at 8, floor of 1.
    const size = poolSize ?? Math.min(8, Math.max(1, os.cpus().length - 1));
    const workers = [];
    for (let i = 0; i < size; i++) {
        workers.push(new Worker(workerUrl));
    }
    const dispatch = (items, onProgress) => {
        if (items.length === 0)
            return Promise.resolve([]);
        // One contiguous chunk per worker; ceil guarantees chunks.length <= size.
        const chunkSize = Math.ceil(items.length / size);
        const chunks = [];
        for (let i = 0; i < items.length; i += chunkSize) {
            chunks.push(items.slice(i, i + chunkSize));
        }
        // Per-worker cumulative progress, summed for the onProgress callback.
        const workerProgress = new Array(chunks.length).fill(0);
        const promises = chunks.map((chunk, i) => {
            const worker = workers[i];
            return new Promise((resolve, reject) => {
                // Guards against double settlement from races between the
                // message / error / exit / timeout paths.
                let settled = false;
                let subBatchTimer = null;
                // Detach every listener and cancel the watchdog exactly once,
                // at settlement time.
                const cleanup = () => {
                    if (subBatchTimer)
                        clearTimeout(subBatchTimer);
                    worker.removeListener('message', handler);
                    worker.removeListener('error', errorHandler);
                    worker.removeListener('exit', exitHandler);
                };
                // (Re)arm the watchdog. NOTE: the timer armed for the LAST
                // sub-batch is not re-armed on 'flush', so the worker's final
                // result assembly must also finish within the same window.
                const resetSubBatchTimer = () => {
                    if (subBatchTimer)
                        clearTimeout(subBatchTimer);
                    subBatchTimer = setTimeout(() => {
                        if (!settled) {
                            settled = true;
                            cleanup();
                            reject(new Error(`Worker ${i} sub-batch timed out after ${SUB_BATCH_TIMEOUT_MS / 1000}s (chunk: ${chunk.length} items).`));
                        }
                    }, SUB_BATCH_TIMEOUT_MS);
                };
                let subBatchIdx = 0;
                // Feed the chunk to the worker SUB_BATCH_SIZE items at a time;
                // when exhausted, ask the worker to flush its final result.
                const sendNextSubBatch = () => {
                    const start = subBatchIdx * SUB_BATCH_SIZE;
                    if (start >= chunk.length) {
                        worker.postMessage({ type: 'flush' });
                        return;
                    }
                    const subBatch = chunk.slice(start, start + SUB_BATCH_SIZE);
                    subBatchIdx++;
                    resetSubBatchTimer();
                    worker.postMessage({ type: 'sub-batch', files: subBatch });
                };
                const handler = (msg) => {
                    if (settled)
                        return;
                    if (msg && msg.type === 'progress') {
                        workerProgress[i] = msg.filesProcessed;
                        if (onProgress) {
                            const total = workerProgress.reduce((a, b) => a + b, 0);
                            onProgress(total);
                        }
                    }
                    else if (msg && msg.type === 'sub-batch-done') {
                        sendNextSubBatch();
                    }
                    else if (msg && msg.type === 'error') {
                        settled = true;
                        cleanup();
                        reject(new Error(`Worker ${i} error: ${msg.error}`));
                    }
                    else if (msg && msg.type === 'result') {
                        settled = true;
                        cleanup();
                        resolve(msg.data);
                    }
                    else {
                        // Unknown message shape: treat it as the final result.
                        settled = true;
                        cleanup();
                        resolve(msg);
                    }
                };
                const errorHandler = (err) => {
                    if (!settled) {
                        settled = true;
                        cleanup();
                        reject(err);
                    }
                };
                const exitHandler = (code) => {
                    if (!settled) {
                        settled = true;
                        cleanup();
                        reject(new Error(`Worker ${i} exited with code ${code}. Likely OOM or native addon failure.`));
                    }
                };
                worker.on('message', handler);
                worker.once('error', errorHandler);
                worker.once('exit', exitHandler);
                sendNextSubBatch();
            });
        });
        return Promise.all(promises);
    };
    // Tear down every thread; the pool is unusable afterwards.
    const terminate = async () => {
        await Promise.all(workers.map(w => w.terminate()));
        workers.length = 0;
    };
    return { dispatch, terminate, size };
};
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
 * CSV Generator for KuzuDB Hybrid Schema
 *
 * Streams CSV rows directly to disk files in a single pass over graph nodes.
 * File contents are lazy-read from disk per-node to avoid holding the entire
 * repo in RAM. Rows are buffered (FLUSH_EVERY) before writing to minimize
 * per-row Promise overhead.
 *
 * RFC 4180 Compliant:
 * - Fields containing commas, double quotes, or newlines are enclosed in double quotes
 * - Double quotes within fields are escaped by doubling them ("")
 * - All fields are consistently quoted for safety with code content
 */
import { KnowledgeGraph } from '../graph/types.js';
import { NodeTableName } from './schema.js';
/** Paths and row counts of the CSV files produced by one streaming pass. */
export interface StreamedCSVResult {
    /** One entry per node table; csvPath is the on-disk CSV, rows the data-row count. */
    nodeFiles: Map<NodeTableName, {
        csvPath: string;
        rows: number;
    }>;
    /** Path of the single relationships CSV. */
    relCsvPath: string;
    /** Number of relationship rows written. */
    relRows: number;
}
/**
 * Stream all CSV data directly to disk files.
 * Iterates graph nodes exactly ONCE — routes each node to the right writer.
 * File contents are lazy-read from disk with a generous LRU cache.
 *
 * @param graph    Knowledge graph whose nodes and relationships are serialized.
 * @param repoPath Repository root used to resolve nodes' relative file paths.
 * @param csvDir   Output directory; recreated from scratch on each run.
 */
export declare const streamAllCSVsToDisk: (graph: KnowledgeGraph, repoPath: string, csvDir: string) => Promise<StreamedCSVResult>;
|
|
@@ -0,0 +1,336 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* CSV Generator for KuzuDB Hybrid Schema
|
|
3
|
+
*
|
|
4
|
+
* Streams CSV rows directly to disk files in a single pass over graph nodes.
|
|
5
|
+
* File contents are lazy-read from disk per-node to avoid holding the entire
|
|
6
|
+
* repo in RAM. Rows are buffered (FLUSH_EVERY) before writing to minimize
|
|
7
|
+
* per-row Promise overhead.
|
|
8
|
+
*
|
|
9
|
+
* RFC 4180 Compliant:
|
|
10
|
+
* - Fields containing commas, double quotes, or newlines are enclosed in double quotes
|
|
11
|
+
* - Double quotes within fields are escaped by doubling them ("")
|
|
12
|
+
* - All fields are consistently quoted for safety with code content
|
|
13
|
+
*/
|
|
14
|
+
import fs from 'fs/promises';
|
|
15
|
+
import { createWriteStream } from 'fs';
|
|
16
|
+
import path from 'path';
|
|
17
|
+
/** Flush buffered rows to disk every N rows */
|
|
18
|
+
const FLUSH_EVERY = 500;
|
|
19
|
+
// ============================================================================
// CSV ESCAPE UTILITIES
// ============================================================================
/**
 * Normalize a string for safe CSV/UTF-8 embedding:
 * - CRLF and bare CR line endings become LF
 * - C0 control characters (except \t and \n) and DEL are stripped
 * - UNPAIRED UTF-16 surrogates and the non-characters U+FFFE/U+FFFF are
 *   stripped. Valid surrogate PAIRS (emoji, astral-plane characters) are
 *   preserved — stripping every surrogate code unit, as a naive
 *   [\uD800-\uDFFF] class does, would destroy them.
 */
const sanitizeUTF8 = (str) => {
    return str
        .replace(/\r\n/g, '\n')
        .replace(/\r/g, '\n')
        .replace(/[\x00-\x08\x0B\x0C\x0E-\x1F\x7F]/g, '')
        // Lone high surrogate (not followed by a low surrogate) OR lone low
        // surrogate (not preceded by a high surrogate): remove only unpaired units.
        .replace(/[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?<![\uD800-\uDBFF])[\uDC00-\uDFFF]/g, '')
        .replace(/[\uFFFE\uFFFF]/g, '');
};
/**
 * RFC 4180 escape: the field is always quoted and embedded double quotes are
 * doubled. null/undefined serialize as an empty quoted field ("").
 */
const escapeCSVField = (value) => {
    if (value === undefined || value === null)
        return '""';
    let str = String(value);
    str = sanitizeUTF8(str);
    return `"${str.replace(/"/g, '""')}"`;
};
/**
 * Serialize a numeric field unquoted, substituting defaultValue when the
 * value is null/undefined.
 */
const escapeCSVNumber = (value, defaultValue = -1) => {
    if (value === undefined || value === null)
        return String(defaultValue);
    return String(value);
};
|
|
42
|
+
// ============================================================================
// CONTENT EXTRACTION (lazy — reads from disk on demand)
// ============================================================================
/**
 * Heuristic binary sniff: sample the first 1000 UTF-16 code units and treat
 * the content as binary when more than 10% are non-printable control
 * characters (codes 0-8, 14-31, 127). Empty/absent content is NOT binary.
 */
const isBinaryContent = (content) => {
    if (!content || content.length === 0)
        return false;
    const sample = content.slice(0, 1000);
    let nonPrintable = 0;
    for (let i = 0; i < sample.length; i++) {
        const code = sample.charCodeAt(i);
        if ((code < 9) || (code > 13 && code < 32) || code === 127)
            nonPrintable++;
    }
    return (nonPrintable / sample.length) > 0.1;
};
/**
 * LRU content cache — avoids re-reading the same source file for every
 * symbol defined in it. Sized generously so most files stay cached during
 * the single-pass node iteration.
 *
 * Recency is tracked via Map insertion order: a cache hit re-inserts the
 * key, so eviction always removes the least-recently-used entry. (The
 * previous array-based bookkeeping never refreshed entries on hit and so
 * actually evicted in FIFO order, contradicting the LRU contract.)
 */
class FileContentCache {
    cache = new Map();
    maxSize;
    repoPath;
    constructor(repoPath, maxSize = 3000) {
        this.repoPath = repoPath;
        this.maxSize = maxSize;
    }
    /**
     * Return the file's contents, reading from disk on a miss. Unreadable
     * paths (missing file, directory, permission error) are cached as ''
     * so they are not retried.
     */
    async get(relativePath) {
        if (!relativePath)
            return '';
        if (this.cache.has(relativePath)) {
            const hit = this.cache.get(relativePath);
            // Re-insert to mark as most-recently-used.
            this.cache.delete(relativePath);
            this.cache.set(relativePath, hit);
            return hit;
        }
        try {
            const fullPath = path.join(this.repoPath, relativePath);
            const content = await fs.readFile(fullPath, 'utf-8');
            this.set(relativePath, content);
            return content;
        }
        catch {
            this.set(relativePath, '');
            return '';
        }
    }
    /** Insert/overwrite an entry, evicting the least-recently-used on overflow. */
    set(key, value) {
        if (this.cache.has(key)) {
            this.cache.delete(key);
        }
        else if (this.cache.size >= this.maxSize) {
            // Oldest entry = first key in Map insertion order.
            const oldest = this.cache.keys().next().value;
            if (oldest !== undefined)
                this.cache.delete(oldest);
        }
        this.cache.set(key, value);
    }
}
/**
 * Build the `content` CSV column for a node.
 *
 * - Folder nodes carry no content (checked FIRST, before any disk access —
 *   previously a futile fs.readFile was attempted on the directory path).
 * - File nodes get their full text, truncated to 10000 chars.
 * - Symbol nodes get their startLine..endLine range plus 2 context lines on
 *   each side, truncated to 5000 chars.
 * Binary-looking content is replaced with a placeholder marker.
 */
const extractContent = async (node, contentCache) => {
    // Folders have no readable content; short-circuit before touching disk.
    if (node.label === 'Folder')
        return '';
    const filePath = node.properties.filePath;
    const content = await contentCache.get(filePath);
    if (!content)
        return '';
    if (isBinaryContent(content))
        return '[Binary file - content not stored]';
    if (node.label === 'File') {
        // Cap stored file text so one huge file cannot bloat the CSV/DB.
        const MAX_FILE_CONTENT = 10000;
        return content.length > MAX_FILE_CONTENT
            ? content.slice(0, MAX_FILE_CONTENT) + '\n... [truncated]'
            : content;
    }
    const startLine = node.properties.startLine;
    const endLine = node.properties.endLine;
    if (startLine === undefined || endLine === undefined)
        return '';
    const lines = content.split('\n');
    // Line numbers appear 1-based; widen by 2 context lines each side.
    const start = Math.max(0, startLine - 2);
    const end = Math.min(lines.length - 1, endLine + 2);
    const snippet = lines.slice(start, end + 1).join('\n');
    const MAX_SNIPPET = 5000;
    return snippet.length > MAX_SNIPPET
        ? snippet.slice(0, MAX_SNIPPET) + '\n... [truncated]'
        : snippet;
};
|
|
126
|
+
// ============================================================================
// BUFFERED CSV WRITER
// ============================================================================
/**
 * Append-only CSV file writer that buffers rows in memory and writes them to
 * the stream in batches, honoring backpressure via 'drain'.
 *
 * Fixes over the previous version:
 *  - stream 'error' events are captured as soon as they occur and surfaced
 *    by the next flush()/finish() call, instead of crashing the process as
 *    an unhandled 'error' event (no listener was attached until finish());
 *  - the flush threshold is an instance setting (default 500, matching the
 *    old module-level FLUSH_EVERY), so writers can be tuned independently.
 */
class BufferedCSVWriter {
    ws;
    buffer = [];
    rows = 0; // data rows added; the header is not counted
    flushEvery;
    streamError = null;
    /**
     * @param filePath   Destination CSV path (created/truncated).
     * @param header     Header row, written first.
     * @param flushEvery Buffered row count that triggers a write (default 500).
     */
    constructor(filePath, header, flushEvery = 500) {
        this.ws = createWriteStream(filePath, 'utf-8');
        // Large repos flush many times — raise listener cap to avoid MaxListenersExceededWarning
        this.ws.setMaxListeners(50);
        // Record the first stream error; it is re-thrown by flush()/finish().
        this.ws.on('error', (err) => {
            if (!this.streamError)
                this.streamError = err;
        });
        this.flushEvery = flushEvery;
        this.buffer.push(header);
    }
    /** Queue one pre-escaped CSV row; flushes once the buffer fills up. */
    addRow(row) {
        this.buffer.push(row);
        this.rows++;
        if (this.buffer.length >= this.flushEvery) {
            return this.flush();
        }
        return Promise.resolve();
    }
    /** Write all buffered rows, waiting for 'drain' on backpressure. */
    flush() {
        if (this.streamError)
            return Promise.reject(this.streamError);
        if (this.buffer.length === 0)
            return Promise.resolve();
        const chunk = this.buffer.join('\n') + '\n';
        this.buffer.length = 0;
        return new Promise((resolve) => {
            const ok = this.ws.write(chunk);
            if (ok)
                resolve();
            else
                this.ws.once('drain', resolve);
        });
    }
    /** Flush remaining rows and close the underlying stream. */
    async finish() {
        await this.flush();
        return new Promise((resolve, reject) => {
            if (this.streamError) {
                reject(this.streamError);
                return;
            }
            this.ws.once('error', reject);
            this.ws.end(() => resolve());
        });
    }
}
|
|
168
|
+
/**
|
|
169
|
+
* Stream all CSV data directly to disk files.
|
|
170
|
+
* Iterates graph nodes exactly ONCE — routes each node to the right writer.
|
|
171
|
+
* File contents are lazy-read from disk with a generous LRU cache.
|
|
172
|
+
*/
|
|
173
|
+
export const streamAllCSVsToDisk = async (graph, repoPath, csvDir) => {
|
|
174
|
+
// Remove stale CSVs from previous crashed runs, then recreate
|
|
175
|
+
try {
|
|
176
|
+
await fs.rm(csvDir, { recursive: true, force: true });
|
|
177
|
+
}
|
|
178
|
+
catch { }
|
|
179
|
+
await fs.mkdir(csvDir, { recursive: true });
|
|
180
|
+
// We open ~30 concurrent write-streams; raise process limit to suppress
|
|
181
|
+
// MaxListenersExceededWarning (restored after all streams finish).
|
|
182
|
+
const prevMax = process.getMaxListeners();
|
|
183
|
+
process.setMaxListeners(prevMax + 40);
|
|
184
|
+
const contentCache = new FileContentCache(repoPath);
|
|
185
|
+
// Create writers for every node type up-front
|
|
186
|
+
const fileWriter = new BufferedCSVWriter(path.join(csvDir, 'file.csv'), 'id,name,filePath,content');
|
|
187
|
+
const folderWriter = new BufferedCSVWriter(path.join(csvDir, 'folder.csv'), 'id,name,filePath');
|
|
188
|
+
const codeElementHeader = 'id,name,filePath,startLine,endLine,isExported,content,description';
|
|
189
|
+
const functionWriter = new BufferedCSVWriter(path.join(csvDir, 'function.csv'), codeElementHeader);
|
|
190
|
+
const classWriter = new BufferedCSVWriter(path.join(csvDir, 'class.csv'), codeElementHeader);
|
|
191
|
+
const interfaceWriter = new BufferedCSVWriter(path.join(csvDir, 'interface.csv'), codeElementHeader);
|
|
192
|
+
const methodWriter = new BufferedCSVWriter(path.join(csvDir, 'method.csv'), codeElementHeader);
|
|
193
|
+
const codeElemWriter = new BufferedCSVWriter(path.join(csvDir, 'codeelement.csv'), codeElementHeader);
|
|
194
|
+
const communityWriter = new BufferedCSVWriter(path.join(csvDir, 'community.csv'), 'id,label,heuristicLabel,keywords,description,enrichedBy,cohesion,symbolCount');
|
|
195
|
+
const processWriter = new BufferedCSVWriter(path.join(csvDir, 'process.csv'), 'id,label,heuristicLabel,processType,stepCount,communities,entryPointId,terminalId');
|
|
196
|
+
// Multi-language node types share the same CSV shape (no isExported column)
|
|
197
|
+
const multiLangHeader = 'id,name,filePath,startLine,endLine,content,description';
|
|
198
|
+
const MULTI_LANG_TYPES = ['Struct', 'Enum', 'Macro', 'Typedef', 'Union', 'Namespace', 'Trait', 'Impl',
|
|
199
|
+
'TypeAlias', 'Const', 'Static', 'Property', 'Record', 'Delegate', 'Annotation', 'Constructor', 'Template', 'Module'];
|
|
200
|
+
const multiLangWriters = new Map();
|
|
201
|
+
for (const t of MULTI_LANG_TYPES) {
|
|
202
|
+
multiLangWriters.set(t, new BufferedCSVWriter(path.join(csvDir, `${t.toLowerCase()}.csv`), multiLangHeader));
|
|
203
|
+
}
|
|
204
|
+
const codeWriterMap = {
|
|
205
|
+
'Function': functionWriter,
|
|
206
|
+
'Class': classWriter,
|
|
207
|
+
'Interface': interfaceWriter,
|
|
208
|
+
'Method': methodWriter,
|
|
209
|
+
'CodeElement': codeElemWriter,
|
|
210
|
+
};
|
|
211
|
+
const seenFileIds = new Set();
|
|
212
|
+
// --- SINGLE PASS over all nodes ---
|
|
213
|
+
for (const node of graph.iterNodes()) {
|
|
214
|
+
switch (node.label) {
|
|
215
|
+
case 'File': {
|
|
216
|
+
if (seenFileIds.has(node.id))
|
|
217
|
+
break;
|
|
218
|
+
seenFileIds.add(node.id);
|
|
219
|
+
const content = await extractContent(node, contentCache);
|
|
220
|
+
await fileWriter.addRow([
|
|
221
|
+
escapeCSVField(node.id),
|
|
222
|
+
escapeCSVField(node.properties.name || ''),
|
|
223
|
+
escapeCSVField(node.properties.filePath || ''),
|
|
224
|
+
escapeCSVField(content),
|
|
225
|
+
].join(','));
|
|
226
|
+
break;
|
|
227
|
+
}
|
|
228
|
+
case 'Folder':
|
|
229
|
+
await folderWriter.addRow([
|
|
230
|
+
escapeCSVField(node.id),
|
|
231
|
+
escapeCSVField(node.properties.name || ''),
|
|
232
|
+
escapeCSVField(node.properties.filePath || ''),
|
|
233
|
+
].join(','));
|
|
234
|
+
break;
|
|
235
|
+
case 'Community': {
|
|
236
|
+
const keywords = node.properties.keywords || [];
|
|
237
|
+
const keywordsStr = `[${keywords.map((k) => `'${k.replace(/'/g, "''")}'`).join(',')}]`;
|
|
238
|
+
await communityWriter.addRow([
|
|
239
|
+
escapeCSVField(node.id),
|
|
240
|
+
escapeCSVField(node.properties.name || ''),
|
|
241
|
+
escapeCSVField(node.properties.heuristicLabel || ''),
|
|
242
|
+
keywordsStr,
|
|
243
|
+
escapeCSVField(node.properties.description || ''),
|
|
244
|
+
escapeCSVField(node.properties.enrichedBy || 'heuristic'),
|
|
245
|
+
escapeCSVNumber(node.properties.cohesion, 0),
|
|
246
|
+
escapeCSVNumber(node.properties.symbolCount, 0),
|
|
247
|
+
].join(','));
|
|
248
|
+
break;
|
|
249
|
+
}
|
|
250
|
+
case 'Process': {
|
|
251
|
+
const communities = node.properties.communities || [];
|
|
252
|
+
const communitiesStr = `[${communities.map((c) => `'${c.replace(/'/g, "''")}'`).join(',')}]`;
|
|
253
|
+
await processWriter.addRow([
|
|
254
|
+
escapeCSVField(node.id),
|
|
255
|
+
escapeCSVField(node.properties.name || ''),
|
|
256
|
+
escapeCSVField(node.properties.heuristicLabel || ''),
|
|
257
|
+
escapeCSVField(node.properties.processType || ''),
|
|
258
|
+
escapeCSVNumber(node.properties.stepCount, 0),
|
|
259
|
+
escapeCSVField(communitiesStr),
|
|
260
|
+
escapeCSVField(node.properties.entryPointId || ''),
|
|
261
|
+
escapeCSVField(node.properties.terminalId || ''),
|
|
262
|
+
].join(','));
|
|
263
|
+
break;
|
|
264
|
+
}
|
|
265
|
+
default: {
|
|
266
|
+
// Code element nodes (Function, Class, Interface, Method, CodeElement)
|
|
267
|
+
const writer = codeWriterMap[node.label];
|
|
268
|
+
if (writer) {
|
|
269
|
+
const content = await extractContent(node, contentCache);
|
|
270
|
+
await writer.addRow([
|
|
271
|
+
escapeCSVField(node.id),
|
|
272
|
+
escapeCSVField(node.properties.name || ''),
|
|
273
|
+
escapeCSVField(node.properties.filePath || ''),
|
|
274
|
+
escapeCSVNumber(node.properties.startLine, -1),
|
|
275
|
+
escapeCSVNumber(node.properties.endLine, -1),
|
|
276
|
+
node.properties.isExported ? 'true' : 'false',
|
|
277
|
+
escapeCSVField(content),
|
|
278
|
+
escapeCSVField(node.properties.description || ''),
|
|
279
|
+
].join(','));
|
|
280
|
+
}
|
|
281
|
+
else {
|
|
282
|
+
// Multi-language node types (Struct, Impl, Trait, Macro, etc.)
|
|
283
|
+
const mlWriter = multiLangWriters.get(node.label);
|
|
284
|
+
if (mlWriter) {
|
|
285
|
+
const content = await extractContent(node, contentCache);
|
|
286
|
+
await mlWriter.addRow([
|
|
287
|
+
escapeCSVField(node.id),
|
|
288
|
+
escapeCSVField(node.properties.name || ''),
|
|
289
|
+
escapeCSVField(node.properties.filePath || ''),
|
|
290
|
+
escapeCSVNumber(node.properties.startLine, -1),
|
|
291
|
+
escapeCSVNumber(node.properties.endLine, -1),
|
|
292
|
+
escapeCSVField(content),
|
|
293
|
+
escapeCSVField(node.properties.description || ''),
|
|
294
|
+
].join(','));
|
|
295
|
+
}
|
|
296
|
+
}
|
|
297
|
+
break;
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
}
|
|
301
|
+
// Finish all node writers
|
|
302
|
+
const allWriters = [fileWriter, folderWriter, functionWriter, classWriter, interfaceWriter, methodWriter, codeElemWriter, communityWriter, processWriter, ...multiLangWriters.values()];
|
|
303
|
+
await Promise.all(allWriters.map(w => w.finish()));
|
|
304
|
+
// --- Stream relationship CSV ---
|
|
305
|
+
const relCsvPath = path.join(csvDir, 'relations.csv');
|
|
306
|
+
const relWriter = new BufferedCSVWriter(relCsvPath, 'from,to,type,confidence,reason,step');
|
|
307
|
+
for (const rel of graph.iterRelationships()) {
|
|
308
|
+
await relWriter.addRow([
|
|
309
|
+
escapeCSVField(rel.sourceId),
|
|
310
|
+
escapeCSVField(rel.targetId),
|
|
311
|
+
escapeCSVField(rel.type),
|
|
312
|
+
escapeCSVNumber(rel.confidence, 1.0),
|
|
313
|
+
escapeCSVField(rel.reason),
|
|
314
|
+
escapeCSVNumber(rel.step, 0),
|
|
315
|
+
].join(','));
|
|
316
|
+
}
|
|
317
|
+
await relWriter.finish();
|
|
318
|
+
// Build result map — only include tables that have rows
|
|
319
|
+
const nodeFiles = new Map();
|
|
320
|
+
const tableMap = [
|
|
321
|
+
['File', fileWriter], ['Folder', folderWriter],
|
|
322
|
+
['Function', functionWriter], ['Class', classWriter],
|
|
323
|
+
['Interface', interfaceWriter], ['Method', methodWriter],
|
|
324
|
+
['CodeElement', codeElemWriter],
|
|
325
|
+
['Community', communityWriter], ['Process', processWriter],
|
|
326
|
+
...Array.from(multiLangWriters.entries()).map(([name, w]) => [name, w]),
|
|
327
|
+
];
|
|
328
|
+
for (const [name, writer] of tableMap) {
|
|
329
|
+
if (writer.rows > 0) {
|
|
330
|
+
nodeFiles.set(name, { csvPath: path.join(csvDir, `${name.toLowerCase()}.csv`), rows: writer.rows });
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
// Restore original process listener limit
|
|
334
|
+
process.setMaxListeners(prevMax);
|
|
335
|
+
return { nodeFiles, relCsvPath, relRows: relWriter.rows };
|
|
336
|
+
};
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
import kuzu from 'kuzu';
import { KnowledgeGraph } from '../graph/types.js';
/**
 * Open (or create) the Kuzu database at `dbPath` and return the live handles.
 * NOTE(review): several other exports here take no db handle, so the module
 * presumably keeps these as module-level singletons — confirm in the implementation.
 */
export declare const initKuzu: (dbPath: string) => Promise<{
    db: kuzu.Database;
    conn: kuzu.Connection;
}>;
/**
 * Execute multiple queries against one repo DB atomically.
 * While the callback runs, no other request can switch the active DB.
 */
export declare const withKuzuDb: <T>(dbPath: string, operation: () => Promise<T>) => Promise<T>;
/** Callback invoked with human-readable progress messages during long-running loads. */
export type KuzuProgressCallback = (message: string) => void;
/**
 * Load an in-memory knowledge graph into the Kuzu database under `storagePath`.
 * @param graph - The knowledge graph to persist
 * @param repoPath - Path of the analyzed repository
 * @param storagePath - Directory where the Kuzu database lives
 * @param onProgress - Optional progress reporter
 * @returns Load outcome: relationship insert/skip counts plus non-fatal warnings
 */
export declare const loadGraphToKuzu: (graph: KnowledgeGraph, repoPath: string, storagePath: string, onProgress?: KuzuProgressCallback) => Promise<{
    success: boolean;
    insertedRels: number;
    skippedRels: number;
    warnings: string[];
}>;
/**
 * Insert a single node to KuzuDB
 * @param label - Node type (File, Function, Class, etc.)
 * @param properties - Node properties
 * @param dbPath - Path to KuzuDB database (optional if already initialized)
 */
export declare const insertNodeToKuzu: (label: string, properties: Record<string, any>, dbPath?: string) => Promise<boolean>;
/**
 * Batch insert multiple nodes to KuzuDB using a single connection
 * @param nodes - Array of {label, properties} to insert
 * @param dbPath - Path to KuzuDB database
 * @returns Object with success count and error count
 */
export declare const batchInsertNodesToKuzu: (nodes: Array<{
    label: string;
    properties: Record<string, any>;
}>, dbPath: string) => Promise<{
    inserted: number;
    failed: number;
}>;
/**
 * Run a raw Cypher query against the active database.
 * @param cypher - Cypher query text
 * @returns Result rows; row shape depends on the query's RETURN clause
 */
export declare const executeQuery: (cypher: string) => Promise<any[]>;
/**
 * Execute one Cypher statement repeatedly, once per parameter set.
 * NOTE(review): the name suggests the prepared statement is reused across
 * `paramsList` entries for speed — confirm in the implementation.
 */
export declare const executeWithReusedStatement: (cypher: string, paramsList: Array<Record<string, any>>) => Promise<void>;
/** Return total node and edge counts for the active database. */
export declare const getKuzuStats: () => Promise<{
    nodes: number;
    edges: number;
}>;
/**
 * Load cached embeddings from KuzuDB before a rebuild.
 * Returns all embedding vectors so they can be re-inserted after the graph is reloaded,
 * avoiding expensive re-embedding of unchanged nodes.
 */
export declare const loadCachedEmbeddings: () => Promise<{
    embeddingNodeIds: Set<string>;
    embeddings: Array<{
        nodeId: string;
        embedding: number[];
    }>;
}>;
/** Close the active database/connection handles. Safe to call during shutdown. */
export declare const closeKuzu: () => Promise<void>;
/** Synchronously report whether the database has been initialized and is usable. */
export declare const isKuzuReady: () => boolean;
/**
 * Delete all nodes (and their relationships) for a specific file from KuzuDB
 * @param filePath - The file path to delete nodes for
 * @param dbPath - Optional path to KuzuDB for per-query connection
 * @returns Object with counts of deleted nodes
 */
export declare const deleteNodesForFile: (filePath: string, dbPath?: string) => Promise<{
    deletedNodes: number;
}>;
/** Name of the table that stores embedding vectors (used by callers building queries). */
export declare const getEmbeddingTableName: () => string;
/**
 * Load the FTS extension (required before using FTS functions).
 * Safe to call multiple times — tracks loaded state via module-level ftsLoaded.
 */
export declare const loadFTSExtension: () => Promise<void>;
/**
 * Create a full-text search index on a table
 * @param tableName - The node table name (e.g., 'File', 'CodeSymbol')
 * @param indexName - Name for the FTS index
 * @param properties - List of properties to index (e.g., ['name', 'code'])
 * @param stemmer - Stemming algorithm (default: 'porter')
 */
export declare const createFTSIndex: (tableName: string, indexName: string, properties: string[], stemmer?: string) => Promise<void>;
/**
 * Query a full-text search index
 * @param tableName - The node table name
 * @param indexName - FTS index name
 * @param query - Search query string
 * @param limit - Maximum results
 * @param conjunctive - If true, all terms must match (AND); if false, any term matches (OR)
 * @returns Array of { node properties, score }
 */
export declare const queryFTS: (tableName: string, indexName: string, query: string, limit?: number, conjunctive?: boolean) => Promise<Array<{
    nodeId: string;
    name: string;
    filePath: string;
    score: number;
    [key: string]: any;
}>>;
/**
 * Drop an FTS index
 */
export declare const dropFTSIndex: (tableName: string, indexName: string) => Promise<void>;
|