@renseiai/agentfactory-code-intelligence 0.8.8 → 0.8.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/embedding/__tests__/embedding.test.d.ts +2 -0
- package/dist/src/embedding/__tests__/embedding.test.d.ts.map +1 -0
- package/dist/src/embedding/__tests__/embedding.test.js +339 -0
- package/dist/src/embedding/chunker.d.ts +40 -0
- package/dist/src/embedding/chunker.d.ts.map +1 -0
- package/dist/src/embedding/chunker.js +135 -0
- package/dist/src/embedding/embedding-provider.d.ts +15 -0
- package/dist/src/embedding/embedding-provider.d.ts.map +1 -0
- package/dist/src/embedding/embedding-provider.js +1 -0
- package/dist/src/embedding/voyage-provider.d.ts +39 -0
- package/dist/src/embedding/voyage-provider.d.ts.map +1 -0
- package/dist/src/embedding/voyage-provider.js +146 -0
- package/dist/src/index.d.ts +14 -2
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +10 -1
- package/dist/src/indexing/__tests__/vector-indexing.test.d.ts +2 -0
- package/dist/src/indexing/__tests__/vector-indexing.test.d.ts.map +1 -0
- package/dist/src/indexing/__tests__/vector-indexing.test.js +291 -0
- package/dist/src/indexing/incremental-indexer.d.ts +4 -0
- package/dist/src/indexing/incremental-indexer.d.ts.map +1 -1
- package/dist/src/indexing/incremental-indexer.js +45 -0
- package/dist/src/indexing/vector-indexer.d.ts +63 -0
- package/dist/src/indexing/vector-indexer.d.ts.map +1 -0
- package/dist/src/indexing/vector-indexer.js +197 -0
- package/dist/src/plugin/code-intelligence-plugin.d.ts.map +1 -1
- package/dist/src/plugin/code-intelligence-plugin.js +4 -2
- package/dist/src/reranking/__tests__/reranker.test.d.ts +2 -0
- package/dist/src/reranking/__tests__/reranker.test.d.ts.map +1 -0
- package/dist/src/reranking/__tests__/reranker.test.js +503 -0
- package/dist/src/reranking/cohere-reranker.d.ts +26 -0
- package/dist/src/reranking/cohere-reranker.d.ts.map +1 -0
- package/dist/src/reranking/cohere-reranker.js +110 -0
- package/dist/src/reranking/reranker-provider.d.ts +40 -0
- package/dist/src/reranking/reranker-provider.d.ts.map +1 -0
- package/dist/src/reranking/reranker-provider.js +6 -0
- package/dist/src/reranking/voyage-reranker.d.ts +27 -0
- package/dist/src/reranking/voyage-reranker.d.ts.map +1 -0
- package/dist/src/reranking/voyage-reranker.js +111 -0
- package/dist/src/search/__tests__/hybrid-search.test.d.ts +2 -0
- package/dist/src/search/__tests__/hybrid-search.test.d.ts.map +1 -0
- package/dist/src/search/__tests__/hybrid-search.test.js +437 -0
- package/dist/src/search/__tests__/query-classifier.test.d.ts +2 -0
- package/dist/src/search/__tests__/query-classifier.test.d.ts.map +1 -0
- package/dist/src/search/__tests__/query-classifier.test.js +136 -0
- package/dist/src/search/hybrid-search.d.ts +56 -0
- package/dist/src/search/hybrid-search.d.ts.map +1 -0
- package/dist/src/search/hybrid-search.js +299 -0
- package/dist/src/search/query-classifier.d.ts +20 -0
- package/dist/src/search/query-classifier.d.ts.map +1 -0
- package/dist/src/search/query-classifier.js +58 -0
- package/dist/src/search/score-normalizer.d.ts +16 -0
- package/dist/src/search/score-normalizer.d.ts.map +1 -0
- package/dist/src/search/score-normalizer.js +26 -0
- package/dist/src/types.d.ts +83 -0
- package/dist/src/types.d.ts.map +1 -1
- package/dist/src/types.js +36 -2
- package/dist/src/vector/__tests__/vector-store.test.d.ts +2 -0
- package/dist/src/vector/__tests__/vector-store.test.d.ts.map +1 -0
- package/dist/src/vector/__tests__/vector-store.test.js +278 -0
- package/dist/src/vector/hnsw-store.d.ts +48 -0
- package/dist/src/vector/hnsw-store.d.ts.map +1 -0
- package/dist/src/vector/hnsw-store.js +437 -0
- package/dist/src/vector/vector-store.d.ts +15 -0
- package/dist/src/vector/vector-store.d.ts.map +1 -0
- package/dist/src/vector/vector-store.js +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1,437 @@
|
|
|
1
|
+
import { mkdir, readFile, writeFile } from 'node:fs/promises';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
// ── Cosine Similarity ───────────────────────────────────────────────
/**
 * Cosine similarity of two equal-length numeric vectors.
 * Returns 0 when either vector has zero magnitude (avoids division by zero).
 */
function cosineSimilarity(a, b) {
    let dotProduct = 0;
    let sumSqA = 0;
    let sumSqB = 0;
    const n = a.length;
    for (let i = 0; i < n; i++) {
        const x = a[i];
        const y = b[i];
        dotProduct += x * y;
        sumSqA += x * x;
        sumSqB += y * y;
    }
    const magnitude = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
    return magnitude === 0 ? 0 : dotProduct / magnitude;
}
|
|
18
|
+
// ── HNSW Threshold ──────────────────────────────────────────────────
/** Minimum chunk count to trigger HNSW graph construction. Below this, brute-force is used. */
// Rationale: the brute-force scan is exact and cheap at small sizes; the
// approximate graph only pays for its build/maintenance cost at scale.
const DEFAULT_HNSW_THRESHOLD = 1000;
|
|
21
|
+
// ── InMemoryVectorStore ─────────────────────────────────────────────
/**
 * In-memory vector store with two search strategies:
 *  - an exact brute-force cosine-similarity scan while the collection is
 *    small, and
 *  - an HNSW (hierarchical navigable small world) graph once the chunk
 *    count reaches `hnswThreshold`.
 *
 * Flat storage (`chunks` / `vectors`) is always kept up to date; the HNSW
 * graph is an acceleration structure layered on top of it.
 *
 * Chunks are expected to expose `id` and `embedding` (a numeric array);
 * entries without a non-empty `embedding` are skipped on insert.
 */
export class InMemoryVectorStore {
    M;               // max neighbors kept per node per layer (HNSW "M" parameter)
    efConstruction;  // candidate-list size used while building the graph
    efSearch;        // candidate-list size used while querying the graph
    modelConfigHash; // identifies the embedding model config; validated on load()
    hnswThreshold;   // chunk count at which the HNSW graph is activated
    // Flat storage — always maintained
    chunks = new Map();  // id -> chunk
    vectors = new Map(); // id -> embedding vector
    // HNSW graph — built when size >= threshold
    nodes = new Map();   // id -> { id, chunk, vector, level, neighbors: Map[] }
    entryPoint = null;   // highest-level node; start of every graph traversal
    maxLevel = 0;        // level of the current entry point
    hnswBuilt = false;   // whether the HNSW graph is currently active
    /**
     * @param options Optional HNSW tuning knobs (`M`, `efConstruction`,
     *   `efSearch`) and a `modelConfigHash` used to reject stale indices.
     */
    constructor(options = {}) {
        this.M = options.M ?? 16;
        this.efConstruction = options.efConstruction ?? 200;
        this.efSearch = options.efSearch ?? 100;
        this.modelConfigHash = options.modelConfigHash ?? '';
        this.hnswThreshold = DEFAULT_HNSW_THRESHOLD;
    }
    /**
     * Override the HNSW promotion threshold (useful for testing).
     */
    setHNSWThreshold(n) {
        this.hnswThreshold = n;
    }
    // ── VectorStore interface ───────────────────────────────────────
    /**
     * Add chunks to the store. Chunks lacking a non-empty `embedding` are
     * silently skipped. Once the total count reaches the threshold the HNSW
     * graph is built; after that, new chunks are inserted into the existing
     * graph incrementally.
     */
    async insert(chunks) {
        for (const chunk of chunks) {
            if (!chunk.embedding || chunk.embedding.length === 0)
                continue;
            this.chunks.set(chunk.id, chunk);
            this.vectors.set(chunk.id, chunk.embedding);
        }
        // Check if we should build / update the HNSW graph
        if (this.chunks.size >= this.hnswThreshold && !this.hnswBuilt) {
            // Full build already covers the chunks inserted above.
            this.buildHNSW();
        }
        else if (this.hnswBuilt) {
            // Incrementally add new chunks to existing graph
            for (const chunk of chunks) {
                if (!chunk.embedding || chunk.embedding.length === 0)
                    continue;
                if (!this.nodes.has(chunk.id)) {
                    this.hnswInsert(chunk.id, chunk, chunk.embedding);
                }
            }
        }
    }
    /**
     * Return the `topK` chunks most similar to `query`, best first, as
     * `{ chunk, score }` pairs. Uses the HNSW graph when active (approximate),
     * otherwise an exact brute-force scan.
     */
    async search(query, topK) {
        if (this.chunks.size === 0)
            return [];
        if (this.hnswBuilt) {
            return this.hnswSearch(query, topK);
        }
        return this.bruteForceSearch(query, topK);
    }
    /** Remove the given ids from flat storage and, if active, from the HNSW graph. */
    async delete(ids) {
        for (const id of ids) {
            this.chunks.delete(id);
            this.vectors.delete(id);
            if (this.hnswBuilt) {
                this.hnswRemove(id);
            }
        }
    }
    /** Number of chunks currently stored. */
    size() {
        return this.chunks.size;
    }
    /**
     * Persist the store to `dirPath` (created if missing) as two files:
     * `meta.json` (model/dimensions/count/hash metadata) and `index.json`
     * (nodes plus graph edges, see serializeIndex()).
     */
    async save(dirPath) {
        await mkdir(dirPath, { recursive: true });
        const meta = {
            model: 'in-memory',
            dimensions: this.getFirstDimensions(),
            chunkCount: this.chunks.size,
            createdAt: Date.now(),
            modelConfigHash: this.modelConfigHash,
        };
        const index = this.serializeIndex();
        await writeFile(join(dirPath, 'meta.json'), JSON.stringify(meta, null, 2));
        await writeFile(join(dirPath, 'index.json'), JSON.stringify(index));
    }
    /**
     * Restore a store previously written by save(). Throws when both this
     * instance and the persisted metadata carry a modelConfigHash and they
     * disagree (the stored embeddings would be incompatible).
     */
    async load(dirPath) {
        const metaRaw = await readFile(join(dirPath, 'meta.json'), 'utf-8');
        const meta = JSON.parse(metaRaw);
        // Validate model config hash
        if (this.modelConfigHash && meta.modelConfigHash && meta.modelConfigHash !== this.modelConfigHash) {
            throw new Error(`Index model config hash mismatch: stored="${meta.modelConfigHash}" current="${this.modelConfigHash}"`);
        }
        const indexRaw = await readFile(join(dirPath, 'index.json'), 'utf-8');
        const index = JSON.parse(indexRaw);
        this.deserializeIndex(index);
    }
    /** Drop all chunks, vectors, and the HNSW graph; reset to the empty state. */
    async clear() {
        this.chunks.clear();
        this.vectors.clear();
        this.nodes.clear();
        this.entryPoint = null;
        this.maxLevel = 0;
        this.hnswBuilt = false;
    }
    // ── Query whether the HNSW graph is active (for testing) ──────
    get isHNSWActive() {
        return this.hnswBuilt;
    }
    // ── Brute-force search ────────────────────────────────────────
    /** Exact search: score every stored vector against the query and keep the top-K. */
    bruteForceSearch(query, topK) {
        const results = [];
        for (const [id, vector] of this.vectors) {
            const score = cosineSimilarity(query, vector);
            const chunk = this.chunks.get(id);
            results.push({ chunk, score });
        }
        results.sort((a, b) => b.score - a.score);
        return results.slice(0, topK);
    }
    // ── HNSW: Build from scratch ──────────────────────────────────
    /** Rebuild the whole HNSW graph from the current flat storage. */
    buildHNSW() {
        this.nodes.clear();
        this.entryPoint = null;
        this.maxLevel = 0;
        for (const [id, vector] of this.vectors) {
            const chunk = this.chunks.get(id);
            this.hnswInsert(id, chunk, vector);
        }
        this.hnswBuilt = true;
    }
    // ── HNSW: Random level assignment ─────────────────────────────
    /** Draw a node level from the standard HNSW geometric distribution. */
    randomLevel() {
        // Level = floor(-ln(uniform) * mL), where mL = 1/ln(M)
        const mL = 1 / Math.log(this.M);
        return Math.floor(-Math.log(Math.random()) * mL);
    }
    // ── HNSW: Insert a single element ────────────────────────────
    /**
     * Insert one element into the graph: greedy-descend from the entry point
     * down to the element's level, then on every layer from
     * min(level, maxLevel) to 0 run an ef-bounded search and connect the
     * element bidirectionally to its best neighbors.
     */
    hnswInsert(id, chunk, vector) {
        const level = this.randomLevel();
        const node = {
            id,
            chunk,
            vector,
            level,
            neighbors: [],
        };
        // One adjacency map per layer 0..level.
        for (let l = 0; l <= level; l++) {
            node.neighbors.push(new Map());
        }
        this.nodes.set(id, node);
        // First element becomes the entry point — nothing to connect to.
        if (!this.entryPoint) {
            this.entryPoint = node;
            this.maxLevel = level;
            return;
        }
        let currentNode = this.entryPoint;
        // Traverse from top to node's level+1 — greedy single-hop
        for (let l = this.maxLevel; l > level; l--) {
            currentNode = this.greedyClosest(currentNode, vector, l);
        }
        // For each layer from min(level, maxLevel) down to 0, do ef-search and connect
        const topInsertLevel = Math.min(level, this.maxLevel);
        for (let l = topInsertLevel; l >= 0; l--) {
            const candidates = this.searchLayer(currentNode, vector, this.efConstruction, l);
            const neighbors = this.selectNeighbors(candidates, this.M);
            for (const [neighborId, score] of neighbors) {
                const neighbor = this.nodes.get(neighborId);
                // Connect bidirectionally
                node.neighbors[l].set(neighborId, score);
                if (l < neighbor.neighbors.length) {
                    neighbor.neighbors[l].set(id, score);
                    // Prune neighbor's connections if needed
                    if (neighbor.neighbors[l].size > this.M) {
                        this.pruneConnections(neighbor, l);
                    }
                }
            }
            // Move currentNode to the closest found for next layer
            if (candidates.length > 0) {
                currentNode = this.nodes.get(candidates[0][0]);
            }
        }
        // Update entry point if new node has higher level
        if (level > this.maxLevel) {
            this.entryPoint = node;
            this.maxLevel = level;
        }
    }
    // ── HNSW: Remove an element ───────────────────────────────────
    /**
     * Remove an element and all of its bidirectional edges. If it was the
     * entry point, promote the remaining node with the highest level — or,
     * when it was the last node, deactivate the graph entirely.
     */
    hnswRemove(id) {
        const node = this.nodes.get(id);
        if (!node)
            return;
        // Remove all bidirectional edges
        for (let l = 0; l < node.neighbors.length; l++) {
            for (const neighborId of node.neighbors[l].keys()) {
                const neighbor = this.nodes.get(neighborId);
                if (neighbor && l < neighbor.neighbors.length) {
                    neighbor.neighbors[l].delete(id);
                }
            }
        }
        this.nodes.delete(id);
        // If this was the entry point, pick a new one
        if (this.entryPoint?.id === id) {
            if (this.nodes.size === 0) {
                this.entryPoint = null;
                this.maxLevel = 0;
                this.hnswBuilt = false;
            }
            else {
                // Find node with highest level
                let best = null;
                for (const n of this.nodes.values()) {
                    if (!best || n.level > best.level)
                        best = n;
                }
                this.entryPoint = best;
                this.maxLevel = best?.level ?? 0;
            }
        }
    }
    // ── HNSW: Search ──────────────────────────────────────────────
    /**
     * Approximate top-K search: greedy descent through the upper layers,
     * then an ef-bounded scan of layer 0, returning `{ chunk, score }` pairs
     * best-first.
     */
    hnswSearch(query, topK) {
        if (!this.entryPoint)
            return [];
        let currentNode = this.entryPoint;
        // Greedy descent from top layer to layer 1
        for (let l = this.maxLevel; l > 0; l--) {
            currentNode = this.greedyClosest(currentNode, query, l);
        }
        // ef-search at layer 0
        const candidates = this.searchLayer(currentNode, query, Math.max(this.efSearch, topK), 0);
        const results = candidates
            .slice(0, topK)
            .map(([id, score]) => ({
            chunk: this.chunks.get(id),
            score,
        }));
        return results;
    }
    // ── HNSW: Greedy closest at a layer ───────────────────────────
    /**
     * Hill-climb within `layer`: repeatedly hop to any neighbor more similar
     * to `query` than the current node, stopping at a local optimum (or when
     * the node has no adjacency at this layer).
     */
    greedyClosest(start, query, layer) {
        let current = start;
        let bestSim = cosineSimilarity(query, current.vector);
        let improved = true;
        while (improved) {
            improved = false;
            if (layer >= current.neighbors.length)
                break;
            for (const neighborId of current.neighbors[layer].keys()) {
                const neighbor = this.nodes.get(neighborId);
                if (!neighbor)
                    continue;
                const sim = cosineSimilarity(query, neighbor.vector);
                if (sim > bestSim) {
                    bestSim = sim;
                    current = neighbor;
                    improved = true;
                }
            }
        }
        return current;
    }
    // ── HNSW: ef-bounded search at a single layer ────────────────
    /**
     * Classic HNSW ef-search restricted to one layer: repeatedly expand the
     * best open candidate, keep at most `ef` results, and stop once the best
     * remaining candidate is worse than the worst kept result. Returns
     * [id, similarity] pairs sorted best-first.
     *
     * NOTE(review): `candidates`/`results` are plain arrays re-sorted each
     * iteration rather than heaps — correct, but O(len·log len) per expansion.
     */
    searchLayer(entryNode, query, ef, layer) {
        const visited = new Set();
        const candidates = []; // [id, similarity]
        const results = []; // [id, similarity]
        const entrySim = cosineSimilarity(query, entryNode.vector);
        candidates.push([entryNode.id, entrySim]);
        results.push([entryNode.id, entrySim]);
        visited.add(entryNode.id);
        while (candidates.length > 0) {
            // Pick the best candidate (highest similarity)
            candidates.sort((a, b) => b[1] - a[1]);
            const [currentId, currentSim] = candidates.shift();
            // Get the worst result
            results.sort((a, b) => b[1] - a[1]);
            const worstResult = results[results.length - 1][1];
            // If current candidate is worse than worst result and we have enough results, stop
            if (currentSim < worstResult && results.length >= ef)
                break;
            const currentNode = this.nodes.get(currentId);
            if (!currentNode || layer >= currentNode.neighbors.length)
                continue;
            for (const neighborId of currentNode.neighbors[layer].keys()) {
                if (visited.has(neighborId))
                    continue;
                visited.add(neighborId);
                const neighbor = this.nodes.get(neighborId);
                if (!neighbor)
                    continue;
                const sim = cosineSimilarity(query, neighbor.vector);
                results.sort((a, b) => b[1] - a[1]);
                const currentWorst = results[results.length - 1][1];
                if (results.length < ef || sim > currentWorst) {
                    candidates.push([neighborId, sim]);
                    results.push([neighborId, sim]);
                    // Evict the worst result to keep at most ef entries.
                    if (results.length > ef) {
                        results.sort((a, b) => b[1] - a[1]);
                        results.pop();
                    }
                }
            }
        }
        results.sort((a, b) => b[1] - a[1]);
        return results;
    }
    // ── HNSW: Select best neighbors ──────────────────────────────
    /** Keep the top `maxNeighbors` candidates by similarity (no diversity heuristic). */
    selectNeighbors(candidates, maxNeighbors) {
        // Simple: just take the top-M by similarity
        const sorted = [...candidates].sort((a, b) => b[1] - a[1]);
        return sorted.slice(0, maxNeighbors);
    }
    // ── HNSW: Prune overloaded connections ────────────────────────
    /** Re-score a node's edges on `layer` and keep only the M most similar. */
    pruneConnections(node, layer) {
        const neighbors = Array.from(node.neighbors[layer].entries())
            .map(([id]) => {
            const n = this.nodes.get(id);
            if (!n)
                return null;
            // Recompute similarity from vectors rather than trusting stored scores.
            const sim = cosineSimilarity(node.vector, n.vector);
            return [id, sim];
        })
            .filter((x) => x !== null);
        neighbors.sort((a, b) => b[1] - a[1]);
        const kept = neighbors.slice(0, this.M);
        node.neighbors[layer].clear();
        for (const [id, sim] of kept) {
            node.neighbors[layer].set(id, sim);
        }
    }
    // ── Serialization ─────────────────────────────────────────────
    /**
     * Produce a JSON-serializable snapshot. When the graph is built, nodes
     * and their per-layer edge lists are exported; otherwise the flat storage
     * is exported as edge-less level-0 nodes.
     */
    serializeIndex() {
        const serializedNodes = [];
        for (const node of this.nodes.values()) {
            const neighbors = [];
            for (let l = 0; l < node.neighbors.length; l++) {
                neighbors.push(Array.from(node.neighbors[l].keys()));
            }
            serializedNodes.push({
                id: node.id,
                chunk: node.chunk,
                vector: node.vector,
                level: node.level,
                neighbors,
            });
        }
        // If HNSW is not built, serialize from flat storage
        if (!this.hnswBuilt && this.chunks.size > 0) {
            for (const [id, chunk] of this.chunks) {
                const vector = this.vectors.get(id);
                if (!vector)
                    continue;
                serializedNodes.push({
                    id,
                    chunk,
                    vector,
                    level: 0,
                    neighbors: [[]],
                });
            }
        }
        return {
            entryPointId: this.entryPoint?.id ?? null,
            maxLevel: this.maxLevel,
            nodes: serializedNodes,
        };
    }
    /**
     * Rebuild all state from a snapshot in two passes: create every node
     * first, then wire up edges (edge scores are not persisted and are
     * restored as 0 — search recomputes similarities from the vectors, and
     * pruneConnections re-scores edges, so the stored values are advisory).
     */
    deserializeIndex(index) {
        this.chunks.clear();
        this.vectors.clear();
        this.nodes.clear();
        this.entryPoint = null;
        this.maxLevel = index.maxLevel;
        // First pass: create all nodes without neighbors
        for (const sn of index.nodes) {
            this.chunks.set(sn.id, sn.chunk);
            this.vectors.set(sn.id, sn.vector);
            const node = {
                id: sn.id,
                chunk: sn.chunk,
                vector: sn.vector,
                level: sn.level,
                neighbors: [],
            };
            for (let l = 0; l <= sn.level; l++) {
                node.neighbors.push(new Map());
            }
            this.nodes.set(sn.id, node);
        }
        // Second pass: wire up neighbor connections
        for (const sn of index.nodes) {
            const node = this.nodes.get(sn.id);
            for (let l = 0; l < sn.neighbors.length; l++) {
                for (const neighborId of sn.neighbors[l]) {
                    if (this.nodes.has(neighborId)) {
                        node.neighbors[l].set(neighborId, 0);
                    }
                }
            }
        }
        // Set entry point
        if (index.entryPointId && this.nodes.has(index.entryPointId)) {
            this.entryPoint = this.nodes.get(index.entryPointId);
        }
        // Mark HNSW as built if we have enough nodes
        this.hnswBuilt = this.nodes.size >= this.hnswThreshold;
    }
    // ── Helpers ───────────────────────────────────────────────────
    /** Dimensionality of the first stored vector, or 0 when the store is empty. */
    getFirstDimensions() {
        for (const vector of this.vectors.values()) {
            return vector.length;
        }
        return 0;
    }
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { EmbeddingChunk } from '../types.js';
|
|
2
|
+
/** A single hit returned by {@link VectorStore.search}. */
export interface VectorSearchResult {
    /** The matched chunk, including its stored embedding. */
    chunk: EmbeddingChunk;
    /** Similarity score for the match; higher means more similar. */
    score: number;
}
|
|
6
|
+
/**
 * Contract for a persistence-capable vector index over embedding chunks.
 */
export interface VectorStore {
    /** Add chunks (with embeddings) to the index. */
    insert(chunks: EmbeddingChunk[]): Promise<void>;
    /** Return the `topK` chunks most similar to the query vector, best first. */
    search(query: number[], topK: number): Promise<VectorSearchResult[]>;
    /** Remove chunks by id. */
    delete(ids: string[]): Promise<void>;
    /** Number of chunks currently stored. */
    size(): number;
    /** Persist the index to the given path. */
    save(path: string): Promise<void>;
    /** Restore a previously saved index from the given path. */
    load(path: string): Promise<void>;
    /** Remove all stored chunks and reset internal state. */
    clear(): Promise<void>;
}
//# sourceMappingURL=vector-store.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"vector-store.d.ts","sourceRoot":"","sources":["../../../src/vector/vector-store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAEjD,MAAM,WAAW,kBAAkB;IACjC,KAAK,EAAE,cAAc,CAAA;IACrB,KAAK,EAAE,MAAM,CAAA;CACd;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,CAAC,MAAM,EAAE,cAAc,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC/C,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACpE,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpC,IAAI,IAAI,MAAM,CAAA;IACd,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACjC,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACjC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;CACvB"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Empty runtime module: the corresponding .ts file declares only types,
// which are erased at compile time. The bare export keeps it an ES module.
export {};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@renseiai/agentfactory-code-intelligence",
|
|
3
|
-
"version": "0.8.8",
|
|
3
|
+
"version": "0.8.9",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "Code intelligence for AgentFactory — tree-sitter AST parsing, BM25 search, incremental indexing, memory deduplication",
|
|
6
6
|
"author": "Rensei AI (https://rensei.ai)",
|