@renseiai/agentfactory-code-intelligence 0.8.8 → 0.8.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/embedding/__tests__/embedding.test.d.ts +2 -0
- package/dist/src/embedding/__tests__/embedding.test.d.ts.map +1 -0
- package/dist/src/embedding/__tests__/embedding.test.js +339 -0
- package/dist/src/embedding/chunker.d.ts +40 -0
- package/dist/src/embedding/chunker.d.ts.map +1 -0
- package/dist/src/embedding/chunker.js +135 -0
- package/dist/src/embedding/embedding-provider.d.ts +15 -0
- package/dist/src/embedding/embedding-provider.d.ts.map +1 -0
- package/dist/src/embedding/embedding-provider.js +1 -0
- package/dist/src/embedding/voyage-provider.d.ts +39 -0
- package/dist/src/embedding/voyage-provider.d.ts.map +1 -0
- package/dist/src/embedding/voyage-provider.js +146 -0
- package/dist/src/index.d.ts +14 -2
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +10 -1
- package/dist/src/indexing/__tests__/vector-indexing.test.d.ts +2 -0
- package/dist/src/indexing/__tests__/vector-indexing.test.d.ts.map +1 -0
- package/dist/src/indexing/__tests__/vector-indexing.test.js +291 -0
- package/dist/src/indexing/incremental-indexer.d.ts +4 -0
- package/dist/src/indexing/incremental-indexer.d.ts.map +1 -1
- package/dist/src/indexing/incremental-indexer.js +45 -0
- package/dist/src/indexing/vector-indexer.d.ts +63 -0
- package/dist/src/indexing/vector-indexer.d.ts.map +1 -0
- package/dist/src/indexing/vector-indexer.js +197 -0
- package/dist/src/plugin/code-intelligence-plugin.d.ts.map +1 -1
- package/dist/src/plugin/code-intelligence-plugin.js +4 -2
- package/dist/src/reranking/__tests__/reranker.test.d.ts +2 -0
- package/dist/src/reranking/__tests__/reranker.test.d.ts.map +1 -0
- package/dist/src/reranking/__tests__/reranker.test.js +503 -0
- package/dist/src/reranking/cohere-reranker.d.ts +26 -0
- package/dist/src/reranking/cohere-reranker.d.ts.map +1 -0
- package/dist/src/reranking/cohere-reranker.js +110 -0
- package/dist/src/reranking/reranker-provider.d.ts +40 -0
- package/dist/src/reranking/reranker-provider.d.ts.map +1 -0
- package/dist/src/reranking/reranker-provider.js +6 -0
- package/dist/src/reranking/voyage-reranker.d.ts +27 -0
- package/dist/src/reranking/voyage-reranker.d.ts.map +1 -0
- package/dist/src/reranking/voyage-reranker.js +111 -0
- package/dist/src/search/__tests__/hybrid-search.test.d.ts +2 -0
- package/dist/src/search/__tests__/hybrid-search.test.d.ts.map +1 -0
- package/dist/src/search/__tests__/hybrid-search.test.js +437 -0
- package/dist/src/search/__tests__/query-classifier.test.d.ts +2 -0
- package/dist/src/search/__tests__/query-classifier.test.d.ts.map +1 -0
- package/dist/src/search/__tests__/query-classifier.test.js +136 -0
- package/dist/src/search/hybrid-search.d.ts +56 -0
- package/dist/src/search/hybrid-search.d.ts.map +1 -0
- package/dist/src/search/hybrid-search.js +299 -0
- package/dist/src/search/query-classifier.d.ts +20 -0
- package/dist/src/search/query-classifier.d.ts.map +1 -0
- package/dist/src/search/query-classifier.js +58 -0
- package/dist/src/search/score-normalizer.d.ts +16 -0
- package/dist/src/search/score-normalizer.d.ts.map +1 -0
- package/dist/src/search/score-normalizer.js +26 -0
- package/dist/src/types.d.ts +83 -0
- package/dist/src/types.d.ts.map +1 -1
- package/dist/src/types.js +36 -2
- package/dist/src/vector/__tests__/vector-store.test.d.ts +2 -0
- package/dist/src/vector/__tests__/vector-store.test.d.ts.map +1 -0
- package/dist/src/vector/__tests__/vector-store.test.js +278 -0
- package/dist/src/vector/hnsw-store.d.ts +48 -0
- package/dist/src/vector/hnsw-store.d.ts.map +1 -0
- package/dist/src/vector/hnsw-store.js +437 -0
- package/dist/src/vector/vector-store.d.ts +15 -0
- package/dist/src/vector/vector-store.d.ts.map +1 -0
- package/dist/src/vector/vector-store.js +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import type { EmbeddingProvider } from '../embedding/embedding-provider.js';
|
|
2
|
+
import type { VectorStore } from '../vector/vector-store.js';
|
|
3
|
+
import type { FileAST } from '../types.js';
|
|
4
|
+
/** Configuration options accepted by the VectorIndexer constructor. */
export interface VectorIndexConfig {
    /** When false, every indexer operation is a no-op returning zero counts. */
    enabled: boolean;
    /** Provider used to compute dense embeddings for chunk contents. */
    embeddingProvider: EmbeddingProvider;
    /** Destination store that holds the embedded chunks. */
    vectorStore: VectorStore;
    /** Chunks sent per embedding request (implementation defaults to 128). */
    batchSize?: number;
    /** Embedding batches processed in parallel (implementation defaults to 2). */
    maxConcurrentBatches?: number;
}
|
|
11
|
+
/** Progress snapshot passed to the optional onProgress callbacks. */
export interface VectorIndexProgress {
    /** Pipeline stage currently being reported. */
    phase: 'chunking' | 'embedding' | 'indexing';
    /** Units completed so far in this phase (files for chunking, chunks otherwise). */
    processed: number;
    /** Total units expected for this phase. */
    total: number;
}
|
|
16
|
+
/**
 * Orchestrates the chunk -> embed -> store flow for dense vector indexing.
 *
 * Integrates with the EmbeddingProvider and VectorStore interfaces to
 * compute embeddings for code chunks and maintain a searchable vector index.
 */
export declare class VectorIndexer {
    /** When false, all operations are no-ops. */
    private readonly enabled;
    /** Embedding provider used to vectorize chunk text. */
    private readonly provider;
    /** Vector store receiving insert/delete operations. */
    private readonly store;
    /** Chunks per embedding request. */
    private readonly batchSize;
    /** Number of embedding batches awaited in parallel. */
    private readonly maxConcurrentBatches;
    /** Splits parsed files into embeddable chunks. */
    private readonly chunker;
    /** Tracks content hashes for previously indexed chunks (id -> hash). */
    private chunkHashes;
    /** @param config See VectorIndexConfig for field semantics and defaults. */
    constructor(config: VectorIndexConfig);
    /**
     * Index new/modified files: chunk -> embed -> insert into vector store.
     * Returns the number of chunks produced and embedded (equal on success).
     */
    indexFiles(asts: FileAST[], fileContents: Map<string, string>, onProgress?: (progress: VectorIndexProgress) => void): Promise<{
        chunked: number;
        embedded: number;
    }>;
    /**
     * Remove all chunks for given file paths from vector store.
     * Chunk IDs start with filePath, so we filter by prefix.
     */
    removeFiles(filePaths: string[]): Promise<void>;
    /**
     * Update chunks for modified files (delete old, insert new) with content-hash diffing.
     * Only re-embeds chunks whose content actually changed.
     * Returns counts of added (new + changed), removed, and unchanged chunks.
     */
    updateFiles(asts: FileAST[], fileContents: Map<string, string>, onProgress?: (progress: VectorIndexProgress) => void): Promise<{
        added: number;
        removed: number;
        unchanged: number;
    }>;
    /**
     * Get the underlying vector store (for save/load operations).
     */
    getStore(): VectorStore;
    /**
     * Embed chunks in batches with controlled concurrency.
     * Assigns embeddings back to the chunk objects in-place.
     */
    private embedChunks;
}
|
|
63
|
+
//# sourceMappingURL=vector-indexer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"vector-indexer.d.ts","sourceRoot":"","sources":["../../../src/indexing/vector-indexer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,oCAAoC,CAAA;AAC3E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,2BAA2B,CAAA;AAE5D,OAAO,KAAK,EAAE,OAAO,EAAkB,MAAM,aAAa,CAAA;AAI1D,MAAM,WAAW,iBAAiB;IAChC,OAAO,EAAE,OAAO,CAAA;IAChB,iBAAiB,EAAE,iBAAiB,CAAA;IACpC,WAAW,EAAE,WAAW,CAAA;IACxB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,oBAAoB,CAAC,EAAE,MAAM,CAAA;CAC9B;AAID,MAAM,WAAW,mBAAmB;IAClC,KAAK,EAAE,UAAU,GAAG,WAAW,GAAG,UAAU,CAAA;IAC5C,SAAS,EAAE,MAAM,CAAA;IACjB,KAAK,EAAE,MAAM,CAAA;CACd;AAUD;;;;;GAKG;AACH,qBAAa,aAAa;IACxB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAmB;IAC5C,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAa;IACnC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAQ;IAClC,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAQ;IAC7C,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IAEjC,wEAAwE;IACxE,OAAO,CAAC,WAAW,CAAiC;gBAExC,MAAM,EAAE,iBAAiB;IASrC;;OAEG;IACG,UAAU,CACd,IAAI,EAAE,OAAO,EAAE,EACf,YAAY,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,EACjC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,mBAAmB,KAAK,IAAI,GACnD,OAAO,CAAC;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAA;KAAE,CAAC;IA2BjD;;;OAGG;IACG,WAAW,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAqBrD;;;OAGG;IACG,WAAW,CACf,IAAI,EAAE,OAAO,EAAE,EACf,YAAY,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,EACjC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,mBAAmB,KAAK,IAAI,GACnD,OAAO,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;IAuFjE;;OAEG;IACH,QAAQ,IAAI,WAAW;IAMvB;;;OAGG;YACW,WAAW;CAsC1B"}
|
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { Chunker } from '../embedding/chunker.js';
|
|
3
|
+
// ── Content Hashing ──────────────────────────────────────────────────
/**
 * Produce a short, stable fingerprint of chunk text: the first 16 hex
 * characters of its SHA-256 digest. Used to detect unchanged chunks.
 */
function contentHash(text) {
    const hasher = createHash('sha256');
    hasher.update(text);
    return hasher.digest('hex').slice(0, 16);
}
|
|
7
|
+
// ── VectorIndexer ────────────────────────────────────────────────────
/**
 * Orchestrates the chunk -> embed -> store flow for dense vector indexing.
 *
 * Integrates with the EmbeddingProvider and VectorStore interfaces to
 * compute embeddings for code chunks and maintain a searchable vector index.
 */
export class VectorIndexer {
    enabled;
    provider;
    store;
    batchSize;
    maxConcurrentBatches;
    chunker;
    /** Tracks content hashes for previously indexed chunks (id -> hash). */
    chunkHashes = new Map();
    constructor(config) {
        this.enabled = config.enabled;
        this.provider = config.embeddingProvider;
        this.store = config.vectorStore;
        this.batchSize = config.batchSize ?? 128;
        this.maxConcurrentBatches = config.maxConcurrentBatches ?? 2;
        this.chunker = new Chunker();
    }
    /**
     * Index new/modified files: chunk -> embed -> insert into vector store.
     */
    async indexFiles(asts, fileContents, onProgress) {
        if (!this.enabled) {
            return { chunked: 0, embedded: 0 };
        }
        // Phase 1: split every file into embeddable chunks.
        const fileCount = asts.length;
        onProgress?.({ phase: 'chunking', processed: 0, total: fileCount });
        const chunks = this.chunker.chunkFiles(asts, fileContents);
        onProgress?.({ phase: 'chunking', processed: fileCount, total: fileCount });
        if (chunks.length === 0) {
            return { chunked: 0, embedded: 0 };
        }
        // Phase 2: compute embeddings in batches.
        await this.embedChunks(chunks, onProgress);
        // Phase 3: persist into the vector store.
        onProgress?.({ phase: 'indexing', processed: 0, total: chunks.length });
        await this.store.insert(chunks);
        // Remember each chunk's content hash so later updates can diff.
        chunks.forEach(c => this.chunkHashes.set(c.id, contentHash(c.content)));
        onProgress?.({ phase: 'indexing', processed: chunks.length, total: chunks.length });
        return { chunked: chunks.length, embedded: chunks.length };
    }
    /**
     * Remove all chunks for given file paths from vector store.
     * Chunk IDs start with filePath, so we filter by prefix.
     */
    async removeFiles(filePaths) {
        if (!this.enabled) {
            return;
        }
        const doomed = [...this.chunkHashes.keys()].filter(id =>
            filePaths.some(p => id.startsWith(p + ':')));
        if (doomed.length === 0) {
            return;
        }
        await this.store.delete(doomed);
        doomed.forEach(id => this.chunkHashes.delete(id));
    }
    /**
     * Update chunks for modified files (delete old, insert new) with content-hash diffing.
     * Only re-embeds chunks whose content actually changed.
     */
    async updateFiles(asts, fileContents, onProgress) {
        if (!this.enabled) {
            return { added: 0, removed: 0, unchanged: 0 };
        }
        // Chunk the incoming versions of the files.
        onProgress?.({ phase: 'chunking', processed: 0, total: asts.length });
        const fresh = this.chunker.chunkFiles(asts, fileContents);
        onProgress?.({ phase: 'chunking', processed: asts.length, total: asts.length });
        // Previously indexed chunk IDs that belong to the touched files.
        const touched = asts.map(a => a.filePath);
        const stale = new Set();
        for (const id of this.chunkHashes.keys()) {
            if (touched.some(p => id.startsWith(p + ':'))) {
                stale.add(id);
            }
        }
        // Classify each fresh chunk by comparing content hashes.
        const freshById = new Map(fresh.map(c => [c.id, c]));
        const keptIds = new Set();
        const modified = [];
        const created = [];
        for (const chunk of fresh) {
            const prior = this.chunkHashes.get(chunk.id);
            if (prior === undefined) {
                created.push(chunk); // brand-new chunk
            }
            else if (prior === contentHash(chunk.content)) {
                keptIds.add(chunk.id); // identical content — skip re-embedding
            }
            else {
                modified.push(chunk); // content drifted — needs re-embedding
            }
        }
        // Old IDs with no fresh counterpart have disappeared from the files.
        const dropped = [...stale].filter(id => !freshById.has(id));
        // Purge removed and changed chunks from the store first.
        const purge = [...dropped, ...modified.map(c => c.id)];
        if (purge.length > 0) {
            await this.store.delete(purge);
        }
        // Embed and insert everything that changed or is new.
        const pending = [...modified, ...created];
        if (pending.length > 0) {
            await this.embedChunks(pending, onProgress);
            onProgress?.({ phase: 'indexing', processed: 0, total: pending.length });
            await this.store.insert(pending);
            onProgress?.({ phase: 'indexing', processed: pending.length, total: pending.length });
        }
        // Sync hash bookkeeping with what the store now holds.
        dropped.forEach(id => this.chunkHashes.delete(id));
        pending.forEach(c => this.chunkHashes.set(c.id, contentHash(c.content)));
        return {
            added: created.length + modified.length,
            removed: dropped.length,
            unchanged: keptIds.size,
        };
    }
    /**
     * Get the underlying vector store (for save/load operations).
     */
    getStore() {
        return this.store;
    }
    // ── Private Helpers ──────────────────────────────────────────────
    /**
     * Embed chunks in batches with controlled concurrency.
     * Assigns embeddings back to the chunk objects in-place.
     */
    async embedChunks(chunks, onProgress) {
        const total = chunks.length;
        let done = 0;
        onProgress?.({ phase: 'embedding', processed: 0, total });
        // Partition into fixed-size batches.
        const batches = [];
        for (let start = 0; start < chunks.length; start += this.batchSize) {
            batches.push(chunks.slice(start, start + this.batchSize));
        }
        // Await up to maxConcurrentBatches embedding requests at a time.
        for (let w = 0; w < batches.length; w += this.maxConcurrentBatches) {
            const wave = batches.slice(w, w + this.maxConcurrentBatches);
            const vectors = await Promise.all(
                wave.map(b => this.provider.embed(b.map(c => c.content))));
            // Write each embedding back onto its chunk, reporting per batch.
            wave.forEach((batch, i) => {
                batch.forEach((chunk, j) => {
                    chunk.embedding = vectors[i][j];
                });
                done += batch.length;
                onProgress?.({ phase: 'embedding', processed: done, total });
            });
        }
    }
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"code-intelligence-plugin.d.ts","sourceRoot":"","sources":["../../../src/plugin/code-intelligence-plugin.ts"],"names":[],"mappings":"AACA,OAAO,EAAQ,KAAK,oBAAoB,EAAE,MAAM,gCAAgC,CAAA;
|
|
1
|
+
{"version":3,"file":"code-intelligence-plugin.d.ts","sourceRoot":"","sources":["../../../src/plugin/code-intelligence-plugin.ts"],"names":[],"mappings":"AACA,OAAO,EAAQ,KAAK,oBAAoB,EAAE,MAAM,gCAAgC,CAAA;AAchF,mEAAmE;AACnE,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,EAAE,MAAM,CAAA;IACnB,WAAW,CAAC,OAAO,EAAE,iBAAiB,GAAG,oBAAoB,CAAC,GAAG,CAAC,EAAE,CAAA;CACrE;AAED,qDAAqD;AACrD,MAAM,WAAW,iBAAiB;IAChC,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC3B,GAAG,EAAE,MAAM,CAAA;CACZ;AAED,2CAA2C;AAC3C,eAAO,MAAM,sBAAsB,EAAE,UAkHpC,CAAA"}
|
|
@@ -3,6 +3,7 @@ import { tool } from '@anthropic-ai/claude-agent-sdk';
|
|
|
3
3
|
import { SymbolExtractor } from '../parser/symbol-extractor.js';
|
|
4
4
|
import { IncrementalIndexer } from '../indexing/incremental-indexer.js';
|
|
5
5
|
import { SearchEngine } from '../search/search-engine.js';
|
|
6
|
+
import { HybridSearchEngine } from '../search/hybrid-search.js';
|
|
6
7
|
import { RepoMapGenerator } from '../repo-map/repo-map-generator.js';
|
|
7
8
|
import { DedupPipeline } from '../memory/dedup-pipeline.js';
|
|
8
9
|
import { InMemoryStore } from '../memory/memory-store.js';
|
|
@@ -14,6 +15,7 @@ export const codeIntelligencePlugin = {
|
|
|
14
15
|
const extractor = new SymbolExtractor();
|
|
15
16
|
const indexer = new IncrementalIndexer(extractor, { indexDir: '.agentfactory/code-index' });
|
|
16
17
|
const searchEngine = new SearchEngine();
|
|
18
|
+
const hybridEngine = new HybridSearchEngine(searchEngine, null, null);
|
|
17
19
|
const repoMapGen = new RepoMapGenerator();
|
|
18
20
|
const dedupStore = new InMemoryStore();
|
|
19
21
|
const dedupPipeline = new DedupPipeline(dedupStore);
|
|
@@ -59,13 +61,13 @@ export const codeIntelligencePlugin = {
|
|
|
59
61
|
};
|
|
60
62
|
}
|
|
61
63
|
}),
|
|
62
|
-
tool('af_code_search_code', 'Search code using BM25 ranking with code-aware tokenization', {
|
|
64
|
+
tool('af_code_search_code', 'Search code using hybrid BM25 + semantic ranking with code-aware tokenization', {
|
|
63
65
|
query: z.string().describe('Code search query'),
|
|
64
66
|
max_results: z.number().optional().describe('Maximum results (default 20)'),
|
|
65
67
|
language: z.string().optional().describe('Filter by language'),
|
|
66
68
|
}, async (args) => {
|
|
67
69
|
try {
|
|
68
|
-
const results =
|
|
70
|
+
const results = await hybridEngine.search({
|
|
69
71
|
query: args.query,
|
|
70
72
|
maxResults: args.max_results ?? 20,
|
|
71
73
|
language: args.language,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"reranker.test.d.ts","sourceRoot":"","sources":["../../../../src/reranking/__tests__/reranker.test.ts"],"names":[],"mappings":""}
|