@mhalder/qdrant-mcp-server 2.2.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/README.md +76 -1
- package/build/code/indexer.d.ts +2 -1
- package/build/code/indexer.d.ts.map +1 -1
- package/build/code/indexer.js +37 -7
- package/build/code/indexer.js.map +1 -1
- package/build/git/chunker.d.ts +39 -0
- package/build/git/chunker.d.ts.map +1 -0
- package/build/git/chunker.js +210 -0
- package/build/git/chunker.js.map +1 -0
- package/build/git/chunker.test.d.ts +2 -0
- package/build/git/chunker.test.d.ts.map +1 -0
- package/build/git/chunker.test.js +230 -0
- package/build/git/chunker.test.js.map +1 -0
- package/build/git/config.d.ts +34 -0
- package/build/git/config.d.ts.map +1 -0
- package/build/git/config.js +163 -0
- package/build/git/config.js.map +1 -0
- package/build/git/extractor.d.ts +57 -0
- package/build/git/extractor.d.ts.map +1 -0
- package/build/git/extractor.integration.test.d.ts +6 -0
- package/build/git/extractor.integration.test.d.ts.map +1 -0
- package/build/git/extractor.integration.test.js +166 -0
- package/build/git/extractor.integration.test.js.map +1 -0
- package/build/git/extractor.js +231 -0
- package/build/git/extractor.js.map +1 -0
- package/build/git/extractor.test.d.ts +2 -0
- package/build/git/extractor.test.d.ts.map +1 -0
- package/build/git/extractor.test.js +267 -0
- package/build/git/extractor.test.js.map +1 -0
- package/build/git/index.d.ts +10 -0
- package/build/git/index.d.ts.map +1 -0
- package/build/git/index.js +11 -0
- package/build/git/index.js.map +1 -0
- package/build/git/indexer.d.ts +50 -0
- package/build/git/indexer.d.ts.map +1 -0
- package/build/git/indexer.js +588 -0
- package/build/git/indexer.js.map +1 -0
- package/build/git/indexer.test.d.ts +2 -0
- package/build/git/indexer.test.d.ts.map +1 -0
- package/build/git/indexer.test.js +867 -0
- package/build/git/indexer.test.js.map +1 -0
- package/build/git/sync/synchronizer.d.ts +43 -0
- package/build/git/sync/synchronizer.d.ts.map +1 -0
- package/build/git/sync/synchronizer.js +108 -0
- package/build/git/sync/synchronizer.js.map +1 -0
- package/build/git/sync/synchronizer.test.d.ts +2 -0
- package/build/git/sync/synchronizer.test.d.ts.map +1 -0
- package/build/git/sync/synchronizer.test.js +188 -0
- package/build/git/sync/synchronizer.test.js.map +1 -0
- package/build/git/types.d.ts +159 -0
- package/build/git/types.d.ts.map +1 -0
- package/build/git/types.js +5 -0
- package/build/git/types.js.map +1 -0
- package/build/index.js +18 -0
- package/build/index.js.map +1 -1
- package/build/tools/git-history.d.ts +10 -0
- package/build/tools/git-history.d.ts.map +1 -0
- package/build/tools/git-history.js +144 -0
- package/build/tools/git-history.js.map +1 -0
- package/build/tools/index.d.ts +2 -0
- package/build/tools/index.d.ts.map +1 -1
- package/build/tools/index.js +4 -0
- package/build/tools/index.js.map +1 -1
- package/build/tools/schemas.d.ts +24 -0
- package/build/tools/schemas.d.ts.map +1 -1
- package/build/tools/schemas.js +64 -0
- package/build/tools/schemas.js.map +1 -1
- package/package.json +1 -1
- package/src/code/indexer.ts +49 -7
- package/src/git/chunker.test.ts +284 -0
- package/src/git/chunker.ts +256 -0
- package/src/git/config.ts +173 -0
- package/src/git/extractor.integration.test.ts +221 -0
- package/src/git/extractor.test.ts +403 -0
- package/src/git/extractor.ts +284 -0
- package/src/git/index.ts +31 -0
- package/src/git/indexer.test.ts +1089 -0
- package/src/git/indexer.ts +745 -0
- package/src/git/sync/synchronizer.test.ts +250 -0
- package/src/git/sync/synchronizer.ts +122 -0
- package/src/git/types.ts +192 -0
- package/src/index.ts +42 -0
- package/src/tools/git-history.ts +208 -0
- package/src/tools/index.ts +7 -0
- package/src/tools/schemas.ts +75 -0
- package/vitest.config.ts +2 -0
|
@@ -0,0 +1,867 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach } from "vitest";
|
|
2
|
+
import { GitHistoryIndexer } from "./indexer.js";
|
|
3
|
+
import { DEFAULT_GIT_CONFIG } from "./config.js";
|
|
4
|
+
// Create mock instances
|
|
5
|
+
const mockExtractorInstance = {
|
|
6
|
+
validateRepository: vi.fn(),
|
|
7
|
+
getLatestCommitHash: vi.fn(),
|
|
8
|
+
getRemoteUrl: vi.fn(),
|
|
9
|
+
getCommits: vi.fn(),
|
|
10
|
+
getCommitDiff: vi.fn(),
|
|
11
|
+
};
|
|
12
|
+
const mockChunkerInstance = {
|
|
13
|
+
classifyCommitType: vi.fn(),
|
|
14
|
+
createChunks: vi.fn(),
|
|
15
|
+
generateChunkId: vi.fn(),
|
|
16
|
+
};
|
|
17
|
+
const mockSynchronizerInstance = {
|
|
18
|
+
initialize: vi.fn(),
|
|
19
|
+
getLastCommitHash: vi.fn(),
|
|
20
|
+
getLastIndexedAt: vi.fn(),
|
|
21
|
+
getCommitsIndexed: vi.fn(),
|
|
22
|
+
updateSnapshot: vi.fn(),
|
|
23
|
+
deleteSnapshot: vi.fn(),
|
|
24
|
+
};
|
|
25
|
+
// Mock dependencies using class syntax
|
|
26
|
+
vi.mock("./extractor.js", () => {
|
|
27
|
+
return {
|
|
28
|
+
GitExtractor: class MockGitExtractor {
|
|
29
|
+
validateRepository = mockExtractorInstance.validateRepository;
|
|
30
|
+
getLatestCommitHash = mockExtractorInstance.getLatestCommitHash;
|
|
31
|
+
getRemoteUrl = mockExtractorInstance.getRemoteUrl;
|
|
32
|
+
getCommits = mockExtractorInstance.getCommits;
|
|
33
|
+
getCommitDiff = mockExtractorInstance.getCommitDiff;
|
|
34
|
+
},
|
|
35
|
+
normalizeRemoteUrl: (url) => {
|
|
36
|
+
if (!url)
|
|
37
|
+
return "";
|
|
38
|
+
return url
|
|
39
|
+
.replace(/^git@[^:]+:/, "")
|
|
40
|
+
.replace(/^https?:\/\/[^/]+\//, "")
|
|
41
|
+
.replace(/\.git$/, "");
|
|
42
|
+
},
|
|
43
|
+
};
|
|
44
|
+
});
|
|
45
|
+
vi.mock("./chunker.js", () => {
|
|
46
|
+
return {
|
|
47
|
+
CommitChunker: class MockCommitChunker {
|
|
48
|
+
classifyCommitType = mockChunkerInstance.classifyCommitType;
|
|
49
|
+
createChunks = mockChunkerInstance.createChunks;
|
|
50
|
+
generateChunkId = mockChunkerInstance.generateChunkId;
|
|
51
|
+
},
|
|
52
|
+
};
|
|
53
|
+
});
|
|
54
|
+
vi.mock("./sync/synchronizer.js", () => {
|
|
55
|
+
return {
|
|
56
|
+
GitSynchronizer: class MockGitSynchronizer {
|
|
57
|
+
initialize = mockSynchronizerInstance.initialize;
|
|
58
|
+
getLastCommitHash = mockSynchronizerInstance.getLastCommitHash;
|
|
59
|
+
getLastIndexedAt = mockSynchronizerInstance.getLastIndexedAt;
|
|
60
|
+
getCommitsIndexed = mockSynchronizerInstance.getCommitsIndexed;
|
|
61
|
+
updateSnapshot = mockSynchronizerInstance.updateSnapshot;
|
|
62
|
+
deleteSnapshot = mockSynchronizerInstance.deleteSnapshot;
|
|
63
|
+
},
|
|
64
|
+
};
|
|
65
|
+
});
|
|
66
|
+
vi.mock("node:fs", () => ({
|
|
67
|
+
promises: {
|
|
68
|
+
realpath: vi.fn().mockImplementation((p) => Promise.resolve(p)),
|
|
69
|
+
},
|
|
70
|
+
}));
|
|
71
|
+
describe("GitHistoryIndexer", () => {
|
|
72
|
+
let indexer;
|
|
73
|
+
let mockQdrant;
|
|
74
|
+
let mockEmbeddings;
|
|
75
|
+
const config = { ...DEFAULT_GIT_CONFIG };
|
|
76
|
+
beforeEach(() => {
|
|
77
|
+
vi.clearAllMocks();
|
|
78
|
+
// Reset mock instances
|
|
79
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
80
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123def456");
|
|
81
|
+
mockExtractorInstance.getRemoteUrl.mockResolvedValue("git@github.com:test/repo.git");
|
|
82
|
+
mockExtractorInstance.getCommits.mockResolvedValue([]);
|
|
83
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("");
|
|
84
|
+
mockChunkerInstance.classifyCommitType.mockReturnValue("feat");
|
|
85
|
+
mockChunkerInstance.createChunks.mockReturnValue([
|
|
86
|
+
{
|
|
87
|
+
content: "test content",
|
|
88
|
+
metadata: {
|
|
89
|
+
commitHash: "abc123",
|
|
90
|
+
shortHash: "abc12",
|
|
91
|
+
author: "Test",
|
|
92
|
+
authorEmail: "test@example.com",
|
|
93
|
+
date: "2024-01-15T10:00:00Z",
|
|
94
|
+
subject: "test commit",
|
|
95
|
+
commitType: "feat",
|
|
96
|
+
files: [],
|
|
97
|
+
insertions: 0,
|
|
98
|
+
deletions: 0,
|
|
99
|
+
repoPath: "/test/repo",
|
|
100
|
+
},
|
|
101
|
+
},
|
|
102
|
+
]);
|
|
103
|
+
mockChunkerInstance.generateChunkId.mockReturnValue("gitcommit_abc123");
|
|
104
|
+
mockSynchronizerInstance.initialize.mockResolvedValue(false);
|
|
105
|
+
mockSynchronizerInstance.getLastCommitHash.mockReturnValue(null);
|
|
106
|
+
mockSynchronizerInstance.getLastIndexedAt.mockReturnValue(null);
|
|
107
|
+
mockSynchronizerInstance.getCommitsIndexed.mockReturnValue(0);
|
|
108
|
+
mockSynchronizerInstance.updateSnapshot.mockResolvedValue(undefined);
|
|
109
|
+
mockSynchronizerInstance.deleteSnapshot.mockResolvedValue(undefined);
|
|
110
|
+
mockQdrant = {
|
|
111
|
+
collectionExists: vi.fn().mockResolvedValue(false),
|
|
112
|
+
createCollection: vi.fn().mockResolvedValue(undefined),
|
|
113
|
+
deleteCollection: vi.fn().mockResolvedValue(undefined),
|
|
114
|
+
getCollectionInfo: vi
|
|
115
|
+
.fn()
|
|
116
|
+
.mockResolvedValue({ pointsCount: 0, hybridEnabled: false }),
|
|
117
|
+
addPoints: vi.fn().mockResolvedValue(undefined),
|
|
118
|
+
addPointsWithSparse: vi.fn().mockResolvedValue(undefined),
|
|
119
|
+
search: vi.fn().mockResolvedValue([]),
|
|
120
|
+
hybridSearch: vi.fn().mockResolvedValue([]),
|
|
121
|
+
getPoint: vi.fn().mockResolvedValue(null),
|
|
122
|
+
};
|
|
123
|
+
mockEmbeddings = {
|
|
124
|
+
getDimensions: vi.fn().mockReturnValue(768),
|
|
125
|
+
embed: vi.fn().mockResolvedValue({ embedding: Array(768).fill(0.5) }),
|
|
126
|
+
embedBatch: vi
|
|
127
|
+
.fn()
|
|
128
|
+
.mockResolvedValue([{ embedding: Array(768).fill(0.5) }]),
|
|
129
|
+
};
|
|
130
|
+
indexer = new GitHistoryIndexer(mockQdrant, mockEmbeddings, config);
|
|
131
|
+
});
|
|
132
|
+
describe("indexHistory", () => {
|
|
133
|
+
it("should index commits successfully", async () => {
|
|
134
|
+
const mockCommits = [
|
|
135
|
+
{
|
|
136
|
+
hash: "abc123",
|
|
137
|
+
shortHash: "abc12",
|
|
138
|
+
author: "John Doe",
|
|
139
|
+
authorEmail: "john@example.com",
|
|
140
|
+
date: new Date("2024-01-15"),
|
|
141
|
+
subject: "feat: add feature",
|
|
142
|
+
body: "",
|
|
143
|
+
files: ["src/file.ts"],
|
|
144
|
+
insertions: 10,
|
|
145
|
+
deletions: 5,
|
|
146
|
+
},
|
|
147
|
+
];
|
|
148
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
149
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
150
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
151
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("diff content");
|
|
152
|
+
const stats = await indexer.indexHistory("/test/repo");
|
|
153
|
+
expect(stats.status).toBe("completed");
|
|
154
|
+
expect(stats.commitsScanned).toBe(1);
|
|
155
|
+
expect(mockQdrant.createCollection).toHaveBeenCalled();
|
|
156
|
+
expect(mockEmbeddings.embedBatch).toHaveBeenCalled();
|
|
157
|
+
expect(mockQdrant.addPoints).toHaveBeenCalled();
|
|
158
|
+
});
|
|
159
|
+
it("should fail for non-git repository", async () => {
|
|
160
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(false);
|
|
161
|
+
const stats = await indexer.indexHistory("/not/a/repo");
|
|
162
|
+
expect(stats.status).toBe("failed");
|
|
163
|
+
expect(stats.errors?.some((e) => e.includes("Not a valid git repository"))).toBe(true);
|
|
164
|
+
});
|
|
165
|
+
it("should handle empty repository", async () => {
|
|
166
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
167
|
+
mockExtractorInstance.getCommits.mockResolvedValue([]);
|
|
168
|
+
const stats = await indexer.indexHistory("/empty/repo");
|
|
169
|
+
expect(stats.status).toBe("completed");
|
|
170
|
+
expect(stats.commitsScanned).toBe(0);
|
|
171
|
+
expect(stats.chunksCreated).toBe(0);
|
|
172
|
+
});
|
|
173
|
+
it("should delete existing collection when forceReindex is true", async () => {
|
|
174
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
175
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
176
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
177
|
+
mockExtractorInstance.getCommits.mockResolvedValue([]);
|
|
178
|
+
await indexer.indexHistory("/test/repo", { forceReindex: true });
|
|
179
|
+
expect(mockQdrant.deleteCollection).toHaveBeenCalled();
|
|
180
|
+
expect(mockQdrant.createCollection).toHaveBeenCalled();
|
|
181
|
+
});
|
|
182
|
+
it("should call progress callback during indexing", async () => {
|
|
183
|
+
const mockCommits = [
|
|
184
|
+
{
|
|
185
|
+
hash: "abc123",
|
|
186
|
+
shortHash: "abc12",
|
|
187
|
+
author: "Test",
|
|
188
|
+
authorEmail: "test@example.com",
|
|
189
|
+
date: new Date(),
|
|
190
|
+
subject: "test",
|
|
191
|
+
body: "",
|
|
192
|
+
files: [],
|
|
193
|
+
insertions: 0,
|
|
194
|
+
deletions: 0,
|
|
195
|
+
},
|
|
196
|
+
];
|
|
197
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
198
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
199
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
200
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("");
|
|
201
|
+
const progressCallback = vi.fn();
|
|
202
|
+
await indexer.indexHistory("/test/repo", {}, progressCallback);
|
|
203
|
+
expect(progressCallback).toHaveBeenCalled();
|
|
204
|
+
expect(progressCallback).toHaveBeenCalledWith(expect.objectContaining({
|
|
205
|
+
phase: expect.stringMatching(/extracting|chunking|embedding|storing/),
|
|
206
|
+
}));
|
|
207
|
+
});
|
|
208
|
+
});
|
|
209
|
+
describe("searchHistory", () => {
|
|
210
|
+
beforeEach(() => {
|
|
211
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
212
|
+
});
|
|
213
|
+
it("should search indexed history", async () => {
|
|
214
|
+
mockQdrant.search.mockResolvedValue([
|
|
215
|
+
{
|
|
216
|
+
score: 0.9,
|
|
217
|
+
payload: {
|
|
218
|
+
content: "test content",
|
|
219
|
+
commitHash: "abc123",
|
|
220
|
+
shortHash: "abc12",
|
|
221
|
+
author: "John Doe",
|
|
222
|
+
date: "2024-01-15T10:00:00Z",
|
|
223
|
+
subject: "feat: add feature",
|
|
224
|
+
commitType: "feat",
|
|
225
|
+
files: ["src/file.ts"],
|
|
226
|
+
},
|
|
227
|
+
},
|
|
228
|
+
]);
|
|
229
|
+
const results = await indexer.searchHistory("/test/repo", "add feature");
|
|
230
|
+
expect(results).toHaveLength(1);
|
|
231
|
+
expect(results[0].shortHash).toBe("abc12");
|
|
232
|
+
expect(results[0].score).toBe(0.9);
|
|
233
|
+
expect(mockEmbeddings.embed).toHaveBeenCalledWith("add feature");
|
|
234
|
+
});
|
|
235
|
+
it("should throw error when history not indexed", async () => {
|
|
236
|
+
mockQdrant.collectionExists.mockResolvedValue(false);
|
|
237
|
+
await expect(indexer.searchHistory("/test/repo", "query")).rejects.toThrow("Git history not indexed");
|
|
238
|
+
});
|
|
239
|
+
it("should apply commit type filter", async () => {
|
|
240
|
+
mockQdrant.search.mockResolvedValue([]);
|
|
241
|
+
await indexer.searchHistory("/test/repo", "query", {
|
|
242
|
+
commitTypes: ["fix", "feat"],
|
|
243
|
+
});
|
|
244
|
+
expect(mockQdrant.search).toHaveBeenCalledWith(expect.any(String), expect.any(Array), expect.any(Number), expect.objectContaining({
|
|
245
|
+
must: expect.arrayContaining([
|
|
246
|
+
expect.objectContaining({
|
|
247
|
+
key: "commitType",
|
|
248
|
+
match: { any: ["fix", "feat"] },
|
|
249
|
+
}),
|
|
250
|
+
]),
|
|
251
|
+
}));
|
|
252
|
+
});
|
|
253
|
+
it("should apply date range filter", async () => {
|
|
254
|
+
mockQdrant.search.mockResolvedValue([]);
|
|
255
|
+
await indexer.searchHistory("/test/repo", "query", {
|
|
256
|
+
dateFrom: "2024-01-01",
|
|
257
|
+
dateTo: "2024-12-31",
|
|
258
|
+
});
|
|
259
|
+
expect(mockQdrant.search).toHaveBeenCalledWith(expect.any(String), expect.any(Array), expect.any(Number), expect.objectContaining({
|
|
260
|
+
must: expect.arrayContaining([
|
|
261
|
+
expect.objectContaining({
|
|
262
|
+
key: "date",
|
|
263
|
+
range: { gte: "2024-01-01" },
|
|
264
|
+
}),
|
|
265
|
+
expect.objectContaining({
|
|
266
|
+
key: "date",
|
|
267
|
+
range: { lte: "2024-12-31" },
|
|
268
|
+
}),
|
|
269
|
+
]),
|
|
270
|
+
}));
|
|
271
|
+
});
|
|
272
|
+
it("should apply score threshold", async () => {
|
|
273
|
+
mockQdrant.search.mockResolvedValue([
|
|
274
|
+
{ score: 0.9, payload: { commitHash: "abc" } },
|
|
275
|
+
{ score: 0.5, payload: { commitHash: "xyz" } },
|
|
276
|
+
]);
|
|
277
|
+
const results = await indexer.searchHistory("/test/repo", "query", {
|
|
278
|
+
scoreThreshold: 0.7,
|
|
279
|
+
});
|
|
280
|
+
expect(results).toHaveLength(1);
|
|
281
|
+
expect(results[0].score).toBe(0.9);
|
|
282
|
+
});
|
|
283
|
+
});
|
|
284
|
+
describe("getIndexStatus", () => {
|
|
285
|
+
it("should return not_indexed when collection does not exist", async () => {
|
|
286
|
+
mockQdrant.collectionExists.mockResolvedValue(false);
|
|
287
|
+
const status = await indexer.getIndexStatus("/test/repo");
|
|
288
|
+
expect(status.status).toBe("not_indexed");
|
|
289
|
+
expect(status.isIndexed).toBe(false);
|
|
290
|
+
});
|
|
291
|
+
it("should return indexed when collection exists and complete", async () => {
|
|
292
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
293
|
+
mockQdrant.getPoint.mockResolvedValue({
|
|
294
|
+
payload: {
|
|
295
|
+
indexingComplete: true,
|
|
296
|
+
completedAt: "2024-01-15T10:00:00Z",
|
|
297
|
+
},
|
|
298
|
+
});
|
|
299
|
+
mockQdrant.getCollectionInfo.mockResolvedValue({
|
|
300
|
+
pointsCount: 101,
|
|
301
|
+
hybridEnabled: false,
|
|
302
|
+
});
|
|
303
|
+
const status = await indexer.getIndexStatus("/test/repo");
|
|
304
|
+
expect(status.status).toBe("indexed");
|
|
305
|
+
expect(status.isIndexed).toBe(true);
|
|
306
|
+
expect(status.chunksCount).toBe(100); // 101 - 1 for metadata
|
|
307
|
+
});
|
|
308
|
+
it("should return indexing when in progress", async () => {
|
|
309
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
310
|
+
mockQdrant.getPoint.mockResolvedValue({
|
|
311
|
+
payload: {
|
|
312
|
+
indexingComplete: false,
|
|
313
|
+
startedAt: "2024-01-15T10:00:00Z",
|
|
314
|
+
},
|
|
315
|
+
});
|
|
316
|
+
mockQdrant.getCollectionInfo.mockResolvedValue({
|
|
317
|
+
pointsCount: 50,
|
|
318
|
+
hybridEnabled: false,
|
|
319
|
+
});
|
|
320
|
+
const status = await indexer.getIndexStatus("/test/repo");
|
|
321
|
+
expect(status.status).toBe("indexing");
|
|
322
|
+
expect(status.isIndexed).toBe(false);
|
|
323
|
+
});
|
|
324
|
+
});
|
|
325
|
+
describe("indexNewCommits", () => {
|
|
326
|
+
it("should index only new commits", async () => {
|
|
327
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
328
|
+
const mockNewCommits = [
|
|
329
|
+
{
|
|
330
|
+
hash: "new123",
|
|
331
|
+
shortHash: "new12",
|
|
332
|
+
author: "Test",
|
|
333
|
+
authorEmail: "test@example.com",
|
|
334
|
+
date: new Date(),
|
|
335
|
+
subject: "new commit",
|
|
336
|
+
body: "",
|
|
337
|
+
files: [],
|
|
338
|
+
insertions: 5,
|
|
339
|
+
deletions: 2,
|
|
340
|
+
},
|
|
341
|
+
];
|
|
342
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockNewCommits);
|
|
343
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("");
|
|
344
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("new123");
|
|
345
|
+
mockSynchronizerInstance.initialize.mockResolvedValue(true);
|
|
346
|
+
mockSynchronizerInstance.getLastCommitHash.mockReturnValue("old123");
|
|
347
|
+
mockSynchronizerInstance.getCommitsIndexed.mockReturnValue(50);
|
|
348
|
+
const stats = await indexer.indexNewCommits("/test/repo");
|
|
349
|
+
expect(stats.newCommits).toBe(1);
|
|
350
|
+
expect(stats.chunksAdded).toBe(1);
|
|
351
|
+
});
|
|
352
|
+
it("should throw error when no snapshot exists", async () => {
|
|
353
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
354
|
+
mockSynchronizerInstance.initialize.mockResolvedValue(false);
|
|
355
|
+
await expect(indexer.indexNewCommits("/test/repo")).rejects.toThrow("No previous snapshot found");
|
|
356
|
+
});
|
|
357
|
+
it("should return 0 when no new commits", async () => {
|
|
358
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
359
|
+
mockExtractorInstance.getCommits.mockResolvedValue([]);
|
|
360
|
+
mockSynchronizerInstance.initialize.mockResolvedValue(true);
|
|
361
|
+
mockSynchronizerInstance.getLastCommitHash.mockReturnValue("abc123");
|
|
362
|
+
const stats = await indexer.indexNewCommits("/test/repo");
|
|
363
|
+
expect(stats.newCommits).toBe(0);
|
|
364
|
+
expect(stats.chunksAdded).toBe(0);
|
|
365
|
+
});
|
|
366
|
+
});
|
|
367
|
+
describe("clearIndex", () => {
|
|
368
|
+
it("should delete collection and snapshot", async () => {
|
|
369
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
370
|
+
await indexer.clearIndex("/test/repo");
|
|
371
|
+
expect(mockQdrant.deleteCollection).toHaveBeenCalled();
|
|
372
|
+
expect(mockSynchronizerInstance.deleteSnapshot).toHaveBeenCalled();
|
|
373
|
+
});
|
|
374
|
+
it("should not throw when collection does not exist", async () => {
|
|
375
|
+
mockQdrant.collectionExists.mockResolvedValue(false);
|
|
376
|
+
await expect(indexer.clearIndex("/test/repo")).resolves.not.toThrow();
|
|
377
|
+
});
|
|
378
|
+
it("should ignore snapshot deletion errors", async () => {
|
|
379
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
380
|
+
mockSynchronizerInstance.deleteSnapshot.mockRejectedValue(new Error("Snapshot deletion failed"));
|
|
381
|
+
await expect(indexer.clearIndex("/test/repo")).resolves.not.toThrow();
|
|
382
|
+
expect(mockQdrant.deleteCollection).toHaveBeenCalled();
|
|
383
|
+
});
|
|
384
|
+
});
|
|
385
|
+
describe("searchHistory - hybrid search", () => {
|
|
386
|
+
beforeEach(() => {
|
|
387
|
+
mockQdrant.collectionExists.mockResolvedValue(true);
|
|
388
|
+
});
|
|
389
|
+
it("should use hybrid search when collection supports it and option enabled", async () => {
|
|
390
|
+
mockQdrant.getCollectionInfo.mockResolvedValue({
|
|
391
|
+
pointsCount: 100,
|
|
392
|
+
hybridEnabled: true,
|
|
393
|
+
});
|
|
394
|
+
mockQdrant.hybridSearch.mockResolvedValue([
|
|
395
|
+
{
|
|
396
|
+
score: 0.95,
|
|
397
|
+
payload: {
|
|
398
|
+
content: "test",
|
|
399
|
+
commitHash: "abc123",
|
|
400
|
+
shortHash: "abc12",
|
|
401
|
+
author: "Test",
|
|
402
|
+
date: "2024-01-15",
|
|
403
|
+
subject: "test commit",
|
|
404
|
+
commitType: "feat",
|
|
405
|
+
files: [],
|
|
406
|
+
},
|
|
407
|
+
},
|
|
408
|
+
]);
|
|
409
|
+
const results = await indexer.searchHistory("/test/repo", "query", {
|
|
410
|
+
useHybrid: true,
|
|
411
|
+
});
|
|
412
|
+
expect(mockQdrant.hybridSearch).toHaveBeenCalled();
|
|
413
|
+
expect(mockQdrant.search).not.toHaveBeenCalled();
|
|
414
|
+
expect(results).toHaveLength(1);
|
|
415
|
+
});
|
|
416
|
+
it("should fall back to dense search when hybrid not enabled on collection", async () => {
|
|
417
|
+
mockQdrant.getCollectionInfo.mockResolvedValue({
|
|
418
|
+
pointsCount: 100,
|
|
419
|
+
hybridEnabled: false,
|
|
420
|
+
});
|
|
421
|
+
mockQdrant.search.mockResolvedValue([]);
|
|
422
|
+
await indexer.searchHistory("/test/repo", "query", {
|
|
423
|
+
useHybrid: true,
|
|
424
|
+
});
|
|
425
|
+
expect(mockQdrant.search).toHaveBeenCalled();
|
|
426
|
+
expect(mockQdrant.hybridSearch).not.toHaveBeenCalled();
|
|
427
|
+
});
|
|
428
|
+
it("should apply authors filter", async () => {
|
|
429
|
+
mockQdrant.search.mockResolvedValue([]);
|
|
430
|
+
await indexer.searchHistory("/test/repo", "query", {
|
|
431
|
+
authors: ["John Doe", "Jane Smith"],
|
|
432
|
+
});
|
|
433
|
+
expect(mockQdrant.search).toHaveBeenCalledWith(expect.any(String), expect.any(Array), expect.any(Number), expect.objectContaining({
|
|
434
|
+
must: expect.arrayContaining([
|
|
435
|
+
expect.objectContaining({
|
|
436
|
+
should: expect.arrayContaining([
|
|
437
|
+
expect.objectContaining({
|
|
438
|
+
key: "author",
|
|
439
|
+
match: { text: "John Doe" },
|
|
440
|
+
}),
|
|
441
|
+
expect.objectContaining({
|
|
442
|
+
key: "author",
|
|
443
|
+
match: { text: "Jane Smith" },
|
|
444
|
+
}),
|
|
445
|
+
]),
|
|
446
|
+
}),
|
|
447
|
+
]),
|
|
448
|
+
}));
|
|
449
|
+
});
|
|
450
|
+
});
|
|
451
|
+
describe("indexHistory - error handling", () => {
|
|
452
|
+
it("should handle commit processing errors gracefully", async () => {
|
|
453
|
+
const mockCommits = [
|
|
454
|
+
{
|
|
455
|
+
hash: "abc123",
|
|
456
|
+
shortHash: "abc12",
|
|
457
|
+
author: "Test",
|
|
458
|
+
authorEmail: "test@example.com",
|
|
459
|
+
date: new Date(),
|
|
460
|
+
subject: "test",
|
|
461
|
+
body: "",
|
|
462
|
+
files: [],
|
|
463
|
+
insertions: 0,
|
|
464
|
+
deletions: 0,
|
|
465
|
+
},
|
|
466
|
+
];
|
|
467
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
468
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
469
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
470
|
+
mockExtractorInstance.getCommitDiff.mockRejectedValue(new Error("Diff extraction failed"));
|
|
471
|
+
const stats = await indexer.indexHistory("/test/repo");
|
|
472
|
+
expect(stats.errors).toBeDefined();
|
|
473
|
+
expect(stats.errors?.some((e) => e.includes("abc12"))).toBe(true);
|
|
474
|
+
});
|
|
475
|
+
it("should handle batch embedding errors with partial status", async () => {
|
|
476
|
+
const mockCommits = [
|
|
477
|
+
{
|
|
478
|
+
hash: "abc123",
|
|
479
|
+
shortHash: "abc12",
|
|
480
|
+
author: "Test",
|
|
481
|
+
authorEmail: "test@example.com",
|
|
482
|
+
date: new Date(),
|
|
483
|
+
subject: "test",
|
|
484
|
+
body: "",
|
|
485
|
+
files: [],
|
|
486
|
+
insertions: 0,
|
|
487
|
+
deletions: 0,
|
|
488
|
+
},
|
|
489
|
+
];
|
|
490
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
491
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
492
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
493
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("");
|
|
494
|
+
mockEmbeddings.embedBatch.mockRejectedValue(new Error("Embedding API error"));
|
|
495
|
+
const stats = await indexer.indexHistory("/test/repo");
|
|
496
|
+
expect(stats.status).toBe("partial");
|
|
497
|
+
expect(stats.errors?.some((e) => e.includes("batch"))).toBe(true);
|
|
498
|
+
});
|
|
499
|
+
it("should handle snapshot save failure gracefully", async () => {
|
|
500
|
+
const consoleSpy = vi
|
|
501
|
+
.spyOn(console, "error")
|
|
502
|
+
.mockImplementation(() => { });
|
|
503
|
+
const mockCommits = [
|
|
504
|
+
{
|
|
505
|
+
hash: "abc123",
|
|
506
|
+
shortHash: "abc12",
|
|
507
|
+
author: "Test",
|
|
508
|
+
authorEmail: "test@example.com",
|
|
509
|
+
date: new Date(),
|
|
510
|
+
subject: "test",
|
|
511
|
+
body: "",
|
|
512
|
+
files: [],
|
|
513
|
+
insertions: 0,
|
|
514
|
+
deletions: 0,
|
|
515
|
+
},
|
|
516
|
+
];
|
|
517
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
518
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
519
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
520
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("");
|
|
521
|
+
mockSynchronizerInstance.updateSnapshot.mockRejectedValue(new Error("Snapshot write failed"));
|
|
522
|
+
const stats = await indexer.indexHistory("/test/repo");
|
|
523
|
+
expect(stats.status).toBe("completed");
|
|
524
|
+
expect(stats.errors?.some((e) => e.includes("Snapshot"))).toBe(true);
|
|
525
|
+
consoleSpy.mockRestore();
|
|
526
|
+
});
|
|
527
|
+
it("should handle storeIndexingMarker errors silently", async () => {
|
|
528
|
+
const consoleSpy = vi
|
|
529
|
+
.spyOn(console, "error")
|
|
530
|
+
.mockImplementation(() => { });
|
|
531
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
532
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
533
|
+
mockExtractorInstance.getCommits.mockResolvedValue([]);
|
|
534
|
+
mockQdrant.addPoints.mockRejectedValueOnce(new Error("Marker failed"));
|
|
535
|
+
const stats = await indexer.indexHistory("/test/repo");
|
|
536
|
+
expect(stats.status).toBe("completed");
|
|
537
|
+
expect(consoleSpy).toHaveBeenCalled();
|
|
538
|
+
consoleSpy.mockRestore();
|
|
539
|
+
});
|
|
540
|
+
it("should use hybrid search for indexing when enabled", async () => {
|
|
541
|
+
const hybridConfig = {
|
|
542
|
+
...DEFAULT_GIT_CONFIG,
|
|
543
|
+
enableHybridSearch: true,
|
|
544
|
+
};
|
|
545
|
+
const hybridIndexer = new GitHistoryIndexer(mockQdrant, mockEmbeddings, hybridConfig);
|
|
546
|
+
const mockCommits = [
|
|
547
|
+
{
|
|
548
|
+
hash: "abc123",
|
|
549
|
+
shortHash: "abc12",
|
|
550
|
+
author: "Test",
|
|
551
|
+
authorEmail: "test@example.com",
|
|
552
|
+
date: new Date(),
|
|
553
|
+
subject: "test",
|
|
554
|
+
body: "",
|
|
555
|
+
files: [],
|
|
556
|
+
insertions: 0,
|
|
557
|
+
deletions: 0,
|
|
558
|
+
},
|
|
559
|
+
];
|
|
560
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
561
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
562
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
563
|
+
mockExtractorInstance.getCommitDiff.mockResolvedValue("");
|
|
564
|
+
mockQdrant.getCollectionInfo.mockResolvedValue({
|
|
565
|
+
pointsCount: 0,
|
|
566
|
+
hybridEnabled: true,
|
|
567
|
+
});
|
|
568
|
+
await hybridIndexer.indexHistory("/test/repo");
|
|
569
|
+
expect(mockQdrant.createCollection).toHaveBeenCalledWith(expect.any(String), 768, "Cosine", true);
|
|
570
|
+
expect(mockQdrant.addPointsWithSparse).toHaveBeenCalled();
|
|
571
|
+
});
|
|
572
|
+
it("should handle chunks with no content after processing", async () => {
|
|
573
|
+
const mockCommits = [
|
|
574
|
+
{
|
|
575
|
+
hash: "abc123",
|
|
576
|
+
shortHash: "abc12",
|
|
577
|
+
author: "Test",
|
|
578
|
+
authorEmail: "test@example.com",
|
|
579
|
+
date: new Date(),
|
|
580
|
+
subject: "test",
|
|
581
|
+
body: "",
|
|
582
|
+
files: [],
|
|
583
|
+
insertions: 0,
|
|
584
|
+
deletions: 0,
|
|
585
|
+
},
|
|
586
|
+
];
|
|
587
|
+
mockExtractorInstance.validateRepository.mockResolvedValue(true);
|
|
588
|
+
mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
|
|
589
|
+
mockExtractorInstance.getCommits.mockResolvedValue(mockCommits);
|
|
590
|
+
mockChunkerInstance.createChunks.mockReturnValue([]);
|
|
591
|
+
const stats = await indexer.indexHistory("/test/repo");
|
|
592
|
+
expect(stats.status).toBe("completed");
|
|
593
|
+
expect(stats.chunksCreated).toBe(0);
|
|
594
|
+
});
|
|
595
|
+
});
|
|
596
|
+
// Extra coverage for indexNewCommits: error paths, progress reporting, and hybrid mode.
describe("indexNewCommits - additional coverage", () => {
  // Single synthetic commit shared by the incremental-indexing tests below.
  const buildNewCommit = () => ({
    hash: "new123",
    shortHash: "new12",
    author: "Test",
    authorEmail: "test@example.com",
    date: new Date(),
    subject: "new commit",
    body: "",
    files: [],
    insertions: 5,
    deletions: 2,
  });

  // Wires extractor/synchronizer mocks so an incremental run discovers one new commit.
  const primeIncrementalMocks = () => {
    mockExtractorInstance.getCommits.mockResolvedValue([buildNewCommit()]);
    mockExtractorInstance.getCommitDiff.mockResolvedValue("");
    mockExtractorInstance.getLatestCommitHash.mockResolvedValue("new123");
    mockSynchronizerInstance.initialize.mockResolvedValue(true);
    mockSynchronizerInstance.getLastCommitHash.mockReturnValue("old123");
    mockSynchronizerInstance.getCommitsIndexed.mockReturnValue(50);
  };

  it("should throw error when collection does not exist", async () => {
    mockQdrant.collectionExists.mockResolvedValue(false);
    await expect(indexer.indexNewCommits("/test/repo")).rejects.toThrow("Git history not indexed");
  });

  it("should throw error when lastCommitHash is null", async () => {
    mockQdrant.collectionExists.mockResolvedValue(true);
    mockSynchronizerInstance.initialize.mockResolvedValue(true);
    mockSynchronizerInstance.getLastCommitHash.mockReturnValue(null);
    await expect(indexer.indexNewCommits("/test/repo")).rejects.toThrow("Invalid snapshot: no last commit hash");
  });

  it("should call progress callback during incremental indexing", async () => {
    mockQdrant.collectionExists.mockResolvedValue(true);
    primeIncrementalMocks();
    const progressCallback = vi.fn();
    await indexer.indexNewCommits("/test/repo", progressCallback);
    expect(progressCallback).toHaveBeenCalled();
    // Every reported phase must be one of the known pipeline stages.
    expect(progressCallback).toHaveBeenCalledWith(
      expect.objectContaining({
        phase: expect.stringMatching(/extracting|chunking|embedding|storing/),
      }),
    );
  });

  it("should use hybrid search for incremental indexing when enabled", async () => {
    // Build a separate indexer whose config opts into sparse+dense (hybrid) vectors.
    const hybridConfig = {
      ...DEFAULT_GIT_CONFIG,
      enableHybridSearch: true,
    };
    const hybridIndexer = new GitHistoryIndexer(mockQdrant, mockEmbeddings, hybridConfig);
    mockQdrant.collectionExists.mockResolvedValue(true);
    primeIncrementalMocks();
    await hybridIndexer.indexNewCommits("/test/repo");
    expect(mockQdrant.addPointsWithSparse).toHaveBeenCalled();
  });
});
|
|
667
|
+
// Extra coverage for getIndexStatus: legacy collections and snapshot/marker fallbacks.
describe("getIndexStatus - additional coverage", () => {
  // Stubs the collection lookups: existence, completion-marker point, and point count.
  const stubCollection = (pointsCount, markerPoint) => {
    mockQdrant.collectionExists.mockResolvedValue(true);
    mockQdrant.getPoint.mockResolvedValue(markerPoint);
    mockQdrant.getCollectionInfo.mockResolvedValue({ pointsCount, hybridEnabled: false });
  };

  it("should return indexed for legacy collection without marker but with content", async () => {
    stubCollection(50, null); // No marker
    mockSynchronizerInstance.initialize.mockResolvedValue(true);
    mockSynchronizerInstance.getCommitsIndexed.mockReturnValue(25);
    mockSynchronizerInstance.getLastCommitHash.mockReturnValue("abc123");
    const status = await indexer.getIndexStatus("/test/repo");
    expect(status.status).toBe("indexed");
    expect(status.isIndexed).toBe(true);
    expect(status.chunksCount).toBe(50);
    expect(status.commitsCount).toBe(25);
    expect(status.lastCommitHash).toBe("abc123");
  });

  it("should return not_indexed for empty legacy collection", async () => {
    stubCollection(0, null); // No marker
    const status = await indexer.getIndexStatus("/test/repo");
    expect(status.status).toBe("not_indexed");
    expect(status.isIndexed).toBe(false);
    expect(status.chunksCount).toBe(0);
  });

  it("should include snapshot data when available for indexed status", async () => {
    stubCollection(101, { payload: { indexingComplete: true } });
    mockSynchronizerInstance.initialize.mockResolvedValue(true);
    mockSynchronizerInstance.getCommitsIndexed.mockReturnValue(100);
    mockSynchronizerInstance.getLastCommitHash.mockReturnValue("latest123");
    mockSynchronizerInstance.getLastIndexedAt.mockReturnValue(new Date("2024-01-15T10:00:00Z"));
    const status = await indexer.getIndexStatus("/test/repo");
    expect(status.isIndexed).toBe(true);
    expect(status.commitsCount).toBe(100);
    expect(status.lastCommitHash).toBe("latest123");
    expect(status.lastIndexedAt).toEqual(new Date("2024-01-15T10:00:00Z"));
  });

  it("should fall back to marker completedAt when no snapshot", async () => {
    stubCollection(51, {
      payload: {
        indexingComplete: true,
        completedAt: "2024-01-20T15:30:00Z",
      },
    });
    // Synchronizer has no snapshot, so the marker's completedAt must win.
    mockSynchronizerInstance.initialize.mockResolvedValue(false);
    const status = await indexer.getIndexStatus("/test/repo");
    expect(status.isIndexed).toBe(true);
    expect(status.lastIndexedAt).toEqual(new Date("2024-01-20T15:30:00Z"));
    expect(status.commitsCount).toBeUndefined();
  });
});
|
|
737
|
+
// Validation of the optional dateFrom/dateTo filters accepted by searchHistory.
describe("searchHistory - date range validation", () => {
  beforeEach(() => {
    mockQdrant.collectionExists.mockResolvedValue(true);
    mockQdrant.search.mockResolvedValue([]);
  });

  it("should throw error when dateFrom is after dateTo", async () => {
    const invertedRange = { dateFrom: "2024-12-31", dateTo: "2024-01-01" };
    await expect(indexer.searchHistory("/test/repo", "query", invertedRange)).rejects.toThrow(
      "Invalid date range: dateFrom (2024-12-31) must be before dateTo (2024-01-01)",
    );
  });

  it("should allow valid date range", async () => {
    await indexer.searchHistory("/test/repo", "query", {
      dateFrom: "2024-01-01",
      dateTo: "2024-12-31",
    });
    expect(mockQdrant.search).toHaveBeenCalled();
  });

  it("should allow only dateFrom without dateTo", async () => {
    await indexer.searchHistory("/test/repo", "query", { dateFrom: "2024-01-01" });
    expect(mockQdrant.search).toHaveBeenCalled();
  });

  it("should allow only dateTo without dateFrom", async () => {
    await indexer.searchHistory("/test/repo", "query", { dateTo: "2024-12-31" });
    expect(mockQdrant.search).toHaveBeenCalled();
  });
});
|
|
768
|
+
// Verifies batch-level retry behavior during a full history indexing run.
describe("indexHistory - batch retry logic", () => {
  // One minimal commit record for the extractor mock.
  const makeCommit = () => ({
    hash: "abc123",
    shortHash: "abc12",
    author: "Test",
    authorEmail: "test@example.com",
    date: new Date(),
    subject: "test",
    body: "",
    files: [],
    insertions: 0,
    deletions: 0,
  });

  // One chunk derived from that commit, as the chunker mock would emit it.
  const makeChunk = () => ({
    content: "test content",
    metadata: {
      commitHash: "abc123",
      shortHash: "abc12",
      author: "Test",
      authorEmail: "test@example.com",
      date: new Date().toISOString(),
      subject: "test",
      commitType: "feat",
      files: [],
      insertions: 0,
      deletions: 0,
      repoPath: "/test/repo",
    },
  });

  // Shared wiring: a fresh (uncreated) collection with one commit producing one chunk.
  const primePipelineMocks = () => {
    mockExtractorInstance.validateRepository.mockResolvedValue(true);
    mockExtractorInstance.getLatestCommitHash.mockResolvedValue("abc123");
    mockExtractorInstance.getCommits.mockResolvedValue([makeCommit()]);
    mockExtractorInstance.getCommitDiff.mockResolvedValue("diff content");
    mockChunkerInstance.createChunks.mockReturnValue([makeChunk()]);
    mockChunkerInstance.generateChunkId.mockReturnValue("chunk-1");
    mockQdrant.collectionExists.mockResolvedValue(false);
    mockQdrant.getCollectionInfo.mockResolvedValue({ hybridEnabled: false });
  };

  it("should retry failed batches with exponential backoff", async () => {
    primePipelineMocks();
    // Fail first two attempts, succeed on third
    mockEmbeddings.embedBatch
      .mockRejectedValueOnce(new Error("Rate limit"))
      .mockRejectedValueOnce(new Error("Rate limit"))
      .mockResolvedValueOnce([{ embedding: [0.1, 0.2, 0.3] }]);
    const stats = await indexer.indexHistory("/test/repo");
    expect(stats.status).toBe("completed");
    expect(mockEmbeddings.embedBatch).toHaveBeenCalledTimes(3);
  });

  it("should mark as partial after exhausting retries", async () => {
    primePipelineMocks();
    // All retries fail
    mockEmbeddings.embedBatch.mockRejectedValue(new Error("Persistent error"));
    const stats = await indexer.indexHistory("/test/repo");
    expect(stats.status).toBe("partial");
    expect(stats.errors).toBeDefined();
    expect(stats.errors?.some((e) => e.includes("after 3 attempts"))).toBe(true);
  });
});
|
|
866
|
+
});
|
|
867
|
+
//# sourceMappingURL=indexer.test.js.map
|