codecritique 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -114
- package/package.json +10 -9
- package/src/content-retrieval.test.js +775 -0
- package/src/custom-documents.test.js +440 -0
- package/src/feedback-loader.test.js +529 -0
- package/src/llm.test.js +256 -0
- package/src/project-analyzer.test.js +747 -0
- package/src/rag-analyzer.js +12 -0
- package/src/rag-analyzer.test.js +1109 -0
- package/src/rag-review.test.js +317 -0
- package/src/setupTests.js +131 -0
- package/src/zero-shot-classifier-open.test.js +278 -0
- package/src/embeddings/cache-manager.js +0 -364
- package/src/embeddings/constants.js +0 -40
- package/src/embeddings/database.js +0 -921
- package/src/embeddings/errors.js +0 -208
- package/src/embeddings/factory.js +0 -447
- package/src/embeddings/file-processor.js +0 -851
- package/src/embeddings/model-manager.js +0 -337
- package/src/embeddings/similarity-calculator.js +0 -97
- package/src/embeddings/types.js +0 -113
- package/src/pr-history/analyzer.js +0 -579
- package/src/pr-history/bot-detector.js +0 -123
- package/src/pr-history/cli-utils.js +0 -204
- package/src/pr-history/comment-processor.js +0 -549
- package/src/pr-history/database.js +0 -819
- package/src/pr-history/github-client.js +0 -629
- package/src/technology-keywords.json +0 -753
- package/src/utils/command.js +0 -48
- package/src/utils/constants.js +0 -263
- package/src/utils/context-inference.js +0 -364
- package/src/utils/document-detection.js +0 -105
- package/src/utils/file-validation.js +0 -271
- package/src/utils/git.js +0 -232
- package/src/utils/language-detection.js +0 -170
- package/src/utils/logging.js +0 -24
- package/src/utils/markdown.js +0 -132
- package/src/utils/mobilebert-tokenizer.js +0 -141
- package/src/utils/pr-chunking.js +0 -276
- package/src/utils/string-utils.js +0 -28
|
@@ -0,0 +1,440 @@
|
|
|
1
|
+
import { CustomDocumentProcessor } from './custom-documents.js';
|
|
2
|
+
|
|
3
|
+
// Stub out the real ModelManager so no ONNX/transformer model is loaded.
// Each embedding method resolves to a deterministic mock vector.
// NOTE(review): relies on `vi` / `createMockEmbedding` being ambient
// (vitest `globals: true` + setupTests) — confirm vitest config.
vi.mock('./embeddings/model-manager.js', () => {
  class ModelManager {
    constructor() {
      this.calculateQueryEmbedding = vi.fn().mockResolvedValue(createMockEmbedding());
      this.calculateEmbedding = vi.fn().mockResolvedValue(createMockEmbedding());
      this.calculateEmbeddingBatch = vi.fn().mockResolvedValue([createMockEmbedding(), createMockEmbedding()]);
    }
  }
  return { ModelManager };
});
|
|
10
|
+
|
|
11
|
+
// Stub out the persistent CacheManager: storage/clear become no-ops and
// reads resolve to an empty list unless a test overrides them.
vi.mock('./embeddings/cache-manager.js', () => {
  class CacheManager {
    constructor() {
      this.storeCustomDocuments = vi.fn().mockResolvedValue(undefined);
      this.getCustomDocuments = vi.fn().mockResolvedValue([]);
      this.clearCustomDocuments = vi.fn().mockResolvedValue(undefined);
    }
  }
  return { CacheManager };
});
|
|
18
|
+
|
|
19
|
+
describe('CustomDocumentProcessor', () => {
|
|
20
|
+
let processor;
|
|
21
|
+
let mockModelManager;
|
|
22
|
+
let mockCacheManager;
|
|
23
|
+
|
|
24
|
+
beforeEach(() => {
  // Silence expected console noise while keeping other console methods intact.
  mockConsoleSelective('log', 'error');

  // Fresh spy-backed collaborators per test so call counts never leak across tests.
  const embeddingSpy = () => vi.fn().mockResolvedValue(createMockEmbedding());

  mockModelManager = {
    calculateQueryEmbedding: embeddingSpy(),
    calculateEmbedding: embeddingSpy(),
    calculateEmbeddingBatch: vi.fn().mockResolvedValue([createMockEmbedding()]),
  };

  mockCacheManager = {
    storeCustomDocuments: vi.fn().mockResolvedValue(undefined),
    getCustomDocuments: vi.fn().mockResolvedValue([]),
    clearCustomDocuments: vi.fn().mockResolvedValue(undefined),
  };

  processor = new CustomDocumentProcessor({
    modelManager: mockModelManager,
    cacheManager: mockCacheManager,
  });
});
|
|
44
|
+
|
|
45
|
+
describe('constructor', () => {
  it('should initialize with default options', () => {
    const instance = new CustomDocumentProcessor();

    expect(instance.customDocumentChunks).toBeInstanceOf(Map);
    expect(instance.h1EmbeddingCache).toBeInstanceOf(Map);
    expect(instance.performanceMetrics.documentsProcessed).toBe(0);
  });

  it('should accept custom dependencies', () => {
    const instance = new CustomDocumentProcessor({
      modelManager: mockModelManager,
      cacheManager: mockCacheManager,
    });

    // Injected collaborators are stored by reference, not copied.
    expect(instance.modelManager).toBe(mockModelManager);
    expect(instance.cacheManager).toBe(mockCacheManager);
  });
});
|
|
62
|
+
|
|
63
|
+
describe('chunkDocument', () => {
  // Convenience wrapper: build a minimal document object and chunk it.
  const chunk = (title, content) => processor.chunkDocument({ title, content });

  it('should chunk document content by paragraphs', () => {
    const chunks = chunk('Test Document', 'First paragraph.\n\nSecond paragraph.\n\nThird paragraph.');

    expect(chunks.length).toBeGreaterThanOrEqual(1);
    expect(chunks[0].document_title).toBe('Test Document');
  });

  it('should extract document title from markdown header', () => {
    // An H1 inside the content wins over the synthetic "instruction:" title.
    const chunks = chunk('instruction:./README.md', '# Real Document Title\n\nSome content here.');

    expect(chunks[0].document_title).toBe('Real Document Title');
  });

  it('should extract title from filename if no header', () => {
    // Without an H1, the filename is humanized (snake_case -> Title Case).
    const chunks = chunk('instruction:./engineering_guidelines.md', 'Content without header.');

    expect(chunks[0].document_title).toBe('Engineering Guidelines');
  });

  it('should split large content into multiple chunks', () => {
    const paragraphs = [];
    for (let i = 0; i < 50; i += 1) {
      paragraphs.push(`Paragraph ${i}: ${'x'.repeat(100)}`);
    }

    const chunks = chunk('Large Document', paragraphs.join('\n\n'));

    expect(chunks.length).toBeGreaterThan(1);
  });

  it('should set chunk metadata correctly', () => {
    const chunks = chunk('Test Doc', 'First paragraph.\n\nSecond paragraph.');
    const [first] = chunks;

    expect(first.metadata.section_start).toBe(true);
    expect(first.metadata.total_chunks).toBe(chunks.length);
    expect(first.metadata.chunk_hash).toBeDefined();
  });

  it('should generate unique chunk IDs', () => {
    const ids = chunk('Test', 'Para 1.\n\nPara 2.\n\nPara 3.').map((c) => c.id);

    expect(new Set(ids).size).toBe(ids.length);
  });

  it('should throw error for document without content', () => {
    // Both a missing and an empty content field are rejected.
    expect(() => processor.chunkDocument({ title: 'Empty' })).toThrow();
    expect(() => processor.chunkDocument({ title: 'Empty', content: '' })).toThrow();
  });

  it('should update performance metrics', () => {
    chunk('Test', 'Some content here.');

    expect(processor.performanceMetrics.chunksGenerated).toBeGreaterThan(0);
  });
});
|
|
153
|
+
|
|
154
|
+
describe('processDocumentsInMemory', () => {
  it('should return empty array for empty input', async () => {
    await expect(processor.processDocumentsInMemory([], '/project')).resolves.toEqual([]);
  });

  it('should return empty array for null input', async () => {
    await expect(processor.processDocumentsInMemory(null, '/project')).resolves.toEqual([]);
  });

  it('should process documents and generate embeddings', async () => {
    mockModelManager.calculateEmbeddingBatch.mockResolvedValue([createMockEmbedding()]);

    const result = await processor.processDocumentsInMemory(
      [{ title: 'Doc 1', content: 'Content for doc 1.' }],
      '/project',
    );

    expect(result.length).toBeGreaterThan(0);
    expect(result[0].embedding).toBeDefined();
    expect(result[0].type).toBe('custom-document-chunk');
  });

  it('should store chunks in memory by project path', async () => {
    mockModelManager.calculateEmbeddingBatch.mockResolvedValue([createMockEmbedding()]);

    await processor.processDocumentsInMemory([{ title: 'Doc', content: 'Content.' }], '/project');

    // Chunks are keyed by the project path they were processed for.
    expect(processor.customDocumentChunks.get('/project')).toBeDefined();
  });

  it('should cache chunks in CacheManager', async () => {
    mockModelManager.calculateEmbeddingBatch.mockResolvedValue([createMockEmbedding()]);

    await processor.processDocumentsInMemory([{ title: 'Doc', content: 'Content.' }], '/project');

    expect(mockCacheManager.storeCustomDocuments).toHaveBeenCalled();
  });

  it('should fall back to individual processing on batch failure', async () => {
    // Batch embedding fails -> per-chunk calculateEmbedding must take over.
    mockModelManager.calculateEmbeddingBatch.mockRejectedValue(new Error('Batch failed'));
    mockModelManager.calculateEmbedding.mockResolvedValue(createMockEmbedding());

    const result = await processor.processDocumentsInMemory(
      [{ title: 'Doc', content: 'Content.' }],
      '/project',
    );

    expect(mockModelManager.calculateEmbedding).toHaveBeenCalled();
    expect(result.length).toBeGreaterThan(0);
  });

  it('should update performance metrics', async () => {
    mockModelManager.calculateEmbeddingBatch.mockResolvedValue([createMockEmbedding()]);

    await processor.processDocumentsInMemory([{ title: 'Doc', content: 'Content.' }], '/project');

    expect(processor.performanceMetrics.documentsProcessed).toBe(1);
    expect(processor.performanceMetrics.embeddingsCalculated).toBeGreaterThan(0);
  });
});
|
|
217
|
+
|
|
218
|
+
describe('findRelevantChunks', () => {
  let mockChunks;

  // Factory for a minimal chunk carrying a mock embedding vector.
  const makeChunk = (id, content, document_title) => ({
    id,
    content,
    document_title,
    embedding: createMockEmbedding(),
  });

  beforeEach(() => {
    mockChunks = [
      makeChunk('chunk1', 'Content about React components', 'React Guide'),
      makeChunk('chunk2', 'Content about testing', 'Testing Guide'),
    ];
  });

  it('should throw error for empty query', async () => {
    await expect(processor.findRelevantChunks('', mockChunks)).rejects.toThrow();
  });

  it('should return empty array for no chunks', async () => {
    await expect(processor.findRelevantChunks('query', [])).resolves.toEqual([]);
  });

  it('should calculate similarity and return results', async () => {
    const results = await processor.findRelevantChunks('React', mockChunks, {
      similarityThreshold: 0,
    });

    expect(results.length).toBeGreaterThan(0);
    expect(results[0].similarity).toBeDefined();
  });

  it('should filter by similarity threshold', async () => {
    const results = await processor.findRelevantChunks('query', mockChunks, {
      similarityThreshold: 0.99,
    });

    // High threshold should filter out most results
    expect(results.length).toBeLessThanOrEqual(mockChunks.length);
  });

  it('should limit results', async () => {
    const results = await processor.findRelevantChunks('query', mockChunks, {
      limit: 1,
      similarityThreshold: 0,
    });

    expect(results.length).toBeLessThanOrEqual(1);
  });

  it('should use precomputed query embedding', async () => {
    // Supplying an embedding up front must skip the model call entirely.
    await processor.findRelevantChunks('query', mockChunks, {
      precomputedQueryEmbedding: createMockEmbedding(),
      similarityThreshold: 0,
    });

    expect(mockModelManager.calculateQueryEmbedding).not.toHaveBeenCalled();
  });

  it('should apply reranking when enabled', async () => {
    const results = await processor.findRelevantChunks('React components', mockChunks, {
      useReranking: true,
      queryContextForReranking: {
        area: 'Frontend',
        dominantTech: ['React'],
        keywords: ['component'],
      },
      similarityThreshold: 0,
    });

    expect(results.some((r) => r.reranked)).toBe(true);
  });

  it('should sort results by similarity', async () => {
    const results = await processor.findRelevantChunks('query', mockChunks, {
      similarityThreshold: 0,
    });

    // Each entry's similarity must be <= its predecessor's (descending order).
    results.slice(1).forEach((entry, i) => {
      expect(results[i].similarity).toBeGreaterThanOrEqual(entry.similarity);
    });
  });
});
|
|
309
|
+
|
|
310
|
+
describe('getExistingChunks', () => {
  it('should return chunks from memory', async () => {
    const inMemory = [{ id: 'chunk1', content: 'test' }];
    processor.customDocumentChunks.set('/project', inMemory);

    await expect(processor.getExistingChunks('/project')).resolves.toEqual(inMemory);
  });

  it('should return chunks from cache if not in memory', async () => {
    const cached = [{ id: 'cached', content: 'from cache' }];
    mockCacheManager.getCustomDocuments.mockResolvedValue(cached);

    await expect(processor.getExistingChunks('/project')).resolves.toEqual(cached);
  });

  it('should restore cached chunks to memory', async () => {
    // A cache hit should repopulate the in-memory map as a side effect.
    const cached = [{ id: 'cached', content: 'from cache' }];
    mockCacheManager.getCustomDocuments.mockResolvedValue(cached);

    await processor.getExistingChunks('/project');

    expect(processor.customDocumentChunks.get('/project')).toEqual(cached);
  });

  it('should return empty array when no chunks exist', async () => {
    mockCacheManager.getCustomDocuments.mockResolvedValue([]);

    await expect(processor.getExistingChunks('/project')).resolves.toEqual([]);
  });
});
|
|
346
|
+
|
|
347
|
+
describe('clearProjectChunks', () => {
  it('should clear chunks from memory', async () => {
    processor.customDocumentChunks.set('/project', [{ id: 'chunk' }]);

    await processor.clearProjectChunks('/project');

    expect(processor.customDocumentChunks.has('/project')).toBe(false);
  });

  it('should clear chunks from cache', async () => {
    await processor.clearProjectChunks('/project');

    expect(mockCacheManager.clearCustomDocuments).toHaveBeenCalled();
  });
});
|
|
362
|
+
|
|
363
|
+
describe('getProjectsWithCustomDocuments', () => {
  it('should return list of project paths', () => {
    ['/project1', '/project2'].forEach((path) => {
      processor.customDocumentChunks.set(path, []);
    });

    const projects = processor.getProjectsWithCustomDocuments();

    expect(projects).toContain('/project1');
    expect(projects).toContain('/project2');
  });

  it('should return empty array when no projects', () => {
    expect(processor.getProjectsWithCustomDocuments()).toEqual([]);
  });
});
|
|
379
|
+
|
|
380
|
+
describe('getPerformanceMetrics', () => {
  it('should return performance metrics', () => {
    const metrics = processor.getPerformanceMetrics();

    const expectedKeys = [
      'documentsProcessed',
      'chunksGenerated',
      'embeddingsCalculated',
      'batchSuccessRate',
      'cacheSize',
      'activeProjects',
    ];
    expectedKeys.forEach((key) => {
      expect(metrics).toHaveProperty(key);
    });
  });

  it('should calculate averages correctly', () => {
    // 1000ms over 10 documents -> 100ms per document.
    processor.performanceMetrics.documentsProcessed = 10;
    processor.performanceMetrics.processingTime = 1000;

    expect(processor.getPerformanceMetrics().averageProcessingTime).toBe(100);
  });
});
|
|
401
|
+
|
|
402
|
+
describe('clearCaches', () => {
  it('should clear all caches', () => {
    processor.h1EmbeddingCache.set('key', 'value');
    processor.customDocumentChunks.set('/project', []);

    processor.clearCaches();

    expect(processor.h1EmbeddingCache.size).toBe(0);
    expect(processor.customDocumentChunks.size).toBe(0);
  });
});
|
|
413
|
+
|
|
414
|
+
describe('cleanup', () => {
  it('should clear caches and reset metrics', async () => {
    processor.h1EmbeddingCache.set('key', 'value');
    processor.performanceMetrics.documentsProcessed = 10;

    await processor.cleanup();

    expect(processor.h1EmbeddingCache.size).toBe(0);
    expect(processor.performanceMetrics.documentsProcessed).toBe(0);
  });

  it('should prevent duplicate cleanup calls', async () => {
    // Simulate a cleanup already in flight.
    processor.cleaningUp = true;
    processor.h1EmbeddingCache.set('key', 'value');

    await processor.cleanup();

    // Should not clear when already cleaning up
    expect(processor.h1EmbeddingCache.size).toBe(1);
  });

  it('should reset cleaningUp flag', async () => {
    await processor.cleanup();

    expect(processor.cleaningUp).toBe(false);
  });
});
|
|
440
|
+
});
|