smart-coding-mcp 1.2.4 → 1.3.1
This diff shows the content of publicly available package versions published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +28 -168
- package/config.json +4 -3
- package/example.png +0 -0
- package/features/clear-cache.js +30 -7
- package/features/index-codebase.js +507 -37
- package/how-its-works.png +0 -0
- package/index.js +2 -2
- package/lib/cache.js +5 -0
- package/lib/config.js +29 -4
- package/lib/embedding-worker.js +67 -0
- package/lib/tokenizer.js +142 -0
- package/lib/utils.js +113 -25
- package/package.json +9 -3
- package/test/clear-cache.test.js +288 -0
- package/test/embedding-model.test.js +230 -0
- package/test/helpers.js +128 -0
- package/test/hybrid-search.test.js +243 -0
- package/test/index-codebase.test.js +246 -0
- package/test/integration.test.js +223 -0
- package/test/tokenizer.test.js +225 -0
- package/vitest.config.js +29 -0
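
The headline addition in 1.3.1 is a Vitest-based test suite (vitest.config.js plus the spec files under package/test/ listed above). Assuming the standard Vitest CLI from the package's devDependencies, the new suites can be run locally with something like:

    npx vitest run

(The exact npm test script added in package.json is not shown in this diff, so the invocation above is an assumption.)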
package/test/clear-cache.test.js
ADDED

@@ -0,0 +1,288 @@
/**
 * Tests for CacheClearer feature
 *
 * Tests the cache clearing functionality including:
 * - Basic cache clearing
 * - Protection during indexing
 * - Protection during save operations
 * - Concurrent clear prevention
 * - Tool handler responses
 */

import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest';
import {
  createTestFixtures,
  cleanupFixtures,
  clearTestCache,
  createMockRequest
} from './helpers.js';
import * as ClearCacheFeature from '../features/clear-cache.js';
import { CacheClearer } from '../features/clear-cache.js';
import fs from 'fs/promises';
import path from 'path';

describe('CacheClearer', () => {
  let fixtures;

  beforeAll(async () => {
    fixtures = await createTestFixtures({ workerThreads: 2 });
  });

  afterAll(async () => {
    await cleanupFixtures(fixtures);
  });

  beforeEach(async () => {
    // Reset state
    fixtures.indexer.isIndexing = false;
    fixtures.cache.isSaving = false;
    fixtures.cacheClearer.isClearing = false;
  });

  describe('Basic Cache Clearing', () => {
    it('should clear cache successfully', async () => {
      // First ensure we have a cache
      await fixtures.indexer.indexAll(true);

      // Verify cache exists
      expect(fixtures.cache.getVectorStore().length).toBeGreaterThan(0);

      // Clear cache
      const result = await fixtures.cacheClearer.execute();

      expect(result.success).toBe(true);
      expect(result.message).toContain('Cache cleared successfully');
      expect(result.cacheDirectory).toBe(fixtures.config.cacheDirectory);
    });

    it('should empty vectorStore and fileHashes', async () => {
      // Create some cache
      await fixtures.indexer.indexAll(true);

      // Clear
      await fixtures.cacheClearer.execute();

      // Both should be empty
      expect(fixtures.cache.getVectorStore().length).toBe(0);
      expect(fixtures.cache.fileHashes.size).toBe(0);
    });

    it('should delete cache directory', async () => {
      // Create cache
      await fixtures.indexer.indexAll(true);

      // Verify cache directory exists
      await expect(fs.access(fixtures.config.cacheDirectory)).resolves.not.toThrow();

      // Clear
      await fixtures.cacheClearer.execute();

      // Directory should not exist
      await expect(fs.access(fixtures.config.cacheDirectory)).rejects.toThrow();
    });
  });

  describe('Protection During Indexing', () => {
    it('should prevent clear while indexing is in progress', async () => {
      // Simulate indexing in progress
      await clearTestCache(fixtures.config);
      fixtures.cache.setVectorStore([]);
      fixtures.cache.fileHashes = new Map();

      const indexPromise = fixtures.indexer.indexAll(true);

      // Wait for indexing to start
      await new Promise(resolve => setTimeout(resolve, 100));
      expect(fixtures.indexer.isIndexing).toBe(true);

      // Try to clear - should fail
      await expect(fixtures.cacheClearer.execute()).rejects.toThrow(
        'Cannot clear cache while indexing is in progress'
      );

      await indexPromise;
    });

    it('should allow clear after indexing completes', async () => {
      // Complete indexing
      await fixtures.indexer.indexAll(true);
      expect(fixtures.indexer.isIndexing).toBe(false);

      // Clear should work
      const result = await fixtures.cacheClearer.execute();
      expect(result.success).toBe(true);
    });
  });

  describe('Protection During Save', () => {
    it('should prevent clear while cache is being saved', async () => {
      // Simulate save in progress
      fixtures.cache.isSaving = true;

      // Try to clear - should fail
      await expect(fixtures.cacheClearer.execute()).rejects.toThrow(
        'Cannot clear cache while cache is being saved'
      );

      // Reset
      fixtures.cache.isSaving = false;
    });

    it('should allow clear after save completes', async () => {
      // Index first
      await fixtures.indexer.indexAll(true);

      // isSaving should be false after indexing
      expect(fixtures.cache.isSaving).toBe(false);

      // Clear should work
      const result = await fixtures.cacheClearer.execute();
      expect(result.success).toBe(true);
    });
  });

  describe('Concurrent Clear Prevention', () => {
    it('should prevent multiple concurrent clears', async () => {
      // Index first
      await fixtures.indexer.indexAll(true);

      // Reset the isClearing flag
      fixtures.cacheClearer.isClearing = false;

      // Start multiple concurrent clears
      const promises = [
        fixtures.cacheClearer.execute(),
        fixtures.cacheClearer.execute(),
        fixtures.cacheClearer.execute()
      ];

      const results = await Promise.allSettled(promises);

      // Exactly one should succeed
      const successes = results.filter(r => r.status === 'fulfilled');
      const failures = results.filter(r => r.status === 'rejected');

      expect(successes.length).toBe(1);
      expect(failures.length).toBe(2);

      // Failures should have correct error message
      for (const failure of failures) {
        expect(failure.reason.message).toContain('already in progress');
      }
    });

    it('should reset isClearing flag after completion', async () => {
      // Index first
      await fixtures.indexer.indexAll(true);

      expect(fixtures.cacheClearer.isClearing).toBe(false);

      // Clear
      await fixtures.cacheClearer.execute();

      // Flag should be reset
      expect(fixtures.cacheClearer.isClearing).toBe(false);
    });

    it('should reset isClearing flag even on error', async () => {
      // Set up for failure
      fixtures.cache.isSaving = true;

      try {
        await fixtures.cacheClearer.execute();
      } catch {
        // Expected to fail
      }

      // isClearing should not have been set (failed before setting)
      expect(fixtures.cacheClearer.isClearing).toBe(false);

      // Reset
      fixtures.cache.isSaving = false;
    });
  });
});

describe('Clear Cache Tool Handler', () => {
  let fixtures;

  beforeAll(async () => {
    fixtures = await createTestFixtures({ workerThreads: 2 });
  });

  afterAll(async () => {
    await cleanupFixtures(fixtures);
  });

  beforeEach(async () => {
    fixtures.indexer.isIndexing = false;
    fixtures.cache.isSaving = false;
    fixtures.cacheClearer.isClearing = false;
  });

  describe('Tool Definition', () => {
    it('should have correct tool definition', () => {
      const toolDef = ClearCacheFeature.getToolDefinition();

      expect(toolDef.name).toBe('c_clear_cache');
      expect(toolDef.description).toContain('cache');
      expect(toolDef.annotations.destructiveHint).toBe(true);
      expect(toolDef.inputSchema.properties).toEqual({});
    });
  });

  describe('Tool Handler', () => {
    it('should return success message on cleared cache', async () => {
      // Index first
      await fixtures.indexer.indexAll(true);

      const request = createMockRequest('c_clear_cache', {});
      const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);

      expect(result.content[0].text).toContain('Cache cleared successfully');
      expect(result.content[0].text).toContain('Cache directory:');
    });

    it('should return error message when indexing is in progress', async () => {
      // Simulate indexing
      await clearTestCache(fixtures.config);
      fixtures.cache.setVectorStore([]);
      fixtures.cache.fileHashes = new Map();

      const indexPromise = fixtures.indexer.indexAll(true);
      await new Promise(resolve => setTimeout(resolve, 100));

      const request = createMockRequest('c_clear_cache', {});
      const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);

      expect(result.content[0].text).toContain('Failed to clear cache');
      expect(result.content[0].text).toContain('indexing is in progress');

      await indexPromise;
    });

    it('should return error message when save is in progress', async () => {
      fixtures.cache.isSaving = true;

      const request = createMockRequest('c_clear_cache', {});
      const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);

      expect(result.content[0].text).toContain('Failed to clear cache');
      expect(result.content[0].text).toContain('being saved');

      fixtures.cache.isSaving = false;
    });

    it('should return error message when clear is already in progress', async () => {
      fixtures.cacheClearer.isClearing = true;

      const request = createMockRequest('c_clear_cache', {});
      const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);

      expect(result.content[0].text).toContain('Failed to clear cache');
      expect(result.content[0].text).toContain('already in progress');

      fixtures.cacheClearer.isClearing = false;
    });
  });
});
package/test/embedding-model.test.js
ADDED

@@ -0,0 +1,230 @@
/**
 * Tests for Local LLM (Embedding Model)
 *
 * Tests the embedding model functionality including:
 * - Model loading
 * - Embedding generation
 * - Vector properties
 * - Similarity calculations
 */

import { describe, it, expect, beforeAll } from 'vitest';
import { pipeline } from '@xenova/transformers';
import { cosineSimilarity } from '../lib/utils.js';
import { loadConfig } from '../lib/config.js';

describe('Local Embedding Model', () => {
  let embedder;
  let config;

  beforeAll(async () => {
    config = await loadConfig();
    console.log(`[Test] Loading embedding model: ${config.embeddingModel}`);
    embedder = await pipeline('feature-extraction', config.embeddingModel);
    console.log('[Test] Embedding model loaded successfully');
  });

  describe('Model Loading', () => {
    it('should load the embedding model', () => {
      expect(embedder).toBeDefined();
      expect(typeof embedder).toBe('function');
    });

    it('should use the configured model', () => {
      expect(config.embeddingModel).toBe('Xenova/all-MiniLM-L6-v2');
    });
  });

  describe('Embedding Generation', () => {
    it('should generate embeddings for text', async () => {
      const text = 'Hello, world!';
      const output = await embedder(text, { pooling: 'mean', normalize: true });

      expect(output).toBeDefined();
      expect(output.data).toBeDefined();
    });

    it('should return vectors of correct dimensions', async () => {
      const text = 'Test input for embedding';
      const output = await embedder(text, { pooling: 'mean', normalize: true });
      const vector = Array.from(output.data);

      // MiniLM-L6 produces 384-dimensional vectors
      expect(vector.length).toBe(384);
    });

    it('should return normalized vectors', async () => {
      const text = 'Normalized vector test';
      const output = await embedder(text, { pooling: 'mean', normalize: true });
      const vector = Array.from(output.data);

      // Calculate magnitude (should be ~1 for normalized vectors)
      const magnitude = Math.sqrt(vector.reduce((sum, v) => sum + v * v, 0));
      expect(magnitude).toBeCloseTo(1, 4);
    });

    it('should generate different embeddings for different text', async () => {
      const output1 = await embedder('apple fruit', { pooling: 'mean', normalize: true });
      const output2 = await embedder('programming code', { pooling: 'mean', normalize: true });

      const vector1 = Array.from(output1.data);
      const vector2 = Array.from(output2.data);

      // Vectors should be different
      const areSame = vector1.every((v, i) => Math.abs(v - vector2[i]) < 0.0001);
      expect(areSame).toBe(false);
    });

    it('should handle code snippets', async () => {
      const code = `
        function add(a, b) {
          return a + b;
        }
      `;

      const output = await embedder(code, { pooling: 'mean', normalize: true });
      const vector = Array.from(output.data);

      expect(vector.length).toBe(384);
    });

    it('should handle multiline text', async () => {
      const multiline = 'Line one\nLine two\nLine three';
      const output = await embedder(multiline, { pooling: 'mean', normalize: true });
      const vector = Array.from(output.data);

      expect(vector.length).toBe(384);
    });

    it('should handle special characters', async () => {
      const special = '{}[]()<>!@#$%^&*';
      const output = await embedder(special, { pooling: 'mean', normalize: true });
      const vector = Array.from(output.data);

      expect(vector.length).toBe(384);
    });
  });

  describe('Semantic Similarity', () => {
    it('should give high similarity for semantically similar text', async () => {
      const output1 = await embedder('user authentication login', { pooling: 'mean', normalize: true });
      const output2 = await embedder('user login authentication', { pooling: 'mean', normalize: true });

      const vector1 = Array.from(output1.data);
      const vector2 = Array.from(output2.data);

      const similarity = cosineSimilarity(vector1, vector2);

      // Same words, different order - should be very similar
      expect(similarity).toBeGreaterThan(0.9);
    });

    it('should give lower similarity for different topics', async () => {
      const output1 = await embedder('database query SQL', { pooling: 'mean', normalize: true });
      const output2 = await embedder('pizza delivery food', { pooling: 'mean', normalize: true });

      const vector1 = Array.from(output1.data);
      const vector2 = Array.from(output2.data);

      const similarity = cosineSimilarity(vector1, vector2);

      // Different topics - should have low similarity
      expect(similarity).toBeLessThan(0.5);
    });

    it('should capture code semantic similarity', async () => {
      const output1 = await embedder('function that handles user login', { pooling: 'mean', normalize: true });
      const output2 = await embedder('async authenticate(user, password)', { pooling: 'mean', normalize: true });
      const output3 = await embedder('function to sort array elements', { pooling: 'mean', normalize: true });

      const v1 = Array.from(output1.data);
      const v2 = Array.from(output2.data);
      const v3 = Array.from(output3.data);

      const sim12 = cosineSimilarity(v1, v2); // login-related
      const sim13 = cosineSimilarity(v1, v3); // login vs sorting

      // Login concepts should be more similar to each other than to sorting
      expect(sim12).toBeGreaterThan(sim13);
    });

    it('should recognize programming language constructs', async () => {
      const output1 = await embedder('import React from "react"', { pooling: 'mean', normalize: true });
      const output2 = await embedder('import Vue from "vue"', { pooling: 'mean', normalize: true });
      const output3 = await embedder('The weather is sunny today', { pooling: 'mean', normalize: true });

      const v1 = Array.from(output1.data);
      const v2 = Array.from(output2.data);
      const v3 = Array.from(output3.data);

      const sim12 = cosineSimilarity(v1, v2); // Both imports
      const sim13 = cosineSimilarity(v1, v3); // Import vs weather

      // Import statements should be more similar to each other
      expect(sim12).toBeGreaterThan(sim13);
    });
  });

  describe('Cosine Similarity Function', () => {
    it('should return 1 for identical vectors', () => {
      const vector = [0.1, 0.2, 0.3, 0.4, 0.5];
      expect(cosineSimilarity(vector, vector)).toBeCloseTo(1, 5);
    });

    it('should return -1 for opposite vectors', () => {
      const vector1 = [1, 0, 0];
      const vector2 = [-1, 0, 0];
      expect(cosineSimilarity(vector1, vector2)).toBeCloseTo(-1, 5);
    });

    it('should return 0 for orthogonal vectors', () => {
      const vector1 = [1, 0, 0];
      const vector2 = [0, 1, 0];
      expect(cosineSimilarity(vector1, vector2)).toBeCloseTo(0, 5);
    });

    it('should handle high-dimensional vectors', () => {
      const dim = 384;
      const vector1 = Array(dim).fill(0).map(() => Math.random());
      const vector2 = Array(dim).fill(0).map(() => Math.random());

      const similarity = cosineSimilarity(vector1, vector2);

      expect(similarity).toBeGreaterThanOrEqual(-1);
      expect(similarity).toBeLessThanOrEqual(1);
    });
  });

  describe('Performance', () => {
    it('should generate embeddings in reasonable time', async () => {
      const text = 'This is a test sentence for measuring embedding generation speed.';

      const start = Date.now();
      await embedder(text, { pooling: 'mean', normalize: true });
      const duration = Date.now() - start;

      // Should be fast (under 500ms for single embedding)
      expect(duration).toBeLessThan(500);
    });

    it('should handle multiple sequential embeddings', async () => {
      const texts = [
        'First test input',
        'Second test input',
        'Third test input',
        'Fourth test input',
        'Fifth test input'
      ];

      const start = Date.now();
      for (const text of texts) {
        await embedder(text, { pooling: 'mean', normalize: true });
      }
      const duration = Date.now() - start;

      // 5 embeddings should complete in reasonable time
      expect(duration).toBeLessThan(2500);
      console.log(`[Test] 5 embeddings generated in ${duration}ms (${(duration/5).toFixed(0)}ms avg)`);
    });
  });
});

package/test/helpers.js
ADDED
@@ -0,0 +1,128 @@
/**
 * Test helper utilities for Smart Coding MCP tests
 * Provides shared setup, teardown, and mock utilities
 */

import { loadConfig } from '../lib/config.js';
import { EmbeddingsCache } from '../lib/cache.js';
import { CodebaseIndexer } from '../features/index-codebase.js';
import { CacheClearer } from '../features/clear-cache.js';
import { HybridSearch } from '../features/hybrid-search.js';
import { pipeline } from '@xenova/transformers';
import fs from 'fs/promises';
import path from 'path';

// Cached embedder instance (shared across tests for speed)
let sharedEmbedder = null;

/**
 * Get or initialize the shared embedder instance
 * Loading the model once and reusing saves significant time
 */
export async function getEmbedder(config) {
  if (!sharedEmbedder) {
    console.log('[TestHelper] Loading embedding model (first time)...');
    sharedEmbedder = await pipeline('feature-extraction', config.embeddingModel);
    console.log('[TestHelper] Embedding model loaded');
  }
  return sharedEmbedder;
}

/**
 * Create test fixtures with initialized components
 * @param {Object} options - Options for fixture creation
 * @returns {Object} Initialized components for testing
 */
export async function createTestFixtures(options = {}) {
  const config = await loadConfig();

  // Override config for testing if needed
  if (options.verbose !== undefined) config.verbose = options.verbose;
  if (options.workerThreads !== undefined) config.workerThreads = options.workerThreads;

  const embedder = await getEmbedder(config);

  const cache = new EmbeddingsCache(config);
  await cache.load();

  const indexer = new CodebaseIndexer(embedder, cache, config, null);
  const cacheClearer = new CacheClearer(embedder, cache, config, indexer);
  const hybridSearch = new HybridSearch(embedder, cache, config);

  return {
    config,
    embedder,
    cache,
    indexer,
    cacheClearer,
    hybridSearch
  };
}

/**
 * Clean up test resources
 * @param {Object} fixtures - Test fixtures to clean up
 */
export async function cleanupFixtures(fixtures) {
  if (fixtures.indexer) {
    fixtures.indexer.terminateWorkers();
    if (fixtures.indexer.watcher) {
      await fixtures.indexer.watcher.close();
    }
  }
}

/**
 * Clear the cache directory for a clean test state
 * @param {Object} config - Configuration object
 */
export async function clearTestCache(config) {
  try {
    await fs.rm(config.cacheDirectory, { recursive: true, force: true });
  } catch (err) {
    // Ignore if doesn't exist
  }
}

/**
 * Create a mock MCP request object
 * @param {string} toolName - Tool name
 * @param {Object} args - Tool arguments
 * @returns {Object} Mock request object
 */
export function createMockRequest(toolName, args = {}) {
  return {
    params: {
      name: toolName,
      arguments: args
    }
  };
}

/**
 * Wait for a condition with timeout
 * @param {Function} condition - Async function returning boolean
 * @param {number} timeout - Max wait time in ms
 * @param {number} interval - Check interval in ms
 * @returns {boolean} Whether condition was met
 */
export async function waitFor(condition, timeout = 5000, interval = 100) {
  const start = Date.now();
  while (Date.now() - start < timeout) {
    if (await condition()) return true;
    await new Promise(resolve => setTimeout(resolve, interval));
  }
  return false;
}

/**
 * Measure execution time of an async function
 * @param {Function} fn - Async function to measure
 * @returns {Object} Result and duration
 */
export async function measureTime(fn) {
  const start = Date.now();
  const result = await fn();
  const duration = Date.now() - start;
  return { result, duration };
}