@code-rag/mcp-server 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +24 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.js +25 -0
- package/dist/index.js.map +1 -0
- package/dist/server.d.ts +52 -0
- package/dist/server.js +420 -0
- package/dist/server.js.map +1 -0
- package/dist/server.test.d.ts +1 -0
- package/dist/server.test.js +764 -0
- package/dist/server.test.js.map +1 -0
- package/dist/tools/backlog.d.ts +28 -0
- package/dist/tools/backlog.js +180 -0
- package/dist/tools/backlog.js.map +1 -0
- package/dist/tools/backlog.test.d.ts +1 -0
- package/dist/tools/backlog.test.js +269 -0
- package/dist/tools/backlog.test.js.map +1 -0
- package/dist/tools/context.d.ts +15 -0
- package/dist/tools/context.js +110 -0
- package/dist/tools/context.js.map +1 -0
- package/dist/tools/docs.d.ts +27 -0
- package/dist/tools/docs.js +111 -0
- package/dist/tools/docs.js.map +1 -0
- package/dist/tools/docs.test.d.ts +1 -0
- package/dist/tools/docs.test.js +357 -0
- package/dist/tools/docs.test.js.map +1 -0
- package/dist/tools/explain.d.ts +17 -0
- package/dist/tools/explain.js +138 -0
- package/dist/tools/explain.js.map +1 -0
- package/dist/tools/explain.test.d.ts +1 -0
- package/dist/tools/explain.test.js +244 -0
- package/dist/tools/explain.test.js.map +1 -0
- package/dist/tools/search.d.ts +25 -0
- package/dist/tools/search.js +101 -0
- package/dist/tools/search.js.map +1 -0
- package/dist/tools/status.d.ts +14 -0
- package/dist/tools/status.js +41 -0
- package/dist/tools/status.js.map +1 -0
- package/package.json +58 -0
|
@@ -0,0 +1,764 @@
|
|
|
1
|
+
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import { ok, err } from 'neverthrow';
|
|
3
|
+
import { handleSearch, searchInputSchema } from './tools/search.js';
|
|
4
|
+
import { handleContext, contextInputSchema } from './tools/context.js';
|
|
5
|
+
import { handleStatus } from './tools/status.js';
|
|
6
|
+
import { EmbedError, StoreError, ReRankerError } from '@coderag/core';
|
|
7
|
+
// --- Helpers ---
|
|
8
|
+
// Builds a canonical hybrid-search result fixture. Fields can be replaced
// per-test via `overrides` (shallow merge — later keys win).
function makeSearchResult(overrides = {}) {
  const metadata = {
    chunkType: 'function',
    name: 'hello',
    declarations: [],
    imports: [],
    exports: [],
  };
  const base = {
    chunkId: 'chunk-1',
    content: 'function hello() {}',
    nlSummary: 'A greeting function',
    score: 0.95,
    method: 'hybrid',
    metadata,
    chunk: {
      id: 'chunk-1',
      content: 'function hello() {}',
      nlSummary: 'A greeting function',
      filePath: 'src/utils/hello.ts',
      startLine: 1,
      endLine: 3,
      language: 'typescript',
      // Fresh copy so top-level and chunk metadata stay independent objects.
      metadata: { ...metadata },
    },
  };
  return { ...base, ...overrides };
}
|
|
41
|
+
// Minimal-but-complete CodeRAG config fixture. Individual top-level
// sections can be swapped out via `overrides` (shallow merge).
function makeConfig(overrides = {}) {
  const defaults = {
    version: '1',
    project: { name: 'test-project', languages: ['typescript'] },
    ingestion: { maxTokensPerChunk: 512, exclude: [] },
    embedding: {
      provider: 'ollama',
      model: 'nomic-embed-text',
      dimensions: 768,
      autoStart: true,
      autoStop: false,
      docker: { image: 'ollama/ollama', gpu: 'auto' },
    },
    llm: { provider: 'ollama', model: 'qwen2.5-coder:7b' },
    search: { topK: 10, vectorWeight: 0.7, bm25Weight: 0.3 },
    storage: { path: '.coderag' },
  };
  return { ...defaults, ...overrides };
}
|
|
53
|
+
// Parses the JSON payload out of an MCP tool response; responses carry a
// single text content item at index 0.
function parseResponse(response) {
  const [first] = response.content;
  return JSON.parse(first.text);
}
|
|
56
|
+
// --- Search Tool Tests ---
describe('handleSearch', () => {
  let searchMock;

  beforeEach(() => {
    // Fresh stub per test so call counts don't leak between cases.
    searchMock = { search: vi.fn() };
  });

  it('should return results for a valid query', async () => {
    searchMock.search.mockResolvedValue(ok([makeSearchResult()]));
    const parsed = parseResponse(await handleSearch({ query: 'hello function' }, searchMock, null));
    expect(parsed.results).toHaveLength(1);
    expect(parsed.results[0]).toEqual({
      file_path: 'src/utils/hello.ts',
      chunk_type: 'function',
      name: 'hello',
      content: 'function hello() {}',
      nl_summary: 'A greeting function',
      score: 0.95,
    });
  });

  it('should return validation error for missing query', async () => {
    const parsed = parseResponse(await handleSearch({}, searchMock, null));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should return validation error for empty query', async () => {
    const parsed = parseResponse(await handleSearch({ query: '' }, searchMock, null));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should return empty results when search index is not initialized', async () => {
    const parsed = parseResponse(await handleSearch({ query: 'hello' }, null, null));
    expect(parsed.results).toEqual([]);
    expect(parsed.message).toContain('not initialized');
  });

  it('should handle search API errors gracefully', async () => {
    searchMock.search.mockResolvedValue(err(new EmbedError('Connection refused')));
    const parsed = parseResponse(await handleSearch({ query: 'hello' }, searchMock, null));
    expect(parsed.error).toBe('Search failed');
    expect(parsed.message).toContain('Connection refused');
  });

  it('should filter by language', async () => {
    const hits = [
      makeSearchResult(),
      makeSearchResult({
        chunkId: 'chunk-2',
        chunk: {
          id: 'chunk-2',
          content: 'def hello(): pass',
          nlSummary: 'Python greeting',
          filePath: 'src/hello.py',
          startLine: 1,
          endLine: 1,
          language: 'python',
          metadata: { chunkType: 'function', name: 'hello', declarations: [], imports: [], exports: [] },
        },
      }),
    ];
    searchMock.search.mockResolvedValue(ok(hits));
    const parsed = parseResponse(await handleSearch({ query: 'hello', language: 'typescript' }, searchMock, null));
    expect(parsed.results).toHaveLength(1);
  });

  it('should filter by file_path', async () => {
    const hits = [
      makeSearchResult(),
      makeSearchResult({
        chunkId: 'chunk-2',
        chunk: {
          id: 'chunk-2',
          content: 'function other() {}',
          nlSummary: 'Other function',
          filePath: 'src/other/world.ts',
          startLine: 1,
          endLine: 1,
          language: 'typescript',
          metadata: { chunkType: 'function', name: 'other', declarations: [], imports: [], exports: [] },
        },
      }),
    ];
    searchMock.search.mockResolvedValue(ok(hits));
    const parsed = parseResponse(await handleSearch({ query: 'hello', file_path: 'utils' }, searchMock, null));
    expect(parsed.results).toHaveLength(1);
  });

  it('should filter by chunk_type', async () => {
    const hits = [
      makeSearchResult(),
      makeSearchResult({
        chunkId: 'chunk-2',
        metadata: { chunkType: 'class', name: 'MyClass', declarations: [], imports: [], exports: [] },
      }),
    ];
    searchMock.search.mockResolvedValue(ok(hits));
    const parsed = parseResponse(await handleSearch({ query: 'hello', chunk_type: 'function' }, searchMock, null));
    expect(parsed.results).toHaveLength(1);
  });

  it('should use default top_k of 10', async () => {
    searchMock.search.mockResolvedValue(ok([]));
    await handleSearch({ query: 'hello' }, searchMock, null);
    expect(searchMock.search).toHaveBeenCalledWith('hello', { topK: 10 });
  });

  it('should use custom top_k', async () => {
    searchMock.search.mockResolvedValue(ok([]));
    await handleSearch({ query: 'hello', top_k: 5 }, searchMock, null);
    expect(searchMock.search).toHaveBeenCalledWith('hello', { topK: 5 });
  });

  it('should handle thrown exceptions', async () => {
    searchMock.search.mockRejectedValue(new Error('Unexpected'));
    const parsed = parseResponse(await handleSearch({ query: 'hello' }, searchMock, null));
    expect(parsed.error).toBe('Search failed');
    expect(parsed.message).toBe('Unexpected');
  });

  it('should reject top_k above 100', async () => {
    const parsed = parseResponse(await handleSearch({ query: 'hello', top_k: 200 }, searchMock, null));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should reject file_path with path traversal', async () => {
    const parsed = parseResponse(await handleSearch({ query: 'hello', file_path: '../../etc/passwd' }, searchMock, null));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should apply reranker when provided', async () => {
    const hits = [
      makeSearchResult({ chunkId: 'chunk-1' }),
      makeSearchResult({ chunkId: 'chunk-2' }),
    ];
    searchMock.search.mockResolvedValue(ok(hits));
    const rerankerMock = {
      rerank: vi.fn().mockResolvedValue(ok([hits[1], hits[0]])),
    };
    const parsed = parseResponse(await handleSearch({ query: 'hello' }, searchMock, rerankerMock));
    expect(rerankerMock.rerank).toHaveBeenCalledWith('hello', hits);
    expect(parsed.results).toHaveLength(2);
  });

  it('should fall back to original results when reranker fails', async () => {
    searchMock.search.mockResolvedValue(ok([makeSearchResult()]));
    const rerankerMock = {
      rerank: vi.fn().mockResolvedValue(err(new ReRankerError('Ollama unreachable'))),
    };
    const parsed = parseResponse(await handleSearch({ query: 'hello' }, searchMock, rerankerMock));
    // Should still return results (fallback)
    expect(parsed.results).toHaveLength(1);
  });
});
|
|
230
|
+
// --- Context Tool Tests ---
describe('handleContext', () => {
  let searchMock;
  let expanderMock;

  beforeEach(() => {
    searchMock = { search: vi.fn() };
    expanderMock = { expand: vi.fn() };
  });

  it('should return validation error for missing file_path', async () => {
    const parsed = parseResponse(await handleContext({}, searchMock, expanderMock));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should return validation error for empty file_path', async () => {
    const parsed = parseResponse(await handleContext({ file_path: '' }, searchMock, expanderMock));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should return degraded message when services are not initialized', async () => {
    const parsed = parseResponse(await handleContext({ file_path: 'src/index.ts' }, null, null));
    expect(parsed.context).toBe('');
    expect(parsed.message).toContain('not initialized');
  });

  it('should assemble context for a valid file path', async () => {
    const hits = [makeSearchResult()];
    searchMock.search.mockResolvedValue(ok(hits));
    expanderMock.expand.mockReturnValue({
      primaryResults: hits,
      relatedChunks: [],
      graphExcerpt: { nodes: [], edges: [] },
    });
    const parsed = parseResponse(await handleContext({ file_path: 'src/utils/hello.ts' }, searchMock, expanderMock));
    expect(parsed.context).toContain('hello');
    expect(parsed.primary_chunks).toBe(1);
    expect(typeof parsed.token_count).toBe('number');
    expect(typeof parsed.truncated).toBe('boolean');
  });

  it('should return empty context when no chunks match file_path', async () => {
    // Fixture filePath is 'src/utils/hello.ts', which cannot match 'src/other.ts'.
    searchMock.search.mockResolvedValue(ok([makeSearchResult()]));
    const parsed = parseResponse(await handleContext({ file_path: 'src/other.ts' }, searchMock, expanderMock));
    expect(parsed.context).toBe('');
    expect(parsed.message).toContain('No chunks found');
  });

  it('should handle search failures', async () => {
    searchMock.search.mockResolvedValue(err(new EmbedError('Embedding service unavailable')));
    const parsed = parseResponse(await handleContext({ file_path: 'src/index.ts' }, searchMock, expanderMock));
    expect(parsed.error).toBe('Search failed');
    expect(parsed.message).toContain('Embedding service unavailable');
  });

  it('should filter out test files when include_tests is false', async () => {
    const hits = [
      makeSearchResult(),
      makeSearchResult({
        chunkId: 'chunk-test',
        chunk: {
          id: 'chunk-test',
          content: 'describe("hello")',
          nlSummary: 'Test for hello',
          filePath: 'src/utils/hello.test.ts',
          startLine: 1,
          endLine: 5,
          language: 'typescript',
          metadata: { chunkType: 'function', name: 'describe', declarations: [], imports: [], exports: [] },
        },
      }),
    ];
    searchMock.search.mockResolvedValue(ok(hits));
    expanderMock.expand.mockReturnValue({
      primaryResults: [hits[0]],
      relatedChunks: [],
      graphExcerpt: { nodes: [], edges: [] },
    });
    const parsed = parseResponse(await handleContext({ file_path: 'src/utils/hello', include_tests: false }, searchMock, expanderMock));
    // Only the non-test chunk should be passed to expander
    expect(expanderMock.expand).toHaveBeenCalledWith([hits[0]]);
    expect(parsed.primary_chunks).toBe(1);
  });

  it('should filter out interfaces when include_interfaces is false', async () => {
    const ifaceMetadata = { chunkType: 'interface', name: 'HelloInterface', declarations: [], imports: [], exports: [] };
    const hits = [
      makeSearchResult(),
      makeSearchResult({
        chunkId: 'chunk-iface',
        metadata: ifaceMetadata,
        chunk: {
          id: 'chunk-iface',
          content: 'interface HelloInterface {}',
          nlSummary: 'Interface for hello',
          filePath: 'src/utils/hello.ts',
          startLine: 5,
          endLine: 10,
          language: 'typescript',
          metadata: { ...ifaceMetadata },
        },
      }),
    ];
    searchMock.search.mockResolvedValue(ok(hits));
    expanderMock.expand.mockReturnValue({
      primaryResults: [hits[0]],
      relatedChunks: [],
      graphExcerpt: { nodes: [], edges: [] },
    });
    const parsed = parseResponse(await handleContext({ file_path: 'src/utils/hello', include_interfaces: false }, searchMock, expanderMock));
    expect(expanderMock.expand).toHaveBeenCalledWith([hits[0]]);
    expect(parsed.primary_chunks).toBe(1);
  });

  it('should handle thrown exceptions', async () => {
    searchMock.search.mockRejectedValue(new Error('Boom'));
    const parsed = parseResponse(await handleContext({ file_path: 'src/index.ts' }, searchMock, expanderMock));
    expect(parsed.error).toBe('Context assembly failed');
    expect(parsed.message).toBe('Boom');
  });

  it('should reject file_path with path traversal', async () => {
    const parsed = parseResponse(await handleContext({ file_path: '../../etc/passwd' }, searchMock, expanderMock));
    expect(parsed.error).toBe('Invalid input');
  });

  it('should reject max_tokens above 128000', async () => {
    const parsed = parseResponse(await handleContext({ file_path: 'src/index.ts', max_tokens: 200000 }, searchMock, expanderMock));
    expect(parsed.error).toBe('Invalid input');
  });
});
|
|
386
|
+
// --- Status Tool Tests ---
describe('handleStatus', () => {
  let storeMock;

  beforeEach(() => {
    storeMock = { count: vi.fn() };
  });

  it('should return not_initialized when store is null', async () => {
    const parsed = parseResponse(await handleStatus(null, null));
    expect(parsed.health).toBe('not_initialized');
    expect(parsed.total_chunks).toBe(0);
    expect(parsed).toHaveProperty('model', 'unknown');
  });

  it('should return ok health when chunks exist', async () => {
    storeMock.count.mockResolvedValue(ok(42));
    const parsed = parseResponse(await handleStatus(storeMock, makeConfig()));
    expect(parsed.health).toBe('ok');
    expect(parsed.total_chunks).toBe(42);
    expect(parsed.model).toBe('nomic-embed-text');
    expect(parsed.languages).toEqual(['typescript']);
  });

  it('should return degraded health when store is empty', async () => {
    storeMock.count.mockResolvedValue(ok(0));
    const parsed = parseResponse(await handleStatus(storeMock, makeConfig()));
    expect(parsed.health).toBe('degraded');
    expect(parsed.total_chunks).toBe(0);
  });

  it('should return degraded health when count fails', async () => {
    storeMock.count.mockResolvedValue(err(new StoreError('DB connection lost')));
    const parsed = parseResponse(await handleStatus(storeMock, makeConfig()));
    expect(parsed.health).toBe('degraded');
  });

  it('should handle thrown exceptions', async () => {
    storeMock.count.mockRejectedValue(new Error('Fatal'));
    const parsed = parseResponse(await handleStatus(storeMock, makeConfig()));
    expect(parsed.error).toBe('Status check failed');
    expect(parsed.health).toBe('degraded');
  });

  it('should return auto for languages when config has auto', async () => {
    storeMock.count.mockResolvedValue(ok(10));
    const config = makeConfig({
      project: { name: 'test', languages: 'auto' },
    });
    const parsed = parseResponse(await handleStatus(storeMock, config));
    expect(parsed.languages).toBe('auto');
  });
});
|
|
443
|
+
// --- Tool Registration Tests ---
describe('tool definitions', () => {
  it('coderag_search inputSchema has required query field', () => {
    expect(searchInputSchema.safeParse({ query: 'test' }).success).toBe(true);
    expect(searchInputSchema.safeParse({}).success).toBe(false);
  });

  it('coderag_context inputSchema has required file_path field', () => {
    expect(contextInputSchema.safeParse({ file_path: 'src/index.ts' }).success).toBe(true);
    expect(contextInputSchema.safeParse({}).success).toBe(false);
  });

  it('coderag_search inputSchema validates top_k as positive integer', () => {
    expect(searchInputSchema.safeParse({ query: 'test', top_k: 5 }).success).toBe(true);
    expect(searchInputSchema.safeParse({ query: 'test', top_k: -1 }).success).toBe(false);
    expect(searchInputSchema.safeParse({ query: 'test', top_k: 1.5 }).success).toBe(false);
  });

  it('coderag_search inputSchema rejects top_k above 100', () => {
    expect(searchInputSchema.safeParse({ query: 'test', top_k: 100 }).success).toBe(true);
    expect(searchInputSchema.safeParse({ query: 'test', top_k: 101 }).success).toBe(false);
  });

  it('coderag_search inputSchema rejects file_path with path traversal', () => {
    expect(searchInputSchema.safeParse({ query: 'test', file_path: 'src/utils' }).success).toBe(true);
    expect(searchInputSchema.safeParse({ query: 'test', file_path: '../etc/passwd' }).success).toBe(false);
    expect(searchInputSchema.safeParse({ query: 'test', file_path: 'src/../../secret' }).success).toBe(false);
  });

  it('coderag_context inputSchema validates max_tokens as positive integer', () => {
    expect(contextInputSchema.safeParse({ file_path: 'src/index.ts', max_tokens: 4000 }).success).toBe(true);
    expect(contextInputSchema.safeParse({ file_path: 'src/index.ts', max_tokens: -100 }).success).toBe(false);
  });

  it('coderag_context inputSchema rejects max_tokens above 128000', () => {
    // 128000 is the inclusive upper bound.
    expect(contextInputSchema.safeParse({ file_path: 'src/index.ts', max_tokens: 128000 }).success).toBe(true);
    expect(contextInputSchema.safeParse({ file_path: 'src/index.ts', max_tokens: 128001 }).success).toBe(false);
  });

  it('coderag_context inputSchema rejects file_path with path traversal', () => {
    expect(contextInputSchema.safeParse({ file_path: 'src/index.ts' }).success).toBe(true);
    expect(contextInputSchema.safeParse({ file_path: '../etc/passwd' }).success).toBe(false);
    expect(contextInputSchema.safeParse({ file_path: 'src/../../secret' }).success).toBe(false);
  });

  it('coderag_context inputSchema applies defaults', () => {
    const parsed = contextInputSchema.parse({ file_path: 'src/index.ts' });
    expect(parsed.include_tests).toBe(true);
    expect(parsed.include_interfaces).toBe(true);
    expect(parsed.max_tokens).toBe(8000);
  });

  it('coderag_search inputSchema applies default top_k', () => {
    expect(searchInputSchema.parse({ query: 'test' }).top_k).toBe(10);
  });
});
|
|
522
|
+
// --- CodeRAGServer Tests ---
describe('CodeRAGServer', () => {
  it('should export MCP_SERVER_VERSION', async () => {
    const serverModule = await import('./server.js');
    expect(serverModule.MCP_SERVER_VERSION).toBe('0.1.0');
  });

  it('should export NO_INDEX_MESSAGE', async () => {
    const serverModule = await import('./server.js');
    expect(serverModule.NO_INDEX_MESSAGE).toContain('No index found');
    expect(serverModule.NO_INDEX_MESSAGE).toContain('npx coderag index');
    expect(serverModule.NO_INDEX_MESSAGE).toContain('CodeRAG: Index');
  });

  it('should create a server instance', async () => {
    const { CodeRAGServer } = await import('./server.js');
    const instance = new CodeRAGServer({ rootDir: '/tmp/test' });
    expect(instance).toBeDefined();
    expect(instance.getServer()).toBeDefined();
  });

  it('should default indexCheck to exists:false', async () => {
    const { CodeRAGServer } = await import('./server.js');
    const instance = new CodeRAGServer({ rootDir: '/tmp/test-index-check' });
    const check = instance.getIndexCheck();
    expect(check.exists).toBe(false);
    expect(check.empty).toBe(false);
  });

  it('checkIndex should return null when config is missing', async () => {
    const { CodeRAGServer } = await import('./server.js');
    const instance = new CodeRAGServer({ rootDir: '/tmp/nonexistent-project-dir' });
    expect(await instance.checkIndex()).toBeNull();
  });
});
|
|
554
|
+
// --- Index Check Integration ---
|
|
555
|
+
import { mkdtemp, writeFile, mkdir, rm } from 'node:fs/promises';
|
|
556
|
+
import { join } from 'node:path';
|
|
557
|
+
import { tmpdir } from 'node:os';
|
|
558
|
+
describe('CodeRAGServer.checkIndex', () => {
  let workDir;

  beforeEach(async () => {
    workDir = await mkdtemp(join(tmpdir(), 'coderag-server-check-'));
  });

  afterEach(async () => {
    await rm(workDir, { recursive: true, force: true });
  });

  it('should return exists:false when no index exists but config does', async () => {
    // Write a minimal .coderag.yaml
    await writeFile(join(workDir, '.coderag.yaml'), 'version: "1"\nproject:\n name: test\n languages: auto\n');
    const { CodeRAGServer: ServerClass } = await import('./server.js');
    const instance = new ServerClass({ rootDir: workDir });
    const result = await instance.checkIndex();
    expect(result).not.toBeNull();
    expect(result.exists).toBe(false);
  });

  it('should return exists:true when index data exists', async () => {
    await writeFile(join(workDir, '.coderag.yaml'), 'version: "1"\nproject:\n name: test\n languages: auto\n');
    // Create index data
    const storagePath = join(workDir, '.coderag');
    await mkdir(join(storagePath, 'chunks.lance'), { recursive: true });
    await writeFile(join(storagePath, 'bm25-index.json'), '{"documentCount": 10}');
    const { CodeRAGServer: ServerClass } = await import('./server.js');
    const instance = new ServerClass({ rootDir: workDir });
    const result = await instance.checkIndex();
    expect(result).not.toBeNull();
    expect(result.exists).toBe(true);
    expect(result.empty).toBe(false);
  });
});
|
|
589
|
+
// --- SSE Transport Tests ---
|
|
590
|
+
import * as http from 'node:http';
|
|
591
|
+
import { CodeRAGServer } from './server.js';
|
|
592
|
+
// Promise adapter around http.request that buffers the full response body.
// Resolves with { statusCode, headers, body }; rejects on transport errors.
function httpRequest(url, options = {}) {
  return new Promise((resolve, reject) => {
    const req = http.request(url, options, (res) => {
      const pieces = [];
      res.on('data', (chunk) => pieces.push(chunk.toString()));
      res.on('end', () => {
        resolve({
          statusCode: res.statusCode ?? 0,
          headers: res.headers,
          body: pieces.join(''),
        });
      });
    });
    req.on('error', reject);
    req.end();
  });
}
|
|
605
|
+
// Integration tests for the server's SSE (Server-Sent Events) transport.
// Each test talks to a real HTTP server started by `connectSSE`, using raw
// `node:http` requests because SSE responses never end on their own — the
// helpers below read what they need and then destroy the socket.
describe('SSE transport', () => {
    let server;
    let port;
    beforeEach(async () => {
        // Use a dynamic port to avoid conflicts (0 would let the OS assign,
        // but we need to know the port; use a high range instead)
        // NOTE(review): a random high port can still collide with another
        // process or a parallel test worker — confirm acceptable flake rate.
        port = 40000 + Math.floor(Math.random() * 10000);
        server = new CodeRAGServer({ rootDir: '/tmp/test-sse' });
        await server.connectSSE(port);
    });
    afterEach(async () => {
        // Shut down the HTTP listener so the next test can bind a fresh port.
        await server.close();
    });
    it('should start server on the specified port', async () => {
        // SSE keeps connection open, so we read headers and destroy immediately
        const result = await new Promise((resolve, reject) => {
            const req = http.get(`http://localhost:${port}/sse`, (res) => {
                resolve({ statusCode: res.statusCode ?? 0 });
                res.destroy();
            });
            req.on('error', (err) => {
                // ECONNRESET is expected: we destroy the socket ourselves above.
                if (err.code !== 'ECONNRESET') {
                    reject(err);
                }
            });
        });
        expect(result.statusCode).toBe(200);
    });
    it('GET /sse should return SSE headers', async () => {
        // We need to use a raw request that we can abort, since SSE keeps the connection open
        const result = await new Promise((resolve, reject) => {
            const req = http.get(`http://localhost:${port}/sse`, (res) => {
                resolve({ statusCode: res.statusCode ?? 0, headers: res.headers });
                // Destroy the connection immediately since we only need headers
                res.destroy();
            });
            req.on('error', (err) => {
                // Ignore ECONNRESET from our destroy
                if (err.code !== 'ECONNRESET') {
                    reject(err);
                }
            });
        });
        expect(result.statusCode).toBe(200);
        expect(result.headers['content-type']).toBe('text/event-stream');
        expect(result.headers['cache-control']).toContain('no-cache');
    });
    it('GET /sse should send endpoint event with session ID', async () => {
        // The SSE handshake's first event advertises the POST endpoint
        // (including a per-connection sessionId) that the client must use.
        const data = await new Promise((resolve, reject) => {
            const req = http.get(`http://localhost:${port}/sse`, (res) => {
                let body = '';
                res.on('data', (chunk) => {
                    body += chunk.toString();
                    // Once we have the endpoint event, resolve
                    if (body.includes('event: endpoint')) {
                        resolve(body);
                        res.destroy();
                    }
                });
            });
            req.on('error', (err) => {
                if (err.code !== 'ECONNRESET') {
                    reject(err);
                }
            });
            // Timeout safety
            // NOTE(review): this timer is never cleared; the late reject on an
            // already-settled Promise is a no-op, but the timer may keep the
            // event loop alive briefly — consider clearTimeout/unref.
            setTimeout(() => { reject(new Error('Timeout waiting for SSE endpoint event')); }, 3000);
        });
        expect(data).toContain('event: endpoint');
        expect(data).toContain('/messages?sessionId=');
    });
    it('POST /messages should return 400 when sessionId is missing', async () => {
        const res = await httpRequest(`http://localhost:${port}/messages`, { method: 'POST' });
        expect(res.statusCode).toBe(400);
        expect(res.body).toContain('Missing sessionId');
    });
    it('POST /messages should return 400 for unknown sessionId', async () => {
        const res = await httpRequest(`http://localhost:${port}/messages?sessionId=nonexistent`, { method: 'POST' });
        expect(res.statusCode).toBe(400);
        expect(res.body).toContain('No transport found');
    });
    it('should return 404 for unknown routes', async () => {
        const res = await httpRequest(`http://localhost:${port}/unknown`);
        expect(res.statusCode).toBe(404);
        expect(res.body).toBe('Not Found');
    });
    it('should handle CORS preflight requests', async () => {
        const res = await httpRequest(`http://localhost:${port}/sse`, { method: 'OPTIONS' });
        expect(res.statusCode).toBe(204);
        expect(res.headers['access-control-allow-origin']).toBe('*');
        expect(res.headers['access-control-allow-methods']).toContain('POST');
    });
    it('close() should shut down the server', async () => {
        await server.close();
        // Attempting to connect should now fail
        await expect(httpRequest(`http://localhost:${port}/sse`)).rejects.toThrow();
        // Create a new server for the afterEach cleanup to close without error
        server = new CodeRAGServer({ rootDir: '/tmp/test-sse' });
        port = 40000 + Math.floor(Math.random() * 10000);
        await server.connectSSE(port);
    });
    it('POST /messages should forward to transport for a valid session', async () => {
        // First, establish an SSE connection and extract the sessionId
        const sessionId = await new Promise((resolve, reject) => {
            const req = http.get(`http://localhost:${port}/sse`, (res) => {
                let body = '';
                res.on('data', (chunk) => {
                    body += chunk.toString();
                    if (body.includes('event: endpoint')) {
                        // Extract sessionId from the endpoint event data
                        // (presumably a UUID — the regex only matches hex digits
                        // and dashes; verify against the server's ID format)
                        const match = body.match(/sessionId=([a-f0-9-]+)/);
                        if (match) {
                            resolve(match[1]);
                        }
                        // Keep the SSE connection alive; don't destroy it
                    }
                });
            });
            req.on('error', (err) => {
                if (err.code !== 'ECONNRESET') {
                    reject(err);
                }
            });
            // NOTE(review): timer never cleared on success — see note above
            // about the other 3s safety timeout in this suite.
            setTimeout(() => { reject(new Error('Timeout')); }, 3000);
        });
        expect(sessionId).toBeDefined();
        // Now POST a valid JSON-RPC message to the /messages endpoint
        const postResult = await new Promise((resolve, reject) => {
            const postData = JSON.stringify({
                jsonrpc: '2.0',
                method: 'initialize',
                params: {
                    protocolVersion: '2024-11-05',
                    capabilities: {},
                    clientInfo: { name: 'test-client', version: '1.0.0' },
                },
                id: 1,
            });
            const req = http.request(`http://localhost:${port}/messages?sessionId=${sessionId}`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    'Content-Length': Buffer.byteLength(postData),
                },
            }, (res) => {
                let body = '';
                res.on('data', (chunk) => { body += chunk.toString(); });
                res.on('end', () => {
                    resolve({ statusCode: res.statusCode ?? 0, body });
                });
            });
            req.on('error', reject);
            req.write(postData);
            req.end();
        });
        // 202 Accepted is the expected response from SSEServerTransport.handlePostMessage
        expect(postResult.statusCode).toBe(202);
    });
});
|
|
764
|
+
//# sourceMappingURL=server.test.js.map
|