smart-coding-mcp 1.3.0 → 1.3.1

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,11 @@
  # Smart Coding MCP
 
- An extensible Model Context Protocol (MCP) server that provides intelligent semantic code search for AI assistants. Built with local AI models, inspired by Cursor's semantic search research.
+ [![npm version](https://img.shields.io/npm/v/smart-coding-mcp.svg)](https://www.npmjs.com/package/smart-coding-mcp)
+ [![npm downloads](https://img.shields.io/npm/dm/smart-coding-mcp.svg)](https://www.npmjs.com/package/smart-coding-mcp)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
+ [![Node.js](https://img.shields.io/badge/Node.js-%3E%3D18-green.svg)](https://nodejs.org/)
+
+ An extensible Model Context Protocol (MCP) server that provides intelligent semantic code search for AI assistants. Built with local AI models (RAG), inspired by Cursor's semantic search research.
 
  ## What This Does
 
package/features/clear-cache.js CHANGED
@@ -1,16 +1,39 @@
  export class CacheClearer {
- constructor(embedder, cache, config) {
+ constructor(embedder, cache, config, indexer) {
  this.cache = cache;
  this.config = config;
+ this.indexer = indexer;
+ this.isClearing = false;
  }
 
  async execute() {
- await this.cache.clear();
- return {
- success: true,
- message: `Cache cleared successfully. Next indexing will be a full rebuild.`,
- cacheDirectory: this.config.cacheDirectory
- };
+ // Check if indexing is in progress
+ if (this.indexer && this.indexer.isIndexing) {
+ throw new Error("Cannot clear cache while indexing is in progress. Please wait for indexing to complete.");
+ }
+
+ // Check if cache is currently being saved (race condition prevention)
+ if (this.cache.isSaving) {
+ throw new Error("Cannot clear cache while cache is being saved. Please try again in a moment.");
+ }
+
+ // Check if a clear operation is already in progress (prevent concurrent clears)
+ if (this.isClearing) {
+ throw new Error("Cache clear operation already in progress. Please wait for it to complete.");
+ }
+
+ this.isClearing = true;
+
+ try {
+ await this.cache.clear();
+ return {
+ success: true,
+ message: `Cache cleared successfully. Next indexing will be a full rebuild.`,
+ cacheDirectory: this.config.cacheDirectory
+ };
+ } finally {
+ this.isClearing = false;
+ }
  }
  }
 
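The guards above turn cache clearing from an unconditional delete into an operation that can refuse to run. A minimal usage sketch, assuming only the constructor signature and named export shown in this diff; the import path and the stub collaborators are illustrative, not the package's real wiring, and `embedder` is passed as `null` because the constructor accepts but never stores it:

```js
// Minimal sketch: stub collaborators are enough to exercise the new guards,
// since execute() only inspects indexer.isIndexing, cache.isSaving and this.isClearing.
import { CacheClearer } from "./features/clear-cache.js"; // path assumed, relative to the package root

const cache = { isSaving: false, clear: async () => {} };      // stub
const config = { cacheDirectory: "/tmp/.smart-coding-cache" }; // stub
const indexer = { isIndexing: true };                          // pretend a reindex is running

const clearer = new CacheClearer(null /* embedder unused */, cache, config, indexer);

try {
  await clearer.execute();
} catch (err) {
  console.error(err.message);
  // -> "Cannot clear cache while indexing is in progress. Please wait for indexing to complete."
}
```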
@@ -18,6 +18,7 @@ export class CodebaseIndexer {
  this.watcher = null;
  this.workers = [];
  this.workerReady = [];
+ this.isIndexing = false;
  }
 
  /**
@@ -409,8 +410,22 @@ export class CodebaseIndexer {
  return filesToProcess;
  }
 
- async indexAll() {
- const totalStartTime = Date.now();
+ async indexAll(force = false) {
+ if (this.isIndexing) {
+ console.error("[Indexer] Indexing already in progress, skipping concurrent request");
+ return { skipped: true, reason: "Indexing already in progress" };
+ }
+
+ this.isIndexing = true;
+
+ try {
+ if (force) {
+ console.error("[Indexer] Force reindex requested: clearing cache");
+ this.cache.setVectorStore([]);
+ this.cache.fileHashes = new Map();
+ }
+
+ const totalStartTime = Date.now();
  console.error(`[Indexer] Starting optimized indexing in ${this.config.searchDirectory}...`);
 
  // Step 1: Fast file discovery with fdir
@@ -419,7 +434,7 @@ export class CodebaseIndexer {
  if (files.length === 0) {
  console.error("[Indexer] No files found to index");
  this.sendProgress(100, 100, "No files found to index");
- return;
+ return { skipped: false, filesProcessed: 0, chunksCreated: 0, message: "No files found to index" };
  }
 
  // Send progress: discovery complete
@@ -432,7 +447,15 @@ export class CodebaseIndexer {
  console.error("[Indexer] All files unchanged, nothing to index");
  this.sendProgress(100, 100, "All files up to date");
  await this.cache.save();
- return;
+ const vectorStore = this.cache.getVectorStore();
+ return {
+ skipped: false,
+ filesProcessed: 0,
+ chunksCreated: 0,
+ totalFiles: new Set(vectorStore.map(v => v.file)).size,
+ totalChunks: vectorStore.length,
+ message: "All files up to date"
+ };
  }
 
  // Send progress: filtering complete
@@ -541,6 +564,20 @@ export class CodebaseIndexer {
  this.sendProgress(100, 100, `Complete: ${totalChunks} chunks from ${filesToProcess.length} files in ${totalTime}s`);
 
  await this.cache.save();
+
+ const vectorStore = this.cache.getVectorStore();
+ return {
+ skipped: false,
+ filesProcessed: filesToProcess.length,
+ chunksCreated: totalChunks,
+ totalFiles: new Set(vectorStore.map(v => v.file)).size,
+ totalChunks: vectorStore.length,
+ duration: totalTime,
+ message: `Indexed ${filesToProcess.length} files (${totalChunks} chunks) in ${totalTime}s`
+ };
+ } finally {
+ this.isIndexing = false;
+ }
  }
 
  setupFileWatcher() {
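With these hunks, `indexAll()` resolves to a result object on every path instead of returning `undefined`. A sketch of how a caller might consume it; the wrapper function is illustrative, only the field names come from the diff:

```js
// Illustrative wrapper: summarizes what a CodebaseIndexer.indexAll() call did,
// based on the result shape introduced above.
async function runReindex(indexer, force = false) {
  const result = await indexer.indexAll(force);

  if (result?.skipped) {
    // A concurrent indexAll() is still running.
    return `Reindex skipped: ${result.reason}`;
  }

  // totalFiles/totalChunks are absent on the "no files found" path, hence the defaults.
  const { filesProcessed, chunksCreated, totalFiles = 0, totalChunks = 0, message } = result;
  return `${message} (index holds ${totalFiles} files / ${totalChunks} chunks; ` +
         `${filesProcessed} files processed, ${chunksCreated} chunks created this run)`;
}
```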
@@ -608,25 +645,41 @@ export function getToolDefinition() {
  // Tool handler
  export async function handleToolCall(request, indexer) {
  const force = request.params.arguments?.force || false;
+ const result = await indexer.indexAll(force);
 
- if (force) {
- // Clear cache to force full reindex
- indexer.cache.setVectorStore([]);
- indexer.cache.fileHashes = new Map();
+ // Handle case when indexing was skipped due to concurrent request
+ if (result?.skipped) {
+ return {
+ content: [{
+ type: "text",
+ text: `Indexing skipped: ${result.reason}\n\nPlease wait for the current indexing operation to complete before requesting another reindex.`
+ }]
+ };
  }
 
- await indexer.indexAll();
-
+ // Get current stats from cache
  const vectorStore = indexer.cache.getVectorStore();
  const stats = {
- totalChunks: vectorStore.length,
- totalFiles: new Set(vectorStore.map(v => v.file)).size
+ totalChunks: result?.totalChunks ?? vectorStore.length,
+ totalFiles: result?.totalFiles ?? new Set(vectorStore.map(v => v.file)).size,
+ filesProcessed: result?.filesProcessed ?? 0,
+ chunksCreated: result?.chunksCreated ?? 0
  };
 
+ let message = result?.message
+ ? `Codebase reindexed successfully.\n\n${result.message}`
+ : `Codebase reindexed successfully.`;
+
+ message += `\n\nStatistics:\n- Total files in index: ${stats.totalFiles}\n- Total code chunks: ${stats.totalChunks}`;
+
+ if (stats.filesProcessed > 0) {
+ message += `\n- Files processed this run: ${stats.filesProcessed}\n- Chunks created this run: ${stats.chunksCreated}`;
+ }
+
  return {
  content: [{
  type: "text",
- text: `Codebase reindexed successfully.\n\nStatistics:\n- Files indexed: ${stats.totalFiles}\n- Code chunks: ${stats.totalChunks}`
+ text: message
  }]
  };
  }
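For orientation, a sketch of how an MCP tools/call request flows through the reworked handler. Only the `request.params.arguments.force` shape and the message template come from the diff; the wrapper function and the numbers are illustrative:

```js
// Sketch only: `handleToolCall` is the export patched above and `indexer` a
// constructed CodebaseIndexer; both are passed in because the module path and
// server wiring are not shown in this diff.
async function forceReindexViaTool(handleToolCall, indexer) {
  const request = { params: { arguments: { force: true } } };
  const { content } = await handleToolCall(request, indexer);
  return content[0].text;
  // e.g. (numbers illustrative):
  // Codebase reindexed successfully.
  //
  // Indexed 42 files (530 chunks) in 3.2s
  //
  // Statistics:
  // - Total files in index: 42
  // - Total code chunks: 530
  // - Files processed this run: 42
  // - Chunks created this run: 530
}
```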
package/index.js CHANGED
@@ -97,7 +97,7 @@ async function initialize() {
  // Initialize features
  indexer = new CodebaseIndexer(embedder, cache, config, server);
  hybridSearch = new HybridSearch(embedder, cache, config);
- const cacheClearer = new ClearCacheFeature.CacheClearer(embedder, cache, config);
+ const cacheClearer = new ClearCacheFeature.CacheClearer(embedder, cache, config, indexer);
 
  // Store feature instances (matches features array order)
  features[0].instance = hybridSearch;
@@ -6,6 +6,7 @@ export class EmbeddingsCache {
6
6
  this.config = config;
7
7
  this.vectorStore = [];
8
8
  this.fileHashes = new Map();
9
+ this.isSaving = false;
9
10
  }
10
11
 
11
12
  async load() {
@@ -55,6 +56,8 @@ export class EmbeddingsCache {
55
56
  async save() {
56
57
  if (!this.config.enableCache) return;
57
58
 
59
+ this.isSaving = true;
60
+
58
61
  try {
59
62
  await fs.mkdir(this.config.cacheDirectory, { recursive: true });
60
63
  const cacheFile = path.join(this.config.cacheDirectory, "embeddings.json");
@@ -66,6 +69,8 @@ export class EmbeddingsCache {
66
69
  ]);
67
70
  } catch (error) {
68
71
  console.error("[Cache] Failed to save cache:", error.message);
72
+ } finally {
73
+ this.isSaving = false;
69
74
  }
70
75
  }
71
76
 
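Release 1.3.1 uses the same flag discipline in three places: `save()` resets `isSaving` in a `finally`, while `CacheClearer.execute()` and `indexAll()` additionally reject reentry; `index.js` now passes the indexer into the `CacheClearer` so all of these flags live on shared in-process instances. A generic sketch combining both ideas; the `withBusyFlag` helper is not part of the package, it only illustrates the pattern:

```js
// Hypothetical helper, not an export of smart-coding-mcp: reject reentry, set the
// flag, and always reset it, even when the work throws.
async function withBusyFlag(owner, flagName, work) {
  if (owner[flagName]) {
    throw new Error(`${flagName} is already set: operation in progress`);
  }
  owner[flagName] = true;
  try {
    return await work();
  } finally {
    owner[flagName] = false;
  }
}

// e.g. a save() guarded this way (illustration only; writeCacheFiles is hypothetical):
// save() { return withBusyFlag(this, "isSaving", () => this.writeCacheFiles()); }
```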
package/lib/config.js CHANGED
@@ -49,7 +49,8 @@ const DEFAULT_CONFIG = {
  "**/coverage/**",
  "**/.next/**",
  "**/target/**",
- "**/vendor/**"
+ "**/vendor/**",
+ "**/.smart-coding-cache/**"
  ],
  chunkSize: 25, // Lines per chunk (larger = fewer embeddings = faster indexing)
  chunkOverlap: 5, // Overlap between chunks for context continuity
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "smart-coding-mcp",
- "version": "1.3.0",
+ "version": "1.3.1",
  "description": "An extensible MCP server that enhances coding productivity with AI-powered features including semantic code search, intelligent indexing, and more, using local LLMs",
  "type": "module",
  "main": "index.js",
@@ -10,6 +10,8 @@
  "scripts": {
  "start": "node index.js",
  "dev": "node --watch index.js",
+ "test": "vitest run",
+ "test:watch": "vitest",
  "clear-cache": "node scripts/clear-cache.js"
  },
  "keywords": [
@@ -51,5 +53,8 @@
  },
  "engines": {
  "node": ">=18.0.0"
+ },
+ "devDependencies": {
+ "vitest": "^4.0.16"
  }
  }
@@ -0,0 +1,288 @@
+ /**
+ * Tests for CacheClearer feature
+ *
+ * Tests the cache clearing functionality including:
+ * - Basic cache clearing
+ * - Protection during indexing
+ * - Protection during save operations
+ * - Concurrent clear prevention
+ * - Tool handler responses
+ */
+
+ import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest';
+ import {
+ createTestFixtures,
+ cleanupFixtures,
+ clearTestCache,
+ createMockRequest
+ } from './helpers.js';
+ import * as ClearCacheFeature from '../features/clear-cache.js';
+ import { CacheClearer } from '../features/clear-cache.js';
+ import fs from 'fs/promises';
+ import path from 'path';
+
+ describe('CacheClearer', () => {
+ let fixtures;
+
+ beforeAll(async () => {
+ fixtures = await createTestFixtures({ workerThreads: 2 });
+ });
+
+ afterAll(async () => {
+ await cleanupFixtures(fixtures);
+ });
+
+ beforeEach(async () => {
+ // Reset state
+ fixtures.indexer.isIndexing = false;
+ fixtures.cache.isSaving = false;
+ fixtures.cacheClearer.isClearing = false;
+ });
+
+ describe('Basic Cache Clearing', () => {
+ it('should clear cache successfully', async () => {
+ // First ensure we have a cache
+ await fixtures.indexer.indexAll(true);
+
+ // Verify cache exists
+ expect(fixtures.cache.getVectorStore().length).toBeGreaterThan(0);
+
+ // Clear cache
+ const result = await fixtures.cacheClearer.execute();
+
+ expect(result.success).toBe(true);
+ expect(result.message).toContain('Cache cleared successfully');
+ expect(result.cacheDirectory).toBe(fixtures.config.cacheDirectory);
+ });
+
+ it('should empty vectorStore and fileHashes', async () => {
+ // Create some cache
+ await fixtures.indexer.indexAll(true);
+
+ // Clear
+ await fixtures.cacheClearer.execute();
+
+ // Both should be empty
+ expect(fixtures.cache.getVectorStore().length).toBe(0);
+ expect(fixtures.cache.fileHashes.size).toBe(0);
+ });
+
+ it('should delete cache directory', async () => {
+ // Create cache
+ await fixtures.indexer.indexAll(true);
+
+ // Verify cache directory exists
+ await expect(fs.access(fixtures.config.cacheDirectory)).resolves.not.toThrow();
+
+ // Clear
+ await fixtures.cacheClearer.execute();
+
+ // Directory should not exist
+ await expect(fs.access(fixtures.config.cacheDirectory)).rejects.toThrow();
+ });
+ });
+
+ describe('Protection During Indexing', () => {
+ it('should prevent clear while indexing is in progress', async () => {
+ // Simulate indexing in progress
+ await clearTestCache(fixtures.config);
+ fixtures.cache.setVectorStore([]);
+ fixtures.cache.fileHashes = new Map();
+
+ const indexPromise = fixtures.indexer.indexAll(true);
+
+ // Wait for indexing to start
+ await new Promise(resolve => setTimeout(resolve, 100));
+ expect(fixtures.indexer.isIndexing).toBe(true);
+
+ // Try to clear - should fail
+ await expect(fixtures.cacheClearer.execute()).rejects.toThrow(
+ 'Cannot clear cache while indexing is in progress'
+ );
+
+ await indexPromise;
+ });
+
+ it('should allow clear after indexing completes', async () => {
+ // Complete indexing
+ await fixtures.indexer.indexAll(true);
+ expect(fixtures.indexer.isIndexing).toBe(false);
+
+ // Clear should work
+ const result = await fixtures.cacheClearer.execute();
+ expect(result.success).toBe(true);
+ });
+ });
+
+ describe('Protection During Save', () => {
+ it('should prevent clear while cache is being saved', async () => {
+ // Simulate save in progress
+ fixtures.cache.isSaving = true;
+
+ // Try to clear - should fail
+ await expect(fixtures.cacheClearer.execute()).rejects.toThrow(
+ 'Cannot clear cache while cache is being saved'
+ );
+
+ // Reset
+ fixtures.cache.isSaving = false;
+ });
+
+ it('should allow clear after save completes', async () => {
+ // Index first
+ await fixtures.indexer.indexAll(true);
+
+ // isSaving should be false after indexing
+ expect(fixtures.cache.isSaving).toBe(false);
+
+ // Clear should work
+ const result = await fixtures.cacheClearer.execute();
+ expect(result.success).toBe(true);
+ });
+ });
+
+ describe('Concurrent Clear Prevention', () => {
+ it('should prevent multiple concurrent clears', async () => {
+ // Index first
+ await fixtures.indexer.indexAll(true);
+
+ // Reset the isClearing flag
+ fixtures.cacheClearer.isClearing = false;
+
+ // Start multiple concurrent clears
+ const promises = [
+ fixtures.cacheClearer.execute(),
+ fixtures.cacheClearer.execute(),
+ fixtures.cacheClearer.execute()
+ ];
+
+ const results = await Promise.allSettled(promises);
+
+ // Exactly one should succeed
+ const successes = results.filter(r => r.status === 'fulfilled');
+ const failures = results.filter(r => r.status === 'rejected');
+
+ expect(successes.length).toBe(1);
+ expect(failures.length).toBe(2);
+
+ // Failures should have correct error message
+ for (const failure of failures) {
+ expect(failure.reason.message).toContain('already in progress');
+ }
+ });
+
+ it('should reset isClearing flag after completion', async () => {
+ // Index first
+ await fixtures.indexer.indexAll(true);
+
+ expect(fixtures.cacheClearer.isClearing).toBe(false);
+
+ // Clear
+ await fixtures.cacheClearer.execute();
+
+ // Flag should be reset
+ expect(fixtures.cacheClearer.isClearing).toBe(false);
+ });
+
+ it('should reset isClearing flag even on error', async () => {
+ // Set up for failure
+ fixtures.cache.isSaving = true;
+
+ try {
+ await fixtures.cacheClearer.execute();
+ } catch {
+ // Expected to fail
+ }
+
+ // isClearing should not have been set (failed before setting)
+ expect(fixtures.cacheClearer.isClearing).toBe(false);
+
+ // Reset
+ fixtures.cache.isSaving = false;
+ });
+ });
+ });
+
+ describe('Clear Cache Tool Handler', () => {
+ let fixtures;
+
+ beforeAll(async () => {
+ fixtures = await createTestFixtures({ workerThreads: 2 });
+ });
+
+ afterAll(async () => {
+ await cleanupFixtures(fixtures);
+ });
+
+ beforeEach(async () => {
+ fixtures.indexer.isIndexing = false;
+ fixtures.cache.isSaving = false;
+ fixtures.cacheClearer.isClearing = false;
+ });
+
+ describe('Tool Definition', () => {
+ it('should have correct tool definition', () => {
+ const toolDef = ClearCacheFeature.getToolDefinition();
+
+ expect(toolDef.name).toBe('c_clear_cache');
+ expect(toolDef.description).toContain('cache');
+ expect(toolDef.annotations.destructiveHint).toBe(true);
+ expect(toolDef.inputSchema.properties).toEqual({});
+ });
+ });
+
+ describe('Tool Handler', () => {
+ it('should return success message on cleared cache', async () => {
+ // Index first
+ await fixtures.indexer.indexAll(true);
+
+ const request = createMockRequest('c_clear_cache', {});
+ const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);
+
+ expect(result.content[0].text).toContain('Cache cleared successfully');
+ expect(result.content[0].text).toContain('Cache directory:');
+ });
+
+ it('should return error message when indexing is in progress', async () => {
+ // Simulate indexing
+ await clearTestCache(fixtures.config);
+ fixtures.cache.setVectorStore([]);
+ fixtures.cache.fileHashes = new Map();
+
+ const indexPromise = fixtures.indexer.indexAll(true);
+ await new Promise(resolve => setTimeout(resolve, 100));
+
+ const request = createMockRequest('c_clear_cache', {});
+ const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);
+
+ expect(result.content[0].text).toContain('Failed to clear cache');
+ expect(result.content[0].text).toContain('indexing is in progress');
+
+ await indexPromise;
+ });
+
+ it('should return error message when save is in progress', async () => {
+ fixtures.cache.isSaving = true;
+
+ const request = createMockRequest('c_clear_cache', {});
+ const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);
+
+ expect(result.content[0].text).toContain('Failed to clear cache');
+ expect(result.content[0].text).toContain('being saved');
+
+ fixtures.cache.isSaving = false;
+ });
+
+ it('should return error message when clear is already in progress', async () => {
+ fixtures.cacheClearer.isClearing = true;
+
+ const request = createMockRequest('c_clear_cache', {});
+ const result = await ClearCacheFeature.handleToolCall(request, fixtures.cacheClearer);
+
+ expect(result.content[0].text).toContain('Failed to clear cache');
+ expect(result.content[0].text).toContain('already in progress');
+
+ fixtures.cacheClearer.isClearing = false;
+ });
+ });
+ });
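The test suite imports `createTestFixtures`, `cleanupFixtures`, `clearTestCache`, and `createMockRequest` from a `./helpers.js` that is not part of this diff; from their usage, the fixtures expose `config`, `cache`, `indexer`, and `cacheClearer` instances wired the same way as `index.js`. A hypothetical sketch of the one helper whose shape is fully implied by the handlers (everything here is an assumption, not the package's actual code):

```js
// Hypothetical reconstruction of createMockRequest from ./helpers.js (not in this diff).
// The feature handlers only read request.params.arguments, so a minimal
// MCP-style tools/call envelope is enough.
export function createMockRequest(toolName, args = {}) {
  return { params: { name: toolName, arguments: args } };
}

// Usage, mirroring the tests above:
// const request = createMockRequest('c_clear_cache', {});
```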