bluera-knowledge 0.11.20 → 0.12.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/council-cache/1a43ed5977b8f29afc79a9bf5c4082ee5ad8338c42ab991a4241a48f80c1e46d.json +7 -0
- package/.claude-plugin/plugin.json +1 -1
- package/CHANGELOG.md +31 -0
- package/README.md +64 -5
- package/commands/crawl.md +7 -7
- package/commands/search.md +9 -2
- package/commands/skill-activation.md +130 -0
- package/dist/{chunk-MQGRQ2EG.js → chunk-C4SYGLAI.js} +27 -7
- package/dist/chunk-C4SYGLAI.js.map +1 -0
- package/dist/{chunk-ZSKQIMD7.js → chunk-CC6EGZ4D.js} +48 -8
- package/dist/chunk-CC6EGZ4D.js.map +1 -0
- package/dist/{chunk-Q2ZGPJ66.js → chunk-QCSFBMYW.js} +2 -2
- package/dist/index.js +64 -12
- package/dist/index.js.map +1 -1
- package/dist/mcp/server.js +2 -2
- package/dist/workers/background-worker-cli.js +2 -2
- package/hooks/hooks.json +28 -1
- package/hooks/pretooluse-bk-reminder.py +97 -0
- package/hooks/skill-activation.py +190 -0
- package/hooks/skill-rules.json +122 -0
- package/package.json +1 -1
- package/src/analysis/code-graph.test.ts +30 -0
- package/src/analysis/code-graph.ts +10 -2
- package/src/cli/commands/store.test.ts +78 -0
- package/src/cli/commands/store.ts +19 -0
- package/src/cli/commands/sync.test.ts +1 -1
- package/src/cli/commands/sync.ts +50 -1
- package/src/mcp/commands/sync.commands.test.ts +94 -6
- package/src/mcp/commands/sync.commands.ts +36 -6
- package/src/mcp/handlers/search.handler.ts +3 -1
- package/src/mcp/handlers/store.handler.test.ts +3 -0
- package/src/mcp/handlers/store.handler.ts +5 -2
- package/src/mcp/schemas/index.test.ts +36 -0
- package/src/mcp/schemas/index.ts +6 -0
- package/src/mcp/server.ts +11 -0
- package/src/services/code-graph.service.ts +11 -1
- package/src/services/job.service.test.ts +23 -0
- package/src/services/job.service.ts +10 -6
- package/src/services/watch.service.test.ts +14 -11
- package/vitest.config.ts +1 -1
- package/dist/chunk-MQGRQ2EG.js.map +0 -1
- package/dist/chunk-ZSKQIMD7.js.map +0 -1
- package/dist/{chunk-Q2ZGPJ66.js.map → chunk-QCSFBMYW.js.map} +0 -0

package/src/cli/commands/store.ts
CHANGED
@@ -1,3 +1,5 @@
+import { rm } from 'node:fs/promises';
+import { join } from 'node:path';
 import { Command } from 'commander';
 import { createServices, destroyServices } from '../../services/index.js';
 import type { StoreType } from '../../types/store.js';
@@ -49,6 +51,7 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
       'Store type: file (local dir), repo (git), web (crawled site)'
     )
     .requiredOption('-s, --source <path>', 'Local path for file/repo stores, URL for web stores')
+    .option('-b, --branch <branch>', 'Git branch to clone (repo stores only)')
     .option('-d, --description <desc>', 'Optional description for the store')
     .option('--tags <tags>', 'Comma-separated tags for filtering')
     .action(
@@ -57,6 +60,7 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
         options: {
           type: StoreType;
           source: string;
+          branch?: string;
           description?: string;
           tags?: string;
         }
@@ -79,6 +83,7 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
           options.type === 'web' || (options.type === 'repo' && isUrl)
             ? options.source
             : undefined,
+          branch: options.type === 'repo' ? options.branch : undefined,
           description: options.description,
           tags: options.tags?.split(',').map((t) => t.trim()),
         });
@@ -188,6 +193,20 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
           }
         }

+        // Delete LanceDB table first (so searches don't return results for deleted store)
+        await services.lance.deleteStore(s.id);
+
+        // Delete code graph file
+        await services.codeGraph.deleteGraph(s.id);
+
+        // For repo stores cloned from URL, remove the cloned directory
+        if (s.type === 'repo' && 'url' in s && s.url !== undefined) {
+          const dataDir = services.config.resolveDataDir();
+          const repoPath = join(dataDir, 'repos', s.id);
+          await rm(repoPath, { recursive: true, force: true });
+        }
+
+        // Delete from registry last
         const result = await services.store.delete(s.id);

         if (result.success) {

package/src/cli/commands/sync.test.ts
CHANGED
@@ -49,6 +49,6 @@ describe('createSyncCommand', () => {
     expect(pruneOpt?.description).toContain('Remove');

     const reindexOpt = options.find((o) => o.long === '--reindex');
-    expect(reindexOpt?.description).toContain('index');
+    expect(reindexOpt?.description).toContain('Re-index');
   });
 });
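
The new `-b, --branch` option only applies to repo stores. A minimal sketch of how a repo store created from a URL might carry the option, with a payload shape inferred from the hunks above (field names here are illustrative, not the package's documented API):

// Sketch only: payload shape inferred from the store.ts hunks above.
interface RepoStorePayloadSketch {
  name: string;
  type: 'repo';
  url?: string;          // set when the source is a URL
  branch?: string;       // new: forwarded from -b/--branch, repo stores only
  description?: string;
  tags?: string[];
}

const payload: RepoStorePayloadSketch = {
  name: 'docs',                                // hypothetical store name
  type: 'repo',
  url: 'https://github.com/example/docs.git',  // hypothetical source URL
  branch: 'main',                              // cloned instead of the default branch
  tags: ['docs'],
};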

package/src/cli/commands/sync.ts
CHANGED
@@ -1,11 +1,13 @@
 import { Command } from 'commander';
 import { createServices, destroyServices } from '../../services/index.js';
+import { JobService } from '../../services/job.service.js';
 import { StoreDefinitionService } from '../../services/store-definition.service.js';
 import {
   isFileStoreDefinition,
   isRepoStoreDefinition,
   isWebStoreDefinition,
 } from '../../types/store-definition.js';
+import { spawnBackgroundWorker } from '../../workers/spawn-worker.js';
 import type { StoreService } from '../../services/store.service.js';
 import type { StoreDefinition } from '../../types/store-definition.js';
 import type { GlobalOptions } from '../program.js';
@@ -19,6 +21,8 @@ interface SyncResult {
   dryRun: boolean;
   wouldCreate: string[];
   wouldPrune: string[];
+  reindexJobs: Array<{ store: string; jobId: string }>;
+  wouldReindex: string[];
 }

 /**
@@ -125,6 +129,8 @@ export function createSyncCommand(getOptions: () => GlobalOptions): Command {
         dryRun: options.dryRun === true,
         wouldCreate: [],
         wouldPrune: [],
+        reindexJobs: [],
+        wouldReindex: [],
       };

       // Process each definition
@@ -173,6 +179,29 @@ export function createSyncCommand(getOptions: () => GlobalOptions): Command {
         }
       }

+      // Re-index existing stores if requested
+      if (options.reindex === true && result.skipped.length > 0) {
+        if (options.dryRun === true) {
+          result.wouldReindex = [...result.skipped];
+        } else {
+          const dataDir = globalOpts.dataDir ?? services.config.resolveDataDir();
+          const jobService = new JobService(dataDir);
+
+          for (const storeName of result.skipped) {
+            const store = await services.store.getByName(storeName);
+            if (store !== undefined) {
+              const job = jobService.createJob({
+                type: 'index',
+                details: { storeId: store.id, storeName: store.name },
+                message: `Re-indexing ${storeName}...`,
+              });
+              spawnBackgroundWorker(job.id, dataDir);
+              result.reindexJobs.push({ store: storeName, jobId: job.id });
+            }
+          }
+        }
+      }
+
       // Output result
       if (globalOpts.format === 'json') {
         console.log(JSON.stringify(result, null, 2));
@@ -189,19 +218,25 @@

 function printHumanReadable(result: SyncResult, quiet: boolean): void {
   if (quiet) {
-    // Just print created/pruned store names
+    // Just print created/pruned/reindexed store names
     for (const name of result.created) {
       console.log(`created: ${name}`);
     }
     for (const name of result.pruned) {
       console.log(`pruned: ${name}`);
     }
+    for (const { store, jobId } of result.reindexJobs) {
+      console.log(`reindexing: ${store} (${jobId})`);
+    }
     for (const name of result.wouldCreate) {
       console.log(`would create: ${name}`);
     }
     for (const name of result.wouldPrune) {
       console.log(`would prune: ${name}`);
     }
+    for (const name of result.wouldReindex) {
+      console.log(`would reindex: ${name}`);
+    }
     return;
   }

@@ -260,5 +295,19 @@ function printHumanReadable(result: SyncResult, quiet: boolean): void {
     }
   }

+  if (result.reindexJobs.length > 0) {
+    console.log(`Reindexing started (${String(result.reindexJobs.length)}):`);
+    for (const { store, jobId } of result.reindexJobs) {
+      console.log(`  ↻ ${store} (Job: ${jobId})`);
+    }
+  }
+
+  if (result.wouldReindex.length > 0) {
+    console.log(`Would reindex (${String(result.wouldReindex.length)}):`);
+    for (const name of result.wouldReindex) {
+      console.log(`  ↻ ${name}`);
+    }
+  }
+
   console.log('');
 }
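
For orientation, a hedged sketch of what a `--reindex` run could report as JSON: the field names come from the SyncResult handling in the hunks above, while the values (store names, job ids) are invented.

// Sketch of a possible --reindex result; values are invented, field names come from SyncResult above.
const reindexRun = {
  created: [],
  pruned: [],
  skipped: ['docs', 'api'],            // existing stores left in place...
  dryRun: false,
  wouldCreate: [],
  wouldPrune: [],
  reindexJobs: [                       // ...and queued for background re-indexing
    { store: 'docs', jobId: 'job_abc123' },
    { store: 'api', jobId: 'job_def456' },
  ],
  wouldReindex: [],
};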

package/src/mcp/commands/sync.commands.test.ts
CHANGED
@@ -1,6 +1,5 @@
-import { describe, it, expect, beforeEach, afterEach } from 'vitest';
-import {
-import { rm, mkdtemp, mkdir, writeFile } from 'node:fs/promises';
+import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
+import { rm, mkdtemp, mkdir } from 'node:fs/promises';
 import { tmpdir } from 'node:os';
 import { join } from 'node:path';
 import { syncCommands, handleStoresSync } from './sync.commands.js';
@@ -8,7 +7,11 @@ import { StoreService } from '../../services/store.service.js';
 import { StoreDefinitionService } from '../../services/store-definition.service.js';
 import type { HandlerContext } from '../types.js';
 import type { ServiceContainer } from '../../services/index.js';
-
+
+// Mock spawnBackgroundWorker
+vi.mock('../../workers/spawn-worker.js', () => ({
+  spawnBackgroundWorker: vi.fn(),
+}));

 /**
  * Create a minimal mock service container for testing
@@ -45,11 +48,18 @@ describe('sync.commands', () => {

     // Valid with options
     const result2 = syncCmd?.argsSchema?.safeParse({
-      reindex: true,
       prune: true,
       dryRun: true,
     });
     expect(result2?.success).toBe(true);
+
+    // Valid with reindex option
+    const result3 = syncCmd?.argsSchema?.safeParse({
+      reindex: true,
+      prune: true,
+      dryRun: true,
+    });
+    expect(result3?.success).toBe(true);
   });
 });

@@ -69,7 +79,7 @@ describe('sync.commands', () => {

     context = {
       services: createMockServices(storeService),
-      options: { projectRoot },
+      options: { projectRoot, dataDir },
     };
   });

@@ -279,5 +289,83 @@ describe('sync.commands', () => {
       expect(response.orphans).toHaveLength(0);
     });
   });
+
+  describe('reindex mode', () => {
+    it('reports wouldReindex in dry run mode', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      // Create store (auto-adds definition)
+      await storeService.create({
+        name: 'existing-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      const result = await handleStoresSync({ reindex: true, dryRun: true }, context);
+      const response = JSON.parse(result.content[0].text);
+
+      expect(response.dryRun).toBe(true);
+      expect(response.wouldReindex).toContain('existing-store');
+      expect(response.reindexJobs).toBeUndefined();
+    });
+
+    it('starts reindex jobs for existing stores', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      // Create store (auto-adds definition)
+      await storeService.create({
+        name: 'reindex-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      const result = await handleStoresSync({ reindex: true }, context);
+      const response = JSON.parse(result.content[0].text);
+
+      expect(response.reindexJobs).toHaveLength(1);
+      expect(response.reindexJobs[0].store).toBe('reindex-store');
+      expect(response.reindexJobs[0].jobId).toMatch(/^job_/);
+    });
+
+    it('does not reindex if reindex flag is not set', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      await storeService.create({
+        name: 'no-reindex-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      const result = await handleStoresSync({}, context);
+      const response = JSON.parse(result.content[0].text);
+
+      expect(response.reindexJobs).toBeUndefined();
+      expect(response.wouldReindex).toBeUndefined();
+    });
+
+    it('throws error when dataDir is undefined during reindex', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      await storeService.create({
+        name: 'error-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      // Create context without dataDir
+      const contextWithoutDataDir: HandlerContext = {
+        services: createMockServices(storeService),
+        options: { projectRoot },
+      };
+
+      await expect(handleStoresSync({ reindex: true }, contextWithoutDataDir)).rejects.toThrow(
+        'dataDir is required for reindexing'
+      );
+    });
+  });
 });
 });

package/src/mcp/commands/sync.commands.ts
CHANGED
@@ -1,10 +1,12 @@
 import { z } from 'zod';
+import { JobService } from '../../services/job.service.js';
 import { StoreDefinitionService } from '../../services/store-definition.service.js';
 import {
   isFileStoreDefinition,
   isRepoStoreDefinition,
   isWebStoreDefinition,
 } from '../../types/store-definition.js';
+import { spawnBackgroundWorker } from '../../workers/spawn-worker.js';
 import type { CommandDefinition } from './registry.js';
 import type { StoreDefinition } from '../../types/store-definition.js';
 import type { HandlerContext, ToolResponse } from '../types.js';
@@ -13,9 +15,9 @@ import type { HandlerContext, ToolResponse } from '../types.js';
  * Arguments for stores:sync command
  */
 export interface SyncStoresArgs {
-  reindex?: boolean;
   prune?: boolean;
   dryRun?: boolean;
+  reindex?: boolean;
 }

 /**
@@ -30,6 +32,8 @@ interface SyncResult {
   dryRun?: boolean;
   wouldCreate?: string[];
   wouldPrune?: string[];
+  reindexJobs?: Array<{ store: string; jobId: string }>;
+  wouldReindex?: string[];
 }

 /**
@@ -39,7 +43,6 @@ interface SyncResult {
  * - Creates missing stores from definitions
  * - Reports stores not in definitions (orphans)
  * - Optionally prunes orphan stores
- * - Optionally re-indexes existing stores
  */
 export async function handleStoresSync(
   args: SyncStoresArgs,
@@ -119,6 +122,33 @@ export async function handleStoresSync(
     }
   }

+  // Re-index existing stores if requested
+  if (args.reindex === true && result.skipped.length > 0) {
+    if (args.dryRun === true) {
+      result.wouldReindex = [...result.skipped];
+    } else {
+      result.reindexJobs = [];
+      const dataDir = options.dataDir;
+      if (dataDir === undefined) {
+        throw new Error('dataDir is required for reindexing');
+      }
+      const jobService = new JobService(dataDir);
+
+      for (const storeName of result.skipped) {
+        const store = await services.store.getByName(storeName);
+        if (store !== undefined) {
+          const job = jobService.createJob({
+            type: 'index',
+            details: { storeId: store.id, storeName: store.name },
+            message: `Re-indexing ${storeName}...`,
+          });
+          spawnBackgroundWorker(job.id, dataDir);
+          result.reindexJobs.push({ store: storeName, jobId: job.id });
+        }
+      }
+    }
+  }
+
   return {
     content: [
       {
@@ -212,21 +242,21 @@ export const syncCommands: CommandDefinition[] = [
     name: 'stores:sync',
     description: 'Sync stores from definitions config (bootstrap on fresh clone)',
     argsSchema: z.object({
-      reindex: z.boolean().optional().describe('Re-index existing stores after sync'),
       prune: z.boolean().optional().describe('Remove stores not in definitions'),
       dryRun: z.boolean().optional().describe('Show what would happen without making changes'),
+      reindex: z.boolean().optional().describe('Re-index existing stores after sync'),
     }),
     handler: (args: Record<string, unknown>, context: HandlerContext): Promise<ToolResponse> => {
       const syncArgs: SyncStoresArgs = {};
-      if (typeof args['reindex'] === 'boolean') {
-        syncArgs.reindex = args['reindex'];
-      }
       if (typeof args['prune'] === 'boolean') {
         syncArgs.prune = args['prune'];
       }
       if (typeof args['dryRun'] === 'boolean') {
         syncArgs.dryRun = args['dryRun'];
       }
+      if (typeof args['reindex'] === 'boolean') {
+        syncArgs.reindex = args['reindex'];
+      }
       return handleStoresSync(syncArgs, context);
     },
   },
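
A hedged usage sketch of the handler above. The import paths assume a caller sitting next to sync.commands.ts; in practice the HandlerContext comes from the MCP server, and options.dataDir must be set or the reindex branch now throws 'dataDir is required for reindexing'.

import { handleStoresSync } from './sync.commands.js';
import type { HandlerContext } from '../types.js';

async function reindexExistingStores(context: HandlerContext): Promise<string[]> {
  const result = await handleStoresSync({ reindex: true }, context);
  const response = JSON.parse(result.content[0].text) as {
    reindexJobs?: Array<{ store: string; jobId: string }>;
  };
  // Return the ids of the jobs that were handed to background workers
  return (response.reindexJobs ?? []).map((job) => job.jobId);
}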

package/src/mcp/handlers/search.handler.ts
CHANGED
@@ -30,6 +30,7 @@ export const handleSearch: ToolHandler<SearchArgs> = async (
   {
     query: validated.query,
     stores: validated.stores,
+    mode: validated.mode,
     detail: validated.detail,
     limit: validated.limit,
     intent: validated.intent,
@@ -69,9 +70,10 @@ export const handleSearch: ToolHandler<SearchArgs> = async (
   const searchQuery: SearchQuery = {
     query: validated.query,
     stores: storeIds,
-    mode:
+    mode: validated.mode,
     limit: validated.limit,
     detail: validated.detail,
+    threshold: validated.threshold,
     minRelevance: validated.minRelevance,
   };


package/src/mcp/handlers/store.handler.ts
CHANGED
@@ -274,9 +274,12 @@ export const handleDeleteStore: ToolHandler<DeleteStoreArgs> = async (
     throw new Error(`Store not found: ${validated.store}`);
   }

-  // Delete LanceDB table
+  // Delete LanceDB table first (so searches don't return results for deleted store)
   await services.lance.deleteStore(store.id);

+  // Delete code graph file
+  await services.codeGraph.deleteGraph(store.id);
+
   // For repo stores cloned from URL, remove the cloned directory
   if (store.type === 'repo' && 'url' in store && store.url !== undefined) {
     if (options.dataDir === undefined) {
@@ -286,7 +289,7 @@ export const handleDeleteStore: ToolHandler<DeleteStoreArgs> = async (
     await rm(repoPath, { recursive: true, force: true });
   }

-  // Delete from registry
+  // Delete from registry last
   const result = await services.store.delete(store.id);
   if (!result.success) {
     throw new Error(result.error.message);

package/src/mcp/schemas/index.test.ts
CHANGED
@@ -29,10 +29,46 @@ describe('MCP Schema Validation', () => {
   it('should use defaults for optional fields', () => {
     const result = SearchArgsSchema.parse({ query: 'test' });

+    expect(result.mode).toBe('hybrid');
     expect(result.detail).toBe('minimal');
     expect(result.limit).toBe(10);
   });

+  it('should validate mode enum', () => {
+    expect(() => SearchArgsSchema.parse({ query: 'test', mode: 'invalid' })).toThrow();
+
+    const vector = SearchArgsSchema.parse({ query: 'test', mode: 'vector' });
+    expect(vector.mode).toBe('vector');
+
+    const fts = SearchArgsSchema.parse({ query: 'test', mode: 'fts' });
+    expect(fts.mode).toBe('fts');
+
+    const hybrid = SearchArgsSchema.parse({ query: 'test', mode: 'hybrid' });
+    expect(hybrid.mode).toBe('hybrid');
+  });
+
+  it('should validate threshold', () => {
+    const result = SearchArgsSchema.parse({ query: 'test', threshold: 0.5 });
+    expect(result.threshold).toBe(0.5);
+
+    // Edge cases
+    const min = SearchArgsSchema.parse({ query: 'test', threshold: 0 });
+    expect(min.threshold).toBe(0);
+
+    const max = SearchArgsSchema.parse({ query: 'test', threshold: 1 });
+    expect(max.threshold).toBe(1);
+  });
+
+  it('should reject invalid threshold', () => {
+    expect(() => SearchArgsSchema.parse({ query: 'test', threshold: -0.1 })).toThrow(
+      'threshold must be between 0 and 1'
+    );
+
+    expect(() => SearchArgsSchema.parse({ query: 'test', threshold: 1.1 })).toThrow(
+      'threshold must be between 0 and 1'
+    );
+  });
+
   it('should reject empty query', () => {
     expect(() => SearchArgsSchema.parse({ query: '' })).toThrow(
       'Query must be a non-empty string'

package/src/mcp/schemas/index.ts
CHANGED
@@ -25,9 +25,15 @@ export const SearchArgsSchema = z.object({
       'find-documentation',
     ])
     .optional(),
+  mode: z.enum(['vector', 'fts', 'hybrid']).default('hybrid'),
   detail: z.enum(['minimal', 'contextual', 'full']).default('minimal'),
   limit: z.number().int().positive().default(10),
   stores: z.array(z.string()).optional(),
+  threshold: z
+    .number()
+    .min(0, 'threshold must be between 0 and 1')
+    .max(1, 'threshold must be between 0 and 1')
+    .optional(),
   minRelevance: z
     .number()
     .min(0, 'minRelevance must be between 0 and 1')
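
A small sketch of how the extended schema behaves, following the schema definition above and the tests earlier in this diff (the import path assumes a caller inside src/mcp/schemas; the query strings are invented):

import { SearchArgsSchema } from './index.js';

// Defaults: mode falls back to 'hybrid', detail to 'minimal', limit to 10
const defaults = SearchArgsSchema.parse({ query: 'branch cloning' });
console.log(defaults.mode, defaults.detail, defaults.limit);

// threshold must fall within [0, 1]; out-of-range values throw
const filtered = SearchArgsSchema.parse({ query: 'branch cloning', mode: 'fts', threshold: 0.6 });
console.log(filtered.threshold);
// SearchArgsSchema.parse({ query: 'branch cloning', threshold: 1.5 }); // throws 'threshold must be between 0 and 1'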

package/src/mcp/server.ts
CHANGED
@@ -61,6 +61,13 @@ export function createMCPServer(options: MCPServerOptions): Server {
             ],
             description: 'Search intent for better ranking',
           },
+          mode: {
+            type: 'string',
+            enum: ['vector', 'fts', 'hybrid'],
+            default: 'hybrid',
+            description:
+              'Search mode: vector (embeddings only), fts (full-text only), hybrid (both, default)',
+          },
           detail: {
             type: 'string',
             enum: ['minimal', 'contextual', 'full'],
@@ -78,6 +85,10 @@ export function createMCPServer(options: MCPServerOptions): Server {
             items: { type: 'string' },
             description: 'Specific store IDs to search (optional)',
           },
+          threshold: {
+            type: 'number',
+            description: 'Minimum normalized score (0-1). Filters out low-relevance results.',
+          },
           minRelevance: {
             type: 'number',
             description:
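
For reference, a sketch of the arguments an MCP client might now send to the search tool; only mode and threshold come from the input schema above, and the query value plus argument envelope are assumptions about how a client would call it.

// Hypothetical arguments object for the search tool exposed by createMCPServer above.
const searchArguments = {
  query: 'how does sync --reindex queue jobs?', // invented query
  mode: 'hybrid' as const,  // 'vector' | 'fts' | 'hybrid' (default)
  threshold: 0.5,           // drop results whose normalized score is below 0.5
  limit: 5,
};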

package/src/services/code-graph.service.ts
CHANGED
@@ -1,4 +1,4 @@
-import { readFile, writeFile, mkdir } from 'node:fs/promises';
+import { readFile, writeFile, mkdir, rm } from 'node:fs/promises';
 import { join, dirname } from 'node:path';
 import { ASTParser } from '../analysis/ast-parser.js';
 import { CodeGraph, type GraphNode } from '../analysis/code-graph.js';
@@ -120,6 +120,16 @@
     await writeFile(graphPath, JSON.stringify(serialized, null, 2));
   }

+  /**
+   * Delete the code graph file for a store.
+   * Silently succeeds if the file doesn't exist.
+   */
+  async deleteGraph(storeId: StoreId): Promise<void> {
+    const graphPath = this.getGraphPath(storeId);
+    await rm(graphPath, { force: true });
+    this.graphCache.delete(storeId);
+  }
+
   /**
    * Load a code graph for a store.
    * Returns undefined if no graph exists.
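
The deleteGraph method above leans on the force option of node's rm; a minimal standalone sketch of that contract (the helper name is invented):

import { rm } from 'node:fs/promises';

// force: true makes the removal a no-op when the file does not exist,
// so callers can delete a store's graph without checking for it first.
async function removeGraphFile(graphPath: string): Promise<void> {
  await rm(graphPath, { force: true });
}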

package/src/services/job.service.test.ts
CHANGED
@@ -24,6 +24,29 @@ describe('JobService', () => {
     const jobsDir = join(tempDir, 'jobs');
     expect(existsSync(jobsDir)).toBe(true);
   });
+
+  it('throws when dataDir not provided and HOME/USERPROFILE undefined', () => {
+    const originalHome = process.env['HOME'];
+    const originalUserProfile = process.env['USERPROFILE'];
+
+    try {
+      delete process.env['HOME'];
+      delete process.env['USERPROFILE'];
+
+      // Should throw instead of falling back to current directory
+      expect(() => new JobService()).toThrow(
+        'HOME or USERPROFILE environment variable is required'
+      );
+    } finally {
+      // Restore environment
+      if (originalHome !== undefined) {
+        process.env['HOME'] = originalHome;
+      }
+      if (originalUserProfile !== undefined) {
+        process.env['USERPROFILE'] = originalUserProfile;
+      }
+    }
+  });
 });

 describe('createJob', () => {

package/src/services/job.service.ts
CHANGED
@@ -9,12 +9,16 @@ export class JobService {

   constructor(dataDir?: string) {
     // Default to ~/.local/share/bluera-knowledge/jobs
-
-
-
-
-
-    )
+    let baseDir: string;
+    if (dataDir !== undefined) {
+      baseDir = dataDir;
+    } else {
+      const homeDir = process.env['HOME'] ?? process.env['USERPROFILE'];
+      if (homeDir === undefined) {
+        throw new Error('HOME or USERPROFILE environment variable is required');
+      }
+      baseDir = path.join(homeDir, '.local/share/bluera-knowledge');
+    }
     this.jobsDir = path.join(baseDir, 'jobs');

     // Ensure jobs directory exists