bluera-knowledge 0.11.19 → 0.11.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/.claude-plugin/plugin.json +1 -1
  2. package/CHANGELOG.md +44 -0
  3. package/README.md +42 -5
  4. package/commands/crawl.md +7 -7
  5. package/commands/search.md +9 -2
  6. package/dist/{chunk-QEHSDQTL.js → chunk-C4SYGLAI.js} +47 -28
  7. package/dist/chunk-C4SYGLAI.js.map +1 -0
  8. package/dist/{chunk-VP4VZULK.js → chunk-CC6EGZ4D.js} +51 -8
  9. package/dist/chunk-CC6EGZ4D.js.map +1 -0
  10. package/dist/{chunk-GOAOBPOA.js → chunk-QCSFBMYW.js} +2 -2
  11. package/dist/index.js +64 -12
  12. package/dist/index.js.map +1 -1
  13. package/dist/mcp/server.js +2 -2
  14. package/dist/workers/background-worker-cli.js +2 -2
  15. package/package.json +1 -1
  16. package/src/analysis/code-graph.test.ts +30 -0
  17. package/src/analysis/code-graph.ts +10 -2
  18. package/src/cli/commands/store.test.ts +78 -0
  19. package/src/cli/commands/store.ts +19 -0
  20. package/src/cli/commands/sync.test.ts +1 -1
  21. package/src/cli/commands/sync.ts +50 -1
  22. package/src/db/lance.test.ts +3 -4
  23. package/src/db/lance.ts +14 -19
  24. package/src/mcp/commands/sync.commands.test.ts +94 -6
  25. package/src/mcp/commands/sync.commands.ts +36 -6
  26. package/src/mcp/handlers/search.handler.ts +3 -1
  27. package/src/mcp/handlers/store.handler.test.ts +3 -0
  28. package/src/mcp/handlers/store.handler.ts +5 -2
  29. package/src/mcp/schemas/index.test.ts +36 -0
  30. package/src/mcp/schemas/index.ts +6 -0
  31. package/src/mcp/server.test.ts +56 -1
  32. package/src/mcp/server.ts +16 -1
  33. package/src/services/code-graph.service.ts +11 -1
  34. package/src/services/job.service.test.ts +23 -0
  35. package/src/services/job.service.ts +10 -6
  36. package/src/services/search.service.ts +15 -9
  37. package/vitest.config.ts +1 -1
  38. package/dist/chunk-QEHSDQTL.js.map +0 -1
  39. package/dist/chunk-VP4VZULK.js.map +0 -1
  40. package/dist/{chunk-GOAOBPOA.js.map → chunk-QCSFBMYW.js.map} +0 -0
package/dist/mcp/server.js CHANGED
@@ -1,8 +1,8 @@
 import {
   createMCPServer,
   runMCPServer
-} from "../chunk-VP4VZULK.js";
-import "../chunk-QEHSDQTL.js";
+} from "../chunk-CC6EGZ4D.js";
+import "../chunk-C4SYGLAI.js";
 import "../chunk-HRQD3MPH.js";
 export {
   createMCPServer,
package/dist/workers/background-worker-cli.js CHANGED
@@ -1,7 +1,7 @@
 #!/usr/bin/env node
 import {
   IntelligentCrawler
-} from "../chunk-GOAOBPOA.js";
+} from "../chunk-QCSFBMYW.js";
 import {
   JobService,
   createDocumentId,
@@ -9,7 +9,7 @@ import {
   createServices,
   createStoreId,
   shutdownLogger
-} from "../chunk-QEHSDQTL.js";
+} from "../chunk-C4SYGLAI.js";
 import "../chunk-HRQD3MPH.js";

 // src/workers/background-worker.ts
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bluera-knowledge",
-  "version": "0.11.19",
+  "version": "0.11.21",
   "description": "CLI tool for managing knowledge stores with semantic search",
   "type": "module",
   "bin": {
package/src/analysis/code-graph.test.ts CHANGED
@@ -328,6 +328,36 @@ describe('CodeGraph', () => {
     expect(edgeTypes).toContain('calls');
   });

+  it('includes confidence in serialized edges', () => {
+    const graph = new CodeGraph();
+    const nodes: CodeNode[] = [
+      {
+        type: 'function',
+        name: 'fn',
+        exported: false,
+        startLine: 1,
+        endLine: 2,
+      },
+    ];
+
+    graph.addNodes(nodes, '/src/test.ts');
+    graph.addImport('/src/test.ts', 'module', ['util']); // confidence: 1.0
+    graph.analyzeCallRelationships('other();', '/src/test.ts', 'fn'); // confidence: 0.5
+
+    const json = graph.toJSON();
+
+    // All edges should have confidence property preserved
+    expect(json.edges.every((e) => typeof e.confidence === 'number')).toBe(true);
+
+    // Import edges have confidence 1.0
+    const importEdge = json.edges.find((e) => e.type === 'imports');
+    expect(importEdge?.confidence).toBe(1.0);
+
+    // Call edges from regex detection have confidence 0.5
+    const callEdge = json.edges.find((e) => e.type === 'calls');
+    expect(callEdge?.confidence).toBe(0.5);
+  });
+
   it('handles empty graph', () => {
     const graph = new CodeGraph();
     const json = graph.toJSON();
package/src/analysis/code-graph.ts CHANGED
@@ -227,7 +227,10 @@ export class CodeGraph {
     return importPath;
   }

-  toJSON(): { nodes: GraphNode[]; edges: Array<{ from: string; to: string; type: string }> } {
+  toJSON(): {
+    nodes: GraphNode[];
+    edges: Array<{ from: string; to: string; type: string; confidence: number }>;
+  } {
    const allEdges: GraphEdge[] = [];
    for (const edges of this.edges.values()) {
      allEdges.push(...edges);
@@ -235,7 +238,12 @@

     return {
       nodes: Array.from(this.nodes.values()),
-      edges: allEdges.map((e) => ({ from: e.from, to: e.to, type: e.type })),
+      edges: allEdges.map((e) => ({
+        from: e.from,
+        to: e.to,
+        type: e.type,
+        confidence: e.confidence,
+      })),
     };
   }
 }
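With confidence now part of the serialized output, graph consumers can weight or filter relationships. A minimal sketch of one way to use it, assuming an already populated CodeGraph instance named graph (the 0.9 threshold is illustrative, not part of the package):

    // Import edges are serialized with confidence 1.0 and regex-detected call
    // edges with 0.5, so this keeps only fully certain relationships.
    const { nodes, edges } = graph.toJSON();
    const confident = edges.filter((edge) => edge.confidence >= 0.9);
    console.log(`kept ${confident.length} of ${edges.length} edges across ${nodes.length} nodes`);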
package/src/cli/commands/store.test.ts CHANGED
@@ -17,8 +17,23 @@ interface MockStoreService {
   delete: MockInstance;
 }

+interface MockLanceService {
+  deleteStore: MockInstance;
+}
+
+interface MockCodeGraphService {
+  deleteGraph: MockInstance;
+}
+
+interface MockConfigService {
+  resolveDataDir: MockInstance;
+}
+
 interface MockServices {
   store: MockStoreService;
+  lance: MockLanceService;
+  codeGraph: MockCodeGraphService;
+  config: MockConfigService;
 }

 describe('store command execution', () => {
@@ -38,6 +53,15 @@ describe('store command execution', () => {
         create: vi.fn(),
         delete: vi.fn(),
       },
+      lance: {
+        deleteStore: vi.fn().mockResolvedValue(undefined),
+      },
+      codeGraph: {
+        deleteGraph: vi.fn().mockResolvedValue(undefined),
+      },
+      config: {
+        resolveDataDir: vi.fn().mockReturnValue('/tmp/test-data'),
+      },
     };

     vi.mocked(createServices).mockResolvedValue(mockServices);
@@ -366,6 +390,51 @@ describe('store command execution', () => {
     });
   });

+  it('creates repo store with branch option', async () => {
+    const mockStore: RepoStore = {
+      id: createStoreId('new-store-6'),
+      name: 'branched-repo',
+      type: 'repo',
+      path: '/path/to/cloned/repo',
+      url: 'https://github.com/user/repo',
+      branch: 'develop',
+      createdAt: new Date(),
+      updatedAt: new Date(),
+    };
+
+    mockServices.store.create.mockResolvedValue({
+      success: true,
+      data: mockStore,
+    });
+
+    const command = createStoreCommand(getOptions);
+    const createCommand = command.commands.find((c) => c.name() === 'create');
+    const actionHandler = createCommand?._actionHandler;
+
+    createCommand.parseOptions([
+      '--type',
+      'repo',
+      '--source',
+      'https://github.com/user/repo',
+      '--branch',
+      'develop',
+    ]);
+    await actionHandler!(['branched-repo']);
+
+    expect(mockServices.store.create).toHaveBeenCalledWith({
+      name: 'branched-repo',
+      type: 'repo',
+      path: undefined,
+      url: 'https://github.com/user/repo',
+      branch: 'develop',
+      description: undefined,
+      tags: undefined,
+    });
+    expect(consoleLogSpy).toHaveBeenCalledWith(
+      expect.stringContaining('Created store: branched-repo')
+    );
+  });
+
   it('outputs JSON when format is json', async () => {
     const mockStore: FileStore = {
       id: createStoreId('new-store-5'),
@@ -860,6 +929,15 @@ describe('store command execution', () => {
     expect(tagsOption?.mandatory).toBe(false);
   });

+  it('create subcommand has --branch option for repo type', () => {
+    const command = createStoreCommand(getOptions);
+    const createCommand = command.commands.find((c) => c.name() === 'create');
+    const branchOption = createCommand?.options.find((o) => o.long === '--branch');
+
+    expect(branchOption).toBeDefined();
+    expect(branchOption?.mandatory).toBe(false);
+  });
+
   it('delete subcommand has force and yes options', () => {
     const command = createStoreCommand(getOptions);
     const deleteCommand = command.commands.find((c) => c.name() === 'delete');
package/src/cli/commands/store.ts CHANGED
@@ -1,3 +1,5 @@
+import { rm } from 'node:fs/promises';
+import { join } from 'node:path';
 import { Command } from 'commander';
 import { createServices, destroyServices } from '../../services/index.js';
 import type { StoreType } from '../../types/store.js';
@@ -49,6 +51,7 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
       'Store type: file (local dir), repo (git), web (crawled site)'
     )
     .requiredOption('-s, --source <path>', 'Local path for file/repo stores, URL for web stores')
+    .option('-b, --branch <branch>', 'Git branch to clone (repo stores only)')
     .option('-d, --description <desc>', 'Optional description for the store')
     .option('--tags <tags>', 'Comma-separated tags for filtering')
     .action(
@@ -57,6 +60,7 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
         options: {
           type: StoreType;
           source: string;
+          branch?: string;
           description?: string;
           tags?: string;
         }
@@ -79,6 +83,7 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
             options.type === 'web' || (options.type === 'repo' && isUrl)
               ? options.source
               : undefined,
+          branch: options.type === 'repo' ? options.branch : undefined,
          description: options.description,
          tags: options.tags?.split(',').map((t) => t.trim()),
        });
@@ -188,6 +193,20 @@ export function createStoreCommand(getOptions: () => GlobalOptions): Command {
        }
      }

+      // Delete LanceDB table first (so searches don't return results for deleted store)
+      await services.lance.deleteStore(s.id);
+
+      // Delete code graph file
+      await services.codeGraph.deleteGraph(s.id);
+
+      // For repo stores cloned from URL, remove the cloned directory
+      if (s.type === 'repo' && 'url' in s && s.url !== undefined) {
+        const dataDir = services.config.resolveDataDir();
+        const repoPath = join(dataDir, 'repos', s.id);
+        await rm(repoPath, { recursive: true, force: true });
+      }
+
+      // Delete from registry last
      const result = await services.store.delete(s.id);

      if (result.success) {
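Taken with the new test above, the --branch flag simply threads through the create subcommand into StoreService.create. A sketch of the payload the CLI now builds for a URL-based repo store (values are illustrative and mirror the test expectation; services is the container returned by createServices):

    // The create subcommand with `--type repo --source https://github.com/user/repo --branch develop`
    // resolves to this call; path stays undefined because the source is a URL.
    await services.store.create({
      name: 'branched-repo',
      type: 'repo',
      path: undefined,
      url: 'https://github.com/user/repo',
      branch: 'develop',
      description: undefined,
      tags: undefined,
    });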
package/src/cli/commands/sync.test.ts CHANGED
@@ -49,6 +49,6 @@ describe('createSyncCommand', () => {
     expect(pruneOpt?.description).toContain('Remove');

     const reindexOpt = options.find((o) => o.long === '--reindex');
-    expect(reindexOpt?.description).toContain('index');
+    expect(reindexOpt?.description).toContain('Re-index');
   });
 });
package/src/cli/commands/sync.ts CHANGED
@@ -1,11 +1,13 @@
 import { Command } from 'commander';
 import { createServices, destroyServices } from '../../services/index.js';
+import { JobService } from '../../services/job.service.js';
 import { StoreDefinitionService } from '../../services/store-definition.service.js';
 import {
   isFileStoreDefinition,
   isRepoStoreDefinition,
   isWebStoreDefinition,
 } from '../../types/store-definition.js';
+import { spawnBackgroundWorker } from '../../workers/spawn-worker.js';
 import type { StoreService } from '../../services/store.service.js';
 import type { StoreDefinition } from '../../types/store-definition.js';
 import type { GlobalOptions } from '../program.js';
@@ -19,6 +21,8 @@ interface SyncResult {
   dryRun: boolean;
   wouldCreate: string[];
   wouldPrune: string[];
+  reindexJobs: Array<{ store: string; jobId: string }>;
+  wouldReindex: string[];
 }

 /**
@@ -125,6 +129,8 @@ export function createSyncCommand(getOptions: () => GlobalOptions): Command {
       dryRun: options.dryRun === true,
       wouldCreate: [],
       wouldPrune: [],
+      reindexJobs: [],
+      wouldReindex: [],
     };

     // Process each definition
@@ -173,6 +179,29 @@
       }
     }

+    // Re-index existing stores if requested
+    if (options.reindex === true && result.skipped.length > 0) {
+      if (options.dryRun === true) {
+        result.wouldReindex = [...result.skipped];
+      } else {
+        const dataDir = globalOpts.dataDir ?? services.config.resolveDataDir();
+        const jobService = new JobService(dataDir);
+
+        for (const storeName of result.skipped) {
+          const store = await services.store.getByName(storeName);
+          if (store !== undefined) {
+            const job = jobService.createJob({
+              type: 'index',
+              details: { storeId: store.id, storeName: store.name },
+              message: `Re-indexing ${storeName}...`,
+            });
+            spawnBackgroundWorker(job.id, dataDir);
+            result.reindexJobs.push({ store: storeName, jobId: job.id });
+          }
+        }
+      }
+    }
+
     // Output result
     if (globalOpts.format === 'json') {
       console.log(JSON.stringify(result, null, 2));
@@ -189,19 +218,25 @@

 function printHumanReadable(result: SyncResult, quiet: boolean): void {
   if (quiet) {
-    // Just print created/pruned store names
+    // Just print created/pruned/reindexed store names
     for (const name of result.created) {
       console.log(`created: ${name}`);
     }
     for (const name of result.pruned) {
       console.log(`pruned: ${name}`);
     }
+    for (const { store, jobId } of result.reindexJobs) {
+      console.log(`reindexing: ${store} (${jobId})`);
+    }
     for (const name of result.wouldCreate) {
       console.log(`would create: ${name}`);
     }
     for (const name of result.wouldPrune) {
       console.log(`would prune: ${name}`);
     }
+    for (const name of result.wouldReindex) {
+      console.log(`would reindex: ${name}`);
+    }
     return;
   }

@@ -260,5 +295,19 @@ function printHumanReadable(result: SyncResult, quiet: boolean): void {
     }
   }

+  if (result.reindexJobs.length > 0) {
+    console.log(`Reindexing started (${String(result.reindexJobs.length)}):`);
+    for (const { store, jobId } of result.reindexJobs) {
+      console.log(`  ↻ ${store} (Job: ${jobId})`);
+    }
+  }
+
+  if (result.wouldReindex.length > 0) {
+    console.log(`Would reindex (${String(result.wouldReindex.length)}):`);
+    for (const name of result.wouldReindex) {
+      console.log(`  ↻ ${name}`);
+    }
+  }
+
   console.log('');
 }
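For scripted callers, the sync command's JSON output now carries the reindex fields alongside the existing ones. A hedged sketch of parsing it; the interface below is assembled from the fields referenced in this file (plus created/pruned/skipped, which the code reads) and is not an export of the package:

    interface SyncResultJson {
      created: string[];
      pruned: string[];
      skipped: string[];
      dryRun: boolean;
      wouldCreate: string[];
      wouldPrune: string[];
      reindexJobs: Array<{ store: string; jobId: string }>;
      wouldReindex: string[];
    }

    declare const stdout: string; // assumed: captured JSON output of the sync command
    const parsed = JSON.parse(stdout) as SyncResultJson;
    for (const { store, jobId } of parsed.reindexJobs) {
      console.log(`re-indexing ${store}, follow job ${jobId}`);
    }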
package/src/db/lance.test.ts CHANGED
@@ -154,7 +154,7 @@ describe('LanceStore', () => {
   });

   describe('fullTextSearch error handling', () => {
-    it('returns empty array when FTS index does not exist', async () => {
+    it('throws error when FTS index does not exist', async () => {
       const newStoreId = createStoreId('no-fts-store');
       await store.initialize(newStoreId);

@@ -171,9 +171,8 @@ describe('LanceStore', () => {

       await store.addDocuments(newStoreId, [doc]);

-      // Don't create FTS index - should return empty array
-      const results = await store.fullTextSearch(newStoreId, 'test', 10);
-      expect(results).toEqual([]);
+      // Don't create FTS index - should throw error per CLAUDE.md "fail fast"
+      await expect(store.fullTextSearch(newStoreId, 'test', 10)).rejects.toThrow();
     });
   });

package/src/db/lance.ts CHANGED
@@ -113,26 +113,21 @@ export class LanceStore {
   > {
     const table = await this.getTable(storeId);

-    try {
+    // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
+    const results = (await table.search(query, 'fts').limit(limit).toArray()) as Array<{
+      id: string;
+      content: string;
+      metadata: string;
+      _score: number;
+    }>;
+
+    return results.map((r) => ({
+      id: createDocumentId(r.id),
+      content: r.content,
+      score: r._score,
       // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
-      const results = (await table.search(query, 'fts').limit(limit).toArray()) as Array<{
-        id: string;
-        content: string;
-        metadata: string;
-        _score: number;
-      }>;
-
-      return results.map((r) => ({
-        id: createDocumentId(r.id),
-        content: r.content,
-        score: r._score,
-        // eslint-disable-next-line @typescript-eslint/consistent-type-assertions
-        metadata: JSON.parse(r.metadata) as DocumentMetadata,
-      }));
-    } catch {
-      // FTS index may not exist, return empty
-      return [];
-    }
+      metadata: JSON.parse(r.metadata) as DocumentMetadata,
+    }));
   }

   async deleteStore(storeId: StoreId): Promise<void> {
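Because fullTextSearch now lets a missing-FTS-index error propagate instead of returning [], callers that still want graceful degradation have to catch it themselves. A hedged sketch; lanceStore and storeId are assumed to exist in the caller, and only fullTextSearch itself comes from this file:

    try {
      const hits = await lanceStore.fullTextSearch(storeId, 'semantic search', 10);
      console.log(`full-text hits: ${hits.length}`);
    } catch (error) {
      // Fail fast by rethrowing, or fall back to vector-only results here
      // if hybrid search should survive an absent FTS index.
      console.warn('full-text search unavailable:', error);
    }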
package/src/mcp/commands/sync.commands.test.ts CHANGED
@@ -1,6 +1,5 @@
-import { describe, it, expect, beforeEach, afterEach } from 'vitest';
-import { z } from 'zod';
-import { rm, mkdtemp, mkdir, writeFile } from 'node:fs/promises';
+import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
+import { rm, mkdtemp, mkdir } from 'node:fs/promises';
 import { tmpdir } from 'node:os';
 import { join } from 'node:path';
 import { syncCommands, handleStoresSync } from './sync.commands.js';
@@ -8,7 +7,11 @@ import { StoreService } from '../../services/store.service.js';
 import { StoreDefinitionService } from '../../services/store-definition.service.js';
 import type { HandlerContext } from '../types.js';
 import type { ServiceContainer } from '../../services/index.js';
-import type { StoreDefinitionsConfig } from '../../types/store-definition.js';
+
+// Mock spawnBackgroundWorker
+vi.mock('../../workers/spawn-worker.js', () => ({
+  spawnBackgroundWorker: vi.fn(),
+}));

 /**
  * Create a minimal mock service container for testing
@@ -45,11 +48,18 @@ describe('sync.commands', () => {

     // Valid with options
     const result2 = syncCmd?.argsSchema?.safeParse({
-      reindex: true,
       prune: true,
       dryRun: true,
     });
     expect(result2?.success).toBe(true);
+
+    // Valid with reindex option
+    const result3 = syncCmd?.argsSchema?.safeParse({
+      reindex: true,
+      prune: true,
+      dryRun: true,
+    });
+    expect(result3?.success).toBe(true);
   });
 });

@@ -69,7 +79,7 @@ describe('sync.commands', () => {

     context = {
       services: createMockServices(storeService),
-      options: { projectRoot },
+      options: { projectRoot, dataDir },
     };
   });

@@ -279,5 +289,83 @@ describe('sync.commands', () => {
       expect(response.orphans).toHaveLength(0);
     });
   });
+
+  describe('reindex mode', () => {
+    it('reports wouldReindex in dry run mode', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      // Create store (auto-adds definition)
+      await storeService.create({
+        name: 'existing-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      const result = await handleStoresSync({ reindex: true, dryRun: true }, context);
+      const response = JSON.parse(result.content[0].text);
+
+      expect(response.dryRun).toBe(true);
+      expect(response.wouldReindex).toContain('existing-store');
+      expect(response.reindexJobs).toBeUndefined();
+    });
+
+    it('starts reindex jobs for existing stores', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      // Create store (auto-adds definition)
+      await storeService.create({
+        name: 'reindex-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      const result = await handleStoresSync({ reindex: true }, context);
+      const response = JSON.parse(result.content[0].text);
+
+      expect(response.reindexJobs).toHaveLength(1);
+      expect(response.reindexJobs[0].store).toBe('reindex-store');
+      expect(response.reindexJobs[0].jobId).toMatch(/^job_/);
+    });
+
+    it('does not reindex if reindex flag is not set', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      await storeService.create({
+        name: 'no-reindex-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      const result = await handleStoresSync({}, context);
+      const response = JSON.parse(result.content[0].text);
+
+      expect(response.reindexJobs).toBeUndefined();
+      expect(response.wouldReindex).toBeUndefined();
+    });
+
+    it('throws error when dataDir is undefined during reindex', async () => {
+      const docsDir = join(projectRoot, 'docs');
+      await mkdir(docsDir, { recursive: true });
+
+      await storeService.create({
+        name: 'error-store',
+        type: 'file',
+        path: docsDir,
+      });
+
+      // Create context without dataDir
+      const contextWithoutDataDir: HandlerContext = {
+        services: createMockServices(storeService),
+        options: { projectRoot },
+      };
+
+      await expect(handleStoresSync({ reindex: true }, contextWithoutDataDir)).rejects.toThrow(
+        'dataDir is required for reindexing'
+      );
+    });
+  });
 });
 });
package/src/mcp/commands/sync.commands.ts CHANGED
@@ -1,10 +1,12 @@
 import { z } from 'zod';
+import { JobService } from '../../services/job.service.js';
 import { StoreDefinitionService } from '../../services/store-definition.service.js';
 import {
   isFileStoreDefinition,
   isRepoStoreDefinition,
   isWebStoreDefinition,
 } from '../../types/store-definition.js';
+import { spawnBackgroundWorker } from '../../workers/spawn-worker.js';
 import type { CommandDefinition } from './registry.js';
 import type { StoreDefinition } from '../../types/store-definition.js';
 import type { HandlerContext, ToolResponse } from '../types.js';
@@ -13,9 +15,9 @@ import type { HandlerContext, ToolResponse } from '../types.js';
  * Arguments for stores:sync command
  */
 export interface SyncStoresArgs {
-  reindex?: boolean;
   prune?: boolean;
   dryRun?: boolean;
+  reindex?: boolean;
 }

 /**
@@ -30,6 +32,8 @@ interface SyncResult {
   dryRun?: boolean;
   wouldCreate?: string[];
   wouldPrune?: string[];
+  reindexJobs?: Array<{ store: string; jobId: string }>;
+  wouldReindex?: string[];
 }

 /**
@@ -39,7 +43,6 @@ interface SyncResult {
  * - Creates missing stores from definitions
  * - Reports stores not in definitions (orphans)
  * - Optionally prunes orphan stores
- * - Optionally re-indexes existing stores
  */
 export async function handleStoresSync(
   args: SyncStoresArgs,
@@ -119,6 +122,33 @@ export async function handleStoresSync(
     }
   }

+  // Re-index existing stores if requested
+  if (args.reindex === true && result.skipped.length > 0) {
+    if (args.dryRun === true) {
+      result.wouldReindex = [...result.skipped];
+    } else {
+      result.reindexJobs = [];
+      const dataDir = options.dataDir;
+      if (dataDir === undefined) {
+        throw new Error('dataDir is required for reindexing');
+      }
+      const jobService = new JobService(dataDir);
+
+      for (const storeName of result.skipped) {
+        const store = await services.store.getByName(storeName);
+        if (store !== undefined) {
+          const job = jobService.createJob({
+            type: 'index',
+            details: { storeId: store.id, storeName: store.name },
+            message: `Re-indexing ${storeName}...`,
+          });
+          spawnBackgroundWorker(job.id, dataDir);
+          result.reindexJobs.push({ store: storeName, jobId: job.id });
+        }
+      }
+    }
+  }
+
   return {
     content: [
       {
@@ -212,21 +242,21 @@
     name: 'stores:sync',
     description: 'Sync stores from definitions config (bootstrap on fresh clone)',
     argsSchema: z.object({
-      reindex: z.boolean().optional().describe('Re-index existing stores after sync'),
       prune: z.boolean().optional().describe('Remove stores not in definitions'),
       dryRun: z.boolean().optional().describe('Show what would happen without making changes'),
+      reindex: z.boolean().optional().describe('Re-index existing stores after sync'),
     }),
     handler: (args: Record<string, unknown>, context: HandlerContext): Promise<ToolResponse> => {
       const syncArgs: SyncStoresArgs = {};
-      if (typeof args['reindex'] === 'boolean') {
-        syncArgs.reindex = args['reindex'];
-      }
       if (typeof args['prune'] === 'boolean') {
         syncArgs.prune = args['prune'];
       }
       if (typeof args['dryRun'] === 'boolean') {
         syncArgs.dryRun = args['dryRun'];
       }
+      if (typeof args['reindex'] === 'boolean') {
+        syncArgs.reindex = args['reindex'];
+      }
       return handleStoresSync(syncArgs, context);
     },
   },
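On the MCP side, the same reindex flow is reachable through the stores:sync tool. A hedged sketch of driving handleStoresSync directly, based on the new tests; services, projectRoot, and dataDir are assumed to come from the host's setup, and omitting dataDir makes the handler throw 'dataDir is required for reindexing':

    declare const services: ServiceContainer;
    declare const projectRoot: string;
    declare const dataDir: string;

    const context: HandlerContext = { services, options: { projectRoot, dataDir } };

    // Dry run first: reports which existing stores would be re-indexed.
    const dryRun = await handleStoresSync({ reindex: true, dryRun: true }, context);
    console.log(JSON.parse(dryRun.content[0].text).wouldReindex);

    // Real run: spawns a background worker per store and returns job ids.
    const live = await handleStoresSync({ reindex: true }, context);
    console.log(JSON.parse(live.content[0].text).reindexJobs); // [{ store, jobId }]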