bluera-knowledge 0.9.43 → 0.10.1
This diff shows the changes between publicly released versions of the package as published to its public registry. It is provided for informational purposes only.
- package/.claude-plugin/plugin.json +1 -1
- package/CHANGELOG.md +75 -0
- package/README.md +114 -42
- package/commands/sync.md +96 -0
- package/dist/{chunk-MQE32YY6.js → chunk-6U45VP5Z.js} +42 -6
- package/dist/chunk-6U45VP5Z.js.map +1 -0
- package/dist/{chunk-CUHYSPRV.js → chunk-DP5XBPQV.js} +372 -2
- package/dist/chunk-DP5XBPQV.js.map +1 -0
- package/dist/{chunk-DWAIT2OD.js → chunk-UE4ZIJYA.js} +74 -5
- package/dist/{chunk-DWAIT2OD.js.map → chunk-UE4ZIJYA.js.map} +1 -1
- package/dist/index.js +216 -7
- package/dist/index.js.map +1 -1
- package/dist/mcp/server.js +2 -2
- package/dist/workers/background-worker-cli.js +4 -3
- package/dist/workers/background-worker-cli.js.map +1 -1
- package/hooks/check-dependencies.sh +29 -0
- package/package.json +1 -1
- package/python/crawl_worker.py +6 -1
- package/src/cli/commands/crawl.test.ts +43 -3
- package/src/cli/commands/crawl.ts +3 -3
- package/src/cli/commands/sync.test.ts +54 -0
- package/src/cli/commands/sync.ts +264 -0
- package/src/cli/index.ts +1 -0
- package/src/crawl/claude-client.test.ts +195 -24
- package/src/crawl/claude-client.ts +38 -3
- package/src/crawl/intelligent-crawler.test.ts +65 -0
- package/src/crawl/intelligent-crawler.ts +14 -2
- package/src/index.ts +2 -0
- package/src/mcp/commands/index.ts +2 -0
- package/src/mcp/commands/sync.commands.test.ts +283 -0
- package/src/mcp/commands/sync.commands.ts +233 -0
- package/src/services/gitignore.service.test.ts +157 -0
- package/src/services/gitignore.service.ts +132 -0
- package/src/services/store-definition.service.test.ts +440 -0
- package/src/services/store-definition.service.ts +198 -0
- package/src/services/store.service.test.ts +279 -1
- package/src/services/store.service.ts +101 -4
- package/src/types/index.ts +18 -0
- package/src/types/store-definition.test.ts +492 -0
- package/src/types/store-definition.ts +129 -0
- package/src/workers/background-worker.ts +1 -1
- package/dist/chunk-CUHYSPRV.js.map +0 -1
- package/dist/chunk-MQE32YY6.js.map +0 -1
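The core addition in this release is a git-committable store-definitions config plus a stores:sync command (exposed through the MCP execute meta-tool, the CLI, and the new commands/sync.md doc) that recreates knowledge stores from that config after a fresh clone. As a rough illustration of what such a definitions file holds: the field names and values below are taken from the test fixtures and types in this diff, but the exact on-disk schema of .bluera/bluera-knowledge/stores.config.json is not shown here, so treat this as a sketch rather than the package's documented format.

// Illustrative sketch only. StoreDefinitionsConfig is declared in
// package/src/types/store-definition.ts; the import specifier below is the
// one used inside the package's own MCP modules, adjust for your context.
import type { StoreDefinitionsConfig } from '../../types/store-definition.js';

const exampleDefinitions: StoreDefinitionsConfig = {
  stores: [
    { type: 'file', name: 'my-docs', path: './docs', description: 'Documentation' },
    { type: 'web', name: 'api-docs', url: 'https://example.com/docs', depth: 2 },
  ],
};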

package/src/mcp/commands/sync.commands.test.ts
@@ -0,0 +1,283 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { z } from 'zod';
+import { rm, mkdtemp, mkdir, writeFile } from 'node:fs/promises';
+import { tmpdir } from 'node:os';
+import { join } from 'node:path';
+import { syncCommands, handleStoresSync } from './sync.commands.js';
+import { StoreService } from '../../services/store.service.js';
+import { StoreDefinitionService } from '../../services/store-definition.service.js';
+import type { HandlerContext } from '../types.js';
+import type { ServiceContainer } from '../../services/index.js';
+import type { StoreDefinitionsConfig } from '../../types/store-definition.js';
+
+/**
+ * Create a minimal mock service container for testing
+ */
+function createMockServices(storeService: StoreService): ServiceContainer {
+  return {
+    store: storeService,
+    // Other services not needed for sync tests
+    config: {} as ServiceContainer['config'],
+    search: {} as ServiceContainer['search'],
+    index: {} as ServiceContainer['index'],
+    lance: {} as ServiceContainer['lance'],
+    embeddings: {} as ServiceContainer['embeddings'],
+    codeGraph: {} as ServiceContainer['codeGraph'],
+    pythonBridge: {} as ServiceContainer['pythonBridge'],
+  };
+}
+
+describe('sync.commands', () => {
+  describe('command definition', () => {
+    it('exports stores:sync command', () => {
+      const syncCmd = syncCommands.find((c) => c.name === 'stores:sync');
+      expect(syncCmd).toBeDefined();
+      expect(syncCmd?.description).toContain('Sync');
+    });
+
+    it('has correct args schema', () => {
+      const syncCmd = syncCommands.find((c) => c.name === 'stores:sync');
+      expect(syncCmd?.argsSchema).toBeDefined();
+
+      // Valid empty args
+      const result1 = syncCmd?.argsSchema?.safeParse({});
+      expect(result1?.success).toBe(true);
+
+      // Valid with options
+      const result2 = syncCmd?.argsSchema?.safeParse({
+        reindex: true,
+        prune: true,
+        dryRun: true,
+      });
+      expect(result2?.success).toBe(true);
+    });
+  });
+
+  describe('handleStoresSync', () => {
+    let projectRoot: string;
+    let dataDir: string;
+    let storeService: StoreService;
+    let defService: StoreDefinitionService;
+    let context: HandlerContext;
+
+    beforeEach(async () => {
+      projectRoot = await mkdtemp(join(tmpdir(), 'sync-test-'));
+      dataDir = join(projectRoot, '.bluera/bluera-knowledge/data');
+      defService = new StoreDefinitionService(projectRoot);
+      storeService = new StoreService(dataDir, { definitionService: defService });
+      await storeService.initialize();
+
+      context = {
+        services: createMockServices(storeService),
+        options: { projectRoot },
+      };
+    });
+
+    afterEach(async () => {
+      await rm(projectRoot, { recursive: true, force: true });
+    });
+
+    describe('creates missing stores', () => {
+      it('creates file store from definition', async () => {
+        // Create a directory to reference
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+
+        // Add definition manually (simulating config from git)
+        await defService.addDefinition({
+          type: 'file',
+          name: 'my-docs',
+          path: './docs',
+          description: 'Documentation',
+        });
+
+        const result = await handleStoresSync({}, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.created).toContain('my-docs');
+        expect(response.skipped).toHaveLength(0);
+        expect(response.failed).toHaveLength(0);
+
+        // Verify store was created
+        const store = await storeService.getByName('my-docs');
+        expect(store).toBeDefined();
+        expect(store?.type).toBe('file');
+      });
+
+      it('creates web store from definition', async () => {
+        // Add web store definition
+        await defService.addDefinition({
+          type: 'web',
+          name: 'api-docs',
+          url: 'https://example.com/docs',
+          depth: 2,
+        });
+
+        const result = await handleStoresSync({}, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.created).toContain('api-docs');
+
+        const store = await storeService.getByName('api-docs');
+        expect(store).toBeDefined();
+        expect(store?.type).toBe('web');
+      });
+    });
+
+    describe('skips existing stores', () => {
+      it('skips store that already exists', async () => {
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+
+        // Create store first (this auto-adds the definition via the integration)
+        await storeService.create({
+          name: 'existing-docs',
+          type: 'file',
+          path: docsDir,
+        });
+
+        // Definition was auto-added, so sync should skip this store
+        const result = await handleStoresSync({}, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.skipped).toContain('existing-docs');
+        expect(response.created).toHaveLength(0);
+      });
+    });
+
+    describe('reports orphans', () => {
+      it('reports stores not in definitions', async () => {
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+
+        // Create store without definition (using skipDefinitionSync)
+        await storeService.create(
+          {
+            name: 'orphan-store',
+            type: 'file',
+            path: docsDir,
+          },
+          { skipDefinitionSync: true }
+        );
+
+        const result = await handleStoresSync({}, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.orphans).toContain('orphan-store');
+      });
+    });
+
+    describe('dry run mode', () => {
+      it('does not create stores in dry run mode', async () => {
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+
+        await defService.addDefinition({
+          type: 'file',
+          name: 'dry-run-store',
+          path: './docs',
+        });
+
+        const result = await handleStoresSync({ dryRun: true }, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.dryRun).toBe(true);
+        expect(response.wouldCreate).toContain('dry-run-store');
+
+        // Store should NOT exist
+        const store = await storeService.getByName('dry-run-store');
+        expect(store).toBeUndefined();
+      });
+    });
+
+    describe('prune mode', () => {
+      it('removes orphan stores when prune is true', async () => {
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+
+        // Create orphan store
+        await storeService.create(
+          {
+            name: 'to-prune',
+            type: 'file',
+            path: docsDir,
+          },
+          { skipDefinitionSync: true }
+        );
+
+        const result = await handleStoresSync({ prune: true }, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.pruned).toContain('to-prune');
+
+        // Store should be deleted
+        const store = await storeService.getByName('to-prune');
+        expect(store).toBeUndefined();
+      });
+
+      it('does not prune in dry run mode', async () => {
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+
+        await storeService.create(
+          {
+            name: 'keep-me',
+            type: 'file',
+            path: docsDir,
+          },
+          { skipDefinitionSync: true }
+        );
+
+        const result = await handleStoresSync({ prune: true, dryRun: true }, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.wouldPrune).toContain('keep-me');
+
+        // Store should still exist
+        const store = await storeService.getByName('keep-me');
+        expect(store).toBeDefined();
+      });
+    });
+
+    describe('error handling', () => {
+      it('continues on error and reports failures', async () => {
+        // Add definition for non-existent directory
+        await defService.addDefinition({
+          type: 'file',
+          name: 'bad-store',
+          path: './nonexistent',
+        });
+
+        // Also add a valid definition
+        const docsDir = join(projectRoot, 'docs');
+        await mkdir(docsDir, { recursive: true });
+        await defService.addDefinition({
+          type: 'file',
+          name: 'good-store',
+          path: './docs',
+        });
+
+        const result = await handleStoresSync({}, context);
+        const response = JSON.parse(result.content[0].text);
+
+        // Should have one failure and one success
+        expect(response.failed).toHaveLength(1);
+        expect(response.failed[0].name).toBe('bad-store');
+        expect(response.failed[0].error).toBeDefined();
+        expect(response.created).toContain('good-store');
+      });
+    });
+
+    describe('empty config', () => {
+      it('handles empty definitions gracefully', async () => {
+        const result = await handleStoresSync({}, context);
+        const response = JSON.parse(result.content[0].text);
+
+        expect(response.created).toHaveLength(0);
+        expect(response.skipped).toHaveLength(0);
+        expect(response.failed).toHaveLength(0);
+        expect(response.orphans).toHaveLength(0);
+      });
+    });
+  });
+});

package/src/mcp/commands/sync.commands.ts
@@ -0,0 +1,233 @@
+import { z } from 'zod';
+import { StoreDefinitionService } from '../../services/store-definition.service.js';
+import {
+  isFileStoreDefinition,
+  isRepoStoreDefinition,
+  isWebStoreDefinition,
+} from '../../types/store-definition.js';
+import type { CommandDefinition } from './registry.js';
+import type { StoreDefinition } from '../../types/store-definition.js';
+import type { HandlerContext, ToolResponse } from '../types.js';
+
+/**
+ * Arguments for stores:sync command
+ */
+export interface SyncStoresArgs {
+  reindex?: boolean;
+  prune?: boolean;
+  dryRun?: boolean;
+}
+
+/**
+ * Result of a sync operation
+ */
+interface SyncResult {
+  created: string[];
+  skipped: string[];
+  failed: Array<{ name: string; error: string }>;
+  orphans: string[];
+  pruned?: string[];
+  dryRun?: boolean;
+  wouldCreate?: string[];
+  wouldPrune?: string[];
+}
+
+/**
+ * Handle stores:sync command
+ *
+ * Syncs stores from definitions config:
+ * - Creates missing stores from definitions
+ * - Reports stores not in definitions (orphans)
+ * - Optionally prunes orphan stores
+ * - Optionally re-indexes existing stores
+ */
+export async function handleStoresSync(
+  args: SyncStoresArgs,
+  context: HandlerContext
+): Promise<ToolResponse> {
+  const { services, options } = context;
+  const projectRoot = options.projectRoot;
+
+  if (projectRoot === undefined) {
+    throw new Error('Project root is required for stores:sync');
+  }
+
+  const defService = new StoreDefinitionService(projectRoot);
+  const config = await defService.load();
+
+  const result: SyncResult = {
+    created: [],
+    skipped: [],
+    failed: [],
+    orphans: [],
+  };
+
+  if (args.dryRun === true) {
+    result.dryRun = true;
+    result.wouldCreate = [];
+    result.wouldPrune = [];
+  }
+
+  // Get existing stores
+  const existingStores = await services.store.list();
+  const existingNames = new Set(existingStores.map((s) => s.name));
+
+  // Process each definition
+  for (const def of config.stores) {
+    if (existingNames.has(def.name)) {
+      result.skipped.push(def.name);
+      continue;
+    }
+
+    if (args.dryRun === true) {
+      result.wouldCreate?.push(def.name);
+      continue;
+    }
+
+    // Try to create the store
+    const createResult = await createStoreFromDefinition(def, defService, services, context);
+    if (createResult.success) {
+      result.created.push(def.name);
+    } else {
+      result.failed.push({ name: def.name, error: createResult.error });
+    }
+  }
+
+  // Find orphans (stores not in definitions)
+  const definedNames = new Set(config.stores.map((d) => d.name));
+  for (const store of existingStores) {
+    if (!definedNames.has(store.name)) {
+      result.orphans.push(store.name);
+    }
+  }
+
+  // Prune orphans if requested
+  if (args.prune === true && result.orphans.length > 0) {
+    if (args.dryRun === true) {
+      result.wouldPrune = [...result.orphans];
+    } else {
+      result.pruned = [];
+      for (const orphanName of result.orphans) {
+        const store = await services.store.getByName(orphanName);
+        if (store !== undefined) {
+          const deleteResult = await services.store.delete(store.id, { skipDefinitionSync: true });
+          if (deleteResult.success) {
+            result.pruned.push(orphanName);
+          }
+        }
+      }
+    }
+  }
+
+  return {
+    content: [
+      {
+        type: 'text',
+        text: JSON.stringify(result, null, 2),
+      },
+    ],
+  };
+}
+
+/**
+ * Create a store from a definition
+ */
+async function createStoreFromDefinition(
+  def: StoreDefinition,
+  defService: StoreDefinitionService,
+  services: HandlerContext['services'],
+  _context: HandlerContext
+): Promise<{ success: true } | { success: false; error: string }> {
+  try {
+    if (isFileStoreDefinition(def)) {
+      // Resolve path relative to project root
+      const resolvedPath = defService.resolvePath(def.path);
+      const createResult = await services.store.create(
+        {
+          name: def.name,
+          type: 'file',
+          path: resolvedPath,
+          description: def.description,
+          tags: def.tags,
+        },
+        { skipDefinitionSync: true } // Don't re-add to definitions
+      );
+      if (!createResult.success) {
+        return { success: false, error: createResult.error.message };
+      }
+      return { success: true };
+    }
+
+    if (isRepoStoreDefinition(def)) {
+      const createResult = await services.store.create(
+        {
+          name: def.name,
+          type: 'repo',
+          url: def.url,
+          branch: def.branch,
+          depth: def.depth,
+          description: def.description,
+          tags: def.tags,
+        },
+        { skipDefinitionSync: true }
+      );
+      if (!createResult.success) {
+        return { success: false, error: createResult.error.message };
+      }
+      return { success: true };
+    }
+
+    if (isWebStoreDefinition(def)) {
+      const createResult = await services.store.create(
+        {
+          name: def.name,
+          type: 'web',
+          url: def.url,
+          depth: def.depth,
+          description: def.description,
+          tags: def.tags,
+        },
+        { skipDefinitionSync: true }
+      );
+      if (!createResult.success) {
+        return { success: false, error: createResult.error.message };
+      }
+      return { success: true };
+    }
+
+    return { success: false, error: 'Unknown store definition type' };
+  } catch (error) {
+    return {
+      success: false,
+      error: error instanceof Error ? error.message : String(error),
+    };
+  }
+}
+
+/**
+ * Sync commands for the execute meta-tool
+ */
+export const syncCommands: CommandDefinition[] = [
+  {
+    name: 'stores:sync',
+    description: 'Sync stores from definitions config (bootstrap on fresh clone)',
+    argsSchema: z.object({
+      reindex: z.boolean().optional().describe('Re-index existing stores after sync'),
+      prune: z.boolean().optional().describe('Remove stores not in definitions'),
+      dryRun: z.boolean().optional().describe('Show what would happen without making changes'),
+    }),
+    handler: (args: Record<string, unknown>, context: HandlerContext): Promise<ToolResponse> => {
+      const syncArgs: SyncStoresArgs = {};
+      if (typeof args['reindex'] === 'boolean') {
+        syncArgs.reindex = args['reindex'];
+      }
+      if (typeof args['prune'] === 'boolean') {
+        syncArgs.prune = args['prune'];
+      }
+      if (typeof args['dryRun'] === 'boolean') {
+        syncArgs.dryRun = args['dryRun'];
+      }
+      return handleStoresSync(syncArgs, context);
+    },
+  },
+];
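
The handler above serializes its SyncResult as pretty-printed JSON into the tool response text, which is what the tests parse with JSON.parse(result.content[0].text). For orientation, a dry-run sync against the two-entry definitions sketch shown earlier, with one unmanaged store already on disk, would produce an object shaped roughly like the following (field names come from SyncResult; the values are invented for illustration, not captured from the package):

// Illustrative value only; shape taken from SyncResult, values invented.
const dryRunResult = {
  created: [],
  skipped: [],
  failed: [],
  orphans: ['orphan-store'], // stores on disk with no matching definition
  dryRun: true,
  wouldCreate: ['my-docs', 'api-docs'],
  wouldPrune: [], // stays empty unless prune is also requested
};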

package/src/services/gitignore.service.test.ts
@@ -0,0 +1,157 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { GitignoreService } from './gitignore.service.js';
+import { rm, mkdtemp, writeFile, readFile, access } from 'node:fs/promises';
+import { tmpdir } from 'node:os';
+import { join } from 'node:path';
+
+describe('GitignoreService', () => {
+  let projectRoot: string;
+  let service: GitignoreService;
+
+  beforeEach(async () => {
+    projectRoot = await mkdtemp(join(tmpdir(), 'gitignore-test-'));
+    service = new GitignoreService(projectRoot);
+  });
+
+  afterEach(async () => {
+    await rm(projectRoot, { recursive: true, force: true });
+  });
+
+  describe('hasRequiredPatterns', () => {
+    it('returns false when .gitignore does not exist', async () => {
+      const has = await service.hasRequiredPatterns();
+      expect(has).toBe(false);
+    });
+
+    it('returns false when .gitignore exists but is empty', async () => {
+      await writeFile(join(projectRoot, '.gitignore'), '');
+      const has = await service.hasRequiredPatterns();
+      expect(has).toBe(false);
+    });
+
+    it('returns false when .gitignore is missing bluera patterns', async () => {
+      await writeFile(join(projectRoot, '.gitignore'), 'node_modules/\n*.log\n');
+      const has = await service.hasRequiredPatterns();
+      expect(has).toBe(false);
+    });
+
+    it('returns true when all required patterns are present', async () => {
+      const content = `
+node_modules/
+.bluera/
+!.bluera/bluera-knowledge/
+!.bluera/bluera-knowledge/stores.config.json
+`;
+      await writeFile(join(projectRoot, '.gitignore'), content);
+      const has = await service.hasRequiredPatterns();
+      expect(has).toBe(true);
+    });
+
+    it('returns false when only some patterns are present', async () => {
+      const content = `
+.bluera/
+`;
+      await writeFile(join(projectRoot, '.gitignore'), content);
+      const has = await service.hasRequiredPatterns();
+      expect(has).toBe(false);
+    });
+  });
+
+  describe('ensureGitignorePatterns', () => {
+    it('creates .gitignore if it does not exist', async () => {
+      const result = await service.ensureGitignorePatterns();
+
+      expect(result.updated).toBe(true);
+      expect(result.message).toContain('Created');
+
+      // Verify file exists
+      const gitignorePath = join(projectRoot, '.gitignore');
+      await expect(access(gitignorePath)).resolves.toBeUndefined();
+    });
+
+    it('adds patterns to empty .gitignore', async () => {
+      await writeFile(join(projectRoot, '.gitignore'), '');
+
+      const result = await service.ensureGitignorePatterns();
+
+      expect(result.updated).toBe(true);
+      expect(result.message).toContain('Updated');
+
+      const content = await readFile(join(projectRoot, '.gitignore'), 'utf-8');
+      expect(content).toContain('.bluera/');
+      expect(content).toContain('!.bluera/bluera-knowledge/');
+      expect(content).toContain('!.bluera/bluera-knowledge/stores.config.json');
+    });
+
+    it('appends patterns to existing .gitignore', async () => {
+      const existingContent = 'node_modules/\n*.log\n';
+      await writeFile(join(projectRoot, '.gitignore'), existingContent);
+
+      const result = await service.ensureGitignorePatterns();
+
+      expect(result.updated).toBe(true);
+
+      const content = await readFile(join(projectRoot, '.gitignore'), 'utf-8');
+      // Should preserve existing content
+      expect(content).toContain('node_modules/');
+      expect(content).toContain('*.log');
+      // Should add new patterns
+      expect(content).toContain('.bluera/');
+      expect(content).toContain('!.bluera/bluera-knowledge/');
+    });
+
+    it('skips if all patterns already present', async () => {
+      const existingContent = `
+node_modules/
+.bluera/
+!.bluera/bluera-knowledge/
+!.bluera/bluera-knowledge/stores.config.json
+`;
+      await writeFile(join(projectRoot, '.gitignore'), existingContent);
+
+      const result = await service.ensureGitignorePatterns();
+
+      expect(result.updated).toBe(false);
+      expect(result.message).toContain('already');
+    });
+
+    it('adds missing patterns when some are present', async () => {
+      const existingContent = '.bluera/\n';
+      await writeFile(join(projectRoot, '.gitignore'), existingContent);
+
+      const result = await service.ensureGitignorePatterns();
+
+      expect(result.updated).toBe(true);
+
+      const content = await readFile(join(projectRoot, '.gitignore'), 'utf-8');
+      expect(content).toContain('!.bluera/bluera-knowledge/');
+      expect(content).toContain('!.bluera/bluera-knowledge/stores.config.json');
+    });
+
+    it('includes header comment in new additions', async () => {
+      const result = await service.ensureGitignorePatterns();
+
+      expect(result.updated).toBe(true);
+
+      const content = await readFile(join(projectRoot, '.gitignore'), 'utf-8');
+      expect(content).toContain('# Bluera Knowledge');
+    });
+
+    it('preserves trailing newline', async () => {
+      const existingContent = 'node_modules/\n';
+      await writeFile(join(projectRoot, '.gitignore'), existingContent);
+
+      await service.ensureGitignorePatterns();
+
+      const content = await readFile(join(projectRoot, '.gitignore'), 'utf-8');
+      expect(content.endsWith('\n')).toBe(true);
+    });
+  });
+
+  describe('getGitignorePath', () => {
+    it('returns correct path', () => {
+      const path = service.getGitignorePath();
+      expect(path).toBe(join(projectRoot, '.gitignore'));
+    });
+  });
+});
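
Taken together, these assertions pin down the .gitignore contract: keep the runtime .bluera/ directory out of version control while re-including the plugin's own directory so that stores.config.json, the definitions file the sync command reads, can be committed and shared. The managed block implied by the assertions looks something like the snippet below; it is reconstructed from the tests, not copied from gitignore.service.ts, whose exact text and ordering may differ.

// Reconstructed from the test expectations above; the actual text written by
// GitignoreService may differ slightly in wording or ordering.
const blueraKnowledgeIgnoreBlock = [
  '# Bluera Knowledge',
  '.bluera/',
  '!.bluera/bluera-knowledge/',
  '!.bluera/bluera-knowledge/stores.config.json',
].join('\n') + '\n';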