bluera-knowledge 0.17.2 → 0.18.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,98 @@
 
 All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines.
 
+ ## [0.18.1](https://github.com/blueraai/bluera-knowledge/compare/v0.18.0...v0.18.1) (2026-01-28)
+
+ ## [0.18.0](https://github.com/blueraai/bluera-knowledge/compare/v0.17.2...v0.18.0) (2026-01-28)
+
+
+ ### ⚠ BREAKING CHANGES
+
+ * **index:** Existing stores need to be re-indexed as document IDs
+   have changed format. Run `bluera-knowledge index <store>` to rebuild.
+
+ Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
+
+ ### Features
+
+ * add atomic file writes for crash-safe operations ([0392d94](https://github.com/blueraai/bluera-knowledge/commit/0392d94d84319deaa98fdcbd9f4a6f17b6fe4dfe))
+ * add two-phase drift detection for incremental indexing ([a05e101](https://github.com/blueraai/bluera-knowledge/commit/a05e1010a4f211d571f0719c5d65d43f9c6d2149))
+ * **index:** expand supported file extensions ([c585435](https://github.com/blueraai/bluera-knowledge/commit/c5854354e44b8335b91b452869b1669b1ffedd55))
+ * **mcp:** add missing store schema fields ([ae5ecd3](https://github.com/blueraai/bluera-knowledge/commit/ae5ecd3ea4b119b65a9148257493b854f4d1b5bc))
+ * **mcp:** add web store support to store:create ([d71aac4](https://github.com/blueraai/bluera-knowledge/commit/d71aac4aec9c5a47c3fa6f13437584494dfbd22c))
+ * **store:** preserve web store crawl options in sync ([838a9b5](https://github.com/blueraai/bluera-knowledge/commit/838a9b5085ed87cb69252642b7f720630a6f8c6a))
+
+
+ ### Bug Fixes
+
+ * address 9 code bugs from CODEX-15 analysis ([6f324ff](https://github.com/blueraai/bluera-knowledge/commit/6f324ffbb49eeb63c3d30057f8cabab7b02de457))
+ * **analysis:** add .bluera to dependency analyzer ignore list ([10ad335](https://github.com/blueraai/bluera-knowledge/commit/10ad335cb8280a61b2b37f4b6d14ae941cc1b109))
+ * **bridge:** use platform-agnostic path detection ([7f75c43](https://github.com/blueraai/bluera-knowledge/commit/7f75c43e26c948b6a5b523c7595061cfc3589c29))
+ * **cache:** add event-based graph cache invalidation ([1288932](https://github.com/blueraai/bluera-knowledge/commit/128893216e41195317ab86bdf4b215d1b0bfb4f4))
+ * **chunking:** match export default declarations ([8fb074f](https://github.com/blueraai/bluera-knowledge/commit/8fb074f337b4dbdaed98720f0d6cbdb81014b45e))
+ * **chunking:** validate chunkOverlap < chunkSize ([10a7c9c](https://github.com/blueraai/bluera-knowledge/commit/10a7c9c6374bf256f2a7081f737911d03153f556))
+ * **cli:** add missing manifest cleanup to store delete ([3c6dd43](https://github.com/blueraai/bluera-knowledge/commit/3c6dd43ef3b3cc12fed03466dfc73ba3dbe12ef4))
+ * **cli:** add NaN validation for numeric options ([16b174d](https://github.com/blueraai/bluera-knowledge/commit/16b174d1126e342c8b5368c04f2a099d1de6d19e))
+ * **cli:** correct default paths in help text ([e1385b2](https://github.com/blueraai/bluera-knowledge/commit/e1385b29f3678a4b1cfee652f7158d677c7b612c))
+ * **cli:** correct repo path in help text ([3acf615](https://github.com/blueraai/bluera-knowledge/commit/3acf615e845acf3e0f26308e7abe2a71c9f8d3a5))
+ * **cli:** pass projectRoot to createServices in CLI commands ([6e245c7](https://github.com/blueraai/bluera-knowledge/commit/6e245c7b5d6ba71cdd842ec65dfb9d064cb9c46a))
+ * **cli:** standardize process termination to avoid cleanup bypass ([6b27ffc](https://github.com/blueraai/bluera-knowledge/commit/6b27ffc54ae29d0f717442ea375a1c9010f40b20))
+ * **cli:** support SSH git URLs in store create ([911761c](https://github.com/blueraai/bluera-knowledge/commit/911761c1e07035b0581442fe9b80bfc5c1893cd1))
+ * **config:** implement deep merge for partial configs ([451c8a6](https://github.com/blueraai/bluera-knowledge/commit/451c8a6ca5a82d8004a1ce55f12bca34933f30e6))
+ * **config:** prefer git root over PWD for project root resolution ([aed612c](https://github.com/blueraai/bluera-knowledge/commit/aed612ce161d2da10749fa38af35a30ae09445c1))
+ * **config:** remove unused search config fields ([515abad](https://github.com/blueraai/bluera-knowledge/commit/515abad947a11c8b5fa156a71bbf88e78c42b5a3))
+ * **config:** resolve explicit paths against projectRoot ([3dcb12a](https://github.com/blueraai/bluera-knowledge/commit/3dcb12a38a16352ecd4b986f254d02d1cd9535da))
+ * **config:** use path.isAbsolute() for Windows compatibility ([282c802](https://github.com/blueraai/bluera-knowledge/commit/282c802bf17ef6ad22f2e7410b4fcbc9999945c7))
+ * **config:** wire up crawl config to intelligent crawler ([81468d2](https://github.com/blueraai/bluera-knowledge/commit/81468d2238e08766b32236d3863d682e0c3699ae))
+ * **config:** wire up embedding batchSize from config ([ae4913f](https://github.com/blueraai/bluera-knowledge/commit/ae4913f4705d3c751ce2a000733650c622012c89))
+ * **config:** wire up ignorePatterns from config ([ed950f2](https://github.com/blueraai/bluera-knowledge/commit/ed950f21be3d402d72d411d15039736715d32973))
+ * **config:** wire up search defaults from config ([74bd956](https://github.com/blueraai/bluera-knowledge/commit/74bd956eba59d47e6107a29f8f7e7c028376b45b))
+ * **coverage:** improve branch coverage toward 80.5% threshold ([bd5c95f](https://github.com/blueraai/bluera-knowledge/commit/bd5c95fa7a65b2545a05ac83d6c055b361f652aa))
+ * **crawl:** clear existing documents before re-crawling web stores ([e4bc5b0](https://github.com/blueraai/bluera-knowledge/commit/e4bc5b0091e2e4931c156a50395e56366ba80317))
+ * **crawl:** correct user agent typo in crawler ([f19a491](https://github.com/blueraai/bluera-knowledge/commit/f19a491aa47bd65774537cc26191d945cd59941f))
+ * **crawl:** cross-platform Python executable detection ([782adc8](https://github.com/blueraai/bluera-knowledge/commit/782adc8804cafcc8dde1a9df95da35cd1d4b9622))
+ * **embeddings:** derive dimensions dynamically from model output ([8ec0ac8](https://github.com/blueraai/bluera-knowledge/commit/8ec0ac811b9357ead91fbad7e63578fedd3bb7ce))
+ * **http:** add full cleanup to DELETE /api/stores/:id ([91005de](https://github.com/blueraai/bluera-knowledge/commit/91005de7689315d7f638b9e2daede03654724d75))
+ * **index:** handle unreadable files gracefully ([103ee63](https://github.com/blueraai/bluera-knowledge/commit/103ee636cf9109bc28c9d3dcd382136c1c326586))
+ * **index:** implement --force flag with incremental indexing and duplicate prevention ([b3ad20b](https://github.com/blueraai/bluera-knowledge/commit/b3ad20b3cbaf79e354ec001110ac6af6da2780e2))
+ * **index:** include file path in document ID to prevent collisions ([7abd023](https://github.com/blueraai/bluera-knowledge/commit/7abd023729e2ad7bc24e9c92f4b20b3c0b7b8d0c))
+ * **index:** rebuild FTS index after document deletions ([0f8588b](https://github.com/blueraai/bluera-knowledge/commit/0f8588bdf7f83fadad13d33c82b0379b335e6e65))
+ * **index:** rename documentsIndexed to filesIndexed ([c2870fa](https://github.com/blueraai/bluera-knowledge/commit/c2870fa93397350c5900987810460c2d7b0a1e37))
+ * **index:** update code graphs on incremental indexing ([3446150](https://github.com/blueraai/bluera-knowledge/commit/344615074ba38c408f36f9c5a4e78a6fdafe97d9))
+ * **lance:** connect on-demand in deleteStore ([cc5a25f](https://github.com/blueraai/bluera-knowledge/commit/cc5a25f3591e2c27e2689fef64ab03ce5dbe73ac))
+ * **lance:** guard deleteDocuments against empty array ([617190c](https://github.com/blueraai/bluera-knowledge/commit/617190c16d500ec17a35435f257c7315bd64c098))
+ * **mcp:** add full cleanup to sync prune and resolve dataDir ([afdf39c](https://github.com/blueraai/bluera-knowledge/commit/afdf39c42bf2eef763101c4da437c0a4b8911395))
+ * **mcp:** add missing fields to store:create execute schema ([1e2a352](https://github.com/blueraai/bluera-knowledge/commit/1e2a35292efe994b485ee17df2434c5b55a13cb9))
+ * **mcp:** add sync and uninstall to execute tool description ([cea50aa](https://github.com/blueraai/bluera-knowledge/commit/cea50aa611b117753cef00979b04a218bd8805a6))
+ * **mcp:** correct stores:sync command name in execute tool description ([c4f09ca](https://github.com/blueraai/bluera-knowledge/commit/c4f09ca7bc9561b21eb5ee9c923d375958d4132f))
+ * **mcp:** pass projectRoot to runMCPServer in CLI command ([9a2d6e7](https://github.com/blueraai/bluera-knowledge/commit/9a2d6e7461722d4f6a5af65af58428da43714179))
+ * **mcp:** prevent bootstrap output from corrupting MCP stdio transport ([861f03f](https://github.com/blueraai/bluera-knowledge/commit/861f03fcd112fcfb145195852a3e1185c5707e39))
+ * **mcp:** queue crawl job for web stores ([8314014](https://github.com/blueraai/bluera-knowledge/commit/8314014cc0303cf20b34f4772cc55aaa5df32699))
+ * **mcp:** wire intent parameter through to search service ([305a608](https://github.com/blueraai/bluera-knowledge/commit/305a608668baa75b7cc93a65b88ef43d341b65e4))
+ * **plugin:** replace process.exit with exitCode pattern ([a64c420](https://github.com/blueraai/bluera-knowledge/commit/a64c4201aabeee132b24e204c5770b7866df4862))
+ * **search:** add changelog file type to ranking boosts ([035e414](https://github.com/blueraai/bluera-knowledge/commit/035e414481b416d0b59bf28f75d38bd07e08b870))
+ * **search:** include storeId in deduplication key ([251ff40](https://github.com/blueraai/bluera-knowledge/commit/251ff40614b2005b56ca55c56313f2107dc78588))
+ * **search:** skip minRelevance filter in FTS mode ([7429b79](https://github.com/blueraai/bluera-knowledge/commit/7429b79e18b746acc2caf7bf4b305e4803940bf1))
+ * **search:** validate threshold/minRelevance range (0-1) ([320d977](https://github.com/blueraai/bluera-knowledge/commit/320d977316c0653c4ed3ab31dfec07c4755bd014))
+ * **serve:** resolve dataDir for repo clone cleanup ([85abebe](https://github.com/blueraai/bluera-knowledge/commit/85abebe8e9f15255af356df98f2c7252019d61a3))
+ * **server:** use store.path for repo deletion ([3b1c151](https://github.com/blueraai/bluera-knowledge/commit/3b1c151f7192e8f7ac9112ea79658b03b03d0fa3))
+ * **serve:** use config values for host/port defaults ([b16812f](https://github.com/blueraai/bluera-knowledge/commit/b16812f4c77b98df5963bf6e280a0f140efc9611))
+ * **services:** skip search cleanup for uninitialized lazy containers ([f6387d2](https://github.com/blueraai/bluera-knowledge/commit/f6387d2ce996664f03a0eba2999c1b76248d7415))
+ * **services:** use ConfigService's resolved projectRoot ([a8b4a8e](https://github.com/blueraai/bluera-knowledge/commit/a8b4a8e3080b1346084a58a47feeafafcebf4fef))
+ * **store:** preserve depth field on RepoStore rename ([987a18c](https://github.com/blueraai/bluera-knowledge/commit/987a18c08d7df5298cbf24f34be32a6a7ea26076))
+ * **store:** prevent duplicate names on rename and sync definition correctly ([444b3df](https://github.com/blueraai/bluera-knowledge/commit/444b3dfb0ee01f676548f711c005b5dd383f6a54))
+ * **store:** use default depth in repo store metadata ([4e95048](https://github.com/blueraai/bluera-knowledge/commit/4e95048bec15de55e4a4d528ff8a5fd553546c76))
+ * **store:** validate empty names in update method ([2d30f57](https://github.com/blueraai/bluera-knowledge/commit/2d30f575e5b15fcbfd6a5dfb2490a148a7201e1d))
+ * **store:** validate local repo path exists before creating store ([6057d5e](https://github.com/blueraai/bluera-knowledge/commit/6057d5e717b859a1589a1fd15c82f2642fb6cdde))
+ * **sync:** complete cleanup in --prune ([a1c3dd2](https://github.com/blueraai/bluera-knowledge/commit/a1c3dd2b34edf4d70b7df8ecee8ff796986ec139))
+ * **sync:** use ProjectRootService for project root detection ([f7f166a](https://github.com/blueraai/bluera-knowledge/commit/f7f166a8a256fc83421925634610d54e59988438))
+ * **types:** allow SSH URLs in repo store definitions ([7855f5e](https://github.com/blueraai/bluera-knowledge/commit/7855f5ec0855fff2fda034cf17c280b544e63815))
+ * **types:** remove unused SearchQuery fields and align chunk defaults ([86e789e](https://github.com/blueraai/bluera-knowledge/commit/86e789e3b46f342a295411f4e672517926d791ff))
+ * **watch:** check full indexing result before calling onReindex ([c4622f5](https://github.com/blueraai/bluera-knowledge/commit/c4622f5ad563d030e8e4e331878a91a6f9225386))
+ * **watch:** inject embeddings for dimensions setup ([aafd89b](https://github.com/blueraai/bluera-knowledge/commit/aafd89b84b5c457daa4355e4f8c477691421a089))
+ * **watch:** proper SIGINT cleanup with destroyServices ([464f5d3](https://github.com/blueraai/bluera-knowledge/commit/464f5d362e40f8e1e1136aa2aad6b812871c621d))
+ * **worker:** set LanceDB dimensions before initialize ([b9554bb](https://github.com/blueraai/bluera-knowledge/commit/b9554bb36a399d2d425e63f25075b3829c616566))
+
 ## [0.17.2](https://github.com/blueraai/bluera-knowledge/compare/v0.17.0...v0.17.2) (2026-01-18)
 
 
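The breaking change above concerns only the document-ID format; the diff does not publish the new format itself. What it does show is that IDs must satisfy the `ID_PATTERN` introduced in the new `brands` chunk further down, and that file paths are now embedded in IDs to prevent collisions (the `7abd023` fix). The sketch below is therefore hypothetical: `makeDocumentId` is an invented name illustrating one way such an ID could be built, not the shipped implementation.

```ts
// Hypothetical sketch only: the shipped ID format is not shown in this diff.
// Assumes IDs must satisfy the package's ID_PATTERN and that the file path
// is embedded so identical chunks in different files cannot collide.
const ID_PATTERN = /^[a-zA-Z0-9_-]+$/;

function makeDocumentId(filePath: string, chunkIndex: number): string {
  // Replace every character outside ID_PATTERN (slashes, dots, ...) with "-".
  const safePath = filePath.replace(/[^a-zA-Z0-9_-]/g, "-");
  const id = `${safePath}_${chunkIndex}`;
  if (!ID_PATTERN.test(id)) {
    throw new Error(`Invalid document ID: ${id}`);
  }
  return id;
}

makeDocumentId("src/types/brands.ts", 0); // "src-types-brands-ts_0"
```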
package/README.md CHANGED
@@ -536,7 +536,7 @@ Simple mode still crawls effectively—it just doesn't use AI to select which pa
 <details>
 <summary><b>📋 How to view logs for debugging</b></summary>
 
- The plugin logs all MCP server operations to `~/.bluera/bluera-knowledge/logs/app.log`.
+ The plugin logs all MCP server operations to `.bluera/bluera-knowledge/logs/app.log` (relative to project root).
 
 **View logs using the `/logs` command:**
 
@@ -559,7 +559,7 @@ The plugin logs all MCP server operations to `~/.bluera/bluera-knowledge/logs/ap
 
 **Manual access:**
 ```bash
- tail -f ~/.bluera/bluera-knowledge/logs/app.log
+ tail -f .bluera/bluera-knowledge/logs/app.log
 ```
 
 Logs are JSON formatted (NDJSON) and can be processed with `jq` for pretty-printing.
@@ -675,7 +675,7 @@ When enabled, a UserPromptSubmit hook analyzes your prompt for patterns like:
 
 Claude evaluates each suggestion and invokes relevant skills before answering. Users who already use BK terminology are excluded (they already know the tool).
 
- **Configuration stored in:** `~/.local/share/bluera-knowledge/skill-activation.json`
+ **Configuration stored in:** `.bluera/bluera-knowledge/skill-activation.json` (relative to project root)
 
 ---
 
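Both README changes move state from home-directory paths into the project-local `.bluera/` directory. For the NDJSON logs, `jq` is one option; a dependency-free alternative in Node takes a few lines. A minimal sketch, assuming the project-relative log path documented above:

```ts
// Pretty-print the NDJSON log without jq. The log path assumes the
// project-relative default from the README change above.
import { readFileSync } from "node:fs";

const log = readFileSync(".bluera/bluera-knowledge/logs/app.log", "utf8");
for (const line of log.split("\n")) {
  if (line.trim().length === 0) continue; // skip blank/trailing lines
  console.log(JSON.stringify(JSON.parse(line), null, 2));
}
```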
package/dist/brands-3EYIYV6T.js ADDED
@@ -0,0 +1,13 @@
+ import {
+   createDocumentId,
+   createStoreId,
+   isDocumentId,
+   isStoreId
+ } from "./chunk-CLIMKLTW.js";
+ export {
+   createDocumentId,
+   createStoreId,
+   isDocumentId,
+   isStoreId
+ };
+ //# sourceMappingURL=brands-3EYIYV6T.js.map
package/dist/chunk-CLIMKLTW.js ADDED
@@ -0,0 +1,28 @@
+ // src/types/brands.ts
+ var ID_PATTERN = /^[a-zA-Z0-9_-]+$/;
+ function isStoreId(value) {
+   return value.length > 0 && ID_PATTERN.test(value);
+ }
+ function isDocumentId(value) {
+   return value.length > 0 && ID_PATTERN.test(value);
+ }
+ function createStoreId(value) {
+   if (!isStoreId(value)) {
+     throw new Error(`Invalid store ID: ${value}`);
+   }
+   return value;
+ }
+ function createDocumentId(value) {
+   if (!isDocumentId(value)) {
+     throw new Error(`Invalid document ID: ${value}`);
+   }
+   return value;
+ }
+
+ export {
+   isStoreId,
+   isDocumentId,
+   createStoreId,
+   createDocumentId
+ };
+ //# sourceMappingURL=chunk-CLIMKLTW.js.map
package/dist/chunk-CLIMKLTW.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/types/brands.ts"],"sourcesContent":["// Branded type symbols\ndeclare const StoreIdBrand: unique symbol;\ndeclare const DocumentIdBrand: unique symbol;\n\n// Branded types\nexport type StoreId = string & { readonly [StoreIdBrand]: typeof StoreIdBrand };\nexport type DocumentId = string & { readonly [DocumentIdBrand]: typeof DocumentIdBrand };\n\n// Valid ID pattern: alphanumeric, hyphens, underscores\nconst ID_PATTERN = /^[a-zA-Z0-9_-]+$/;\n\nexport function isStoreId(value: string): value is StoreId {\n return value.length > 0 && ID_PATTERN.test(value);\n}\n\nexport function isDocumentId(value: string): value is DocumentId {\n return value.length > 0 && ID_PATTERN.test(value);\n}\n\nexport function createStoreId(value: string): StoreId {\n if (!isStoreId(value)) {\n throw new Error(`Invalid store ID: ${value}`);\n }\n return value;\n}\n\nexport function createDocumentId(value: string): DocumentId {\n if (!isDocumentId(value)) {\n throw new Error(`Invalid document ID: ${value}`);\n }\n return value;\n}\n"],"mappings":";AASA,IAAM,aAAa;AAEZ,SAAS,UAAU,OAAiC;AACzD,SAAO,MAAM,SAAS,KAAK,WAAW,KAAK,KAAK;AAClD;AAEO,SAAS,aAAa,OAAoC;AAC/D,SAAO,MAAM,SAAS,KAAK,WAAW,KAAK,KAAK;AAClD;AAEO,SAAS,cAAc,OAAwB;AACpD,MAAI,CAAC,UAAU,KAAK,GAAG;AACrB,UAAM,IAAI,MAAM,qBAAqB,KAAK,EAAE;AAAA,EAC9C;AACA,SAAO;AACT;AAEO,SAAS,iBAAiB,OAA2B;AAC1D,MAAI,CAAC,aAAa,KAAK,GAAG;AACxB,UAAM,IAAI,MAAM,wBAAwB,KAAK,EAAE;AAAA,EACjD;AACA,SAAO;AACT;","names":[]}
package/dist/chunk-YMDXPECI.js → package/dist/chunk-EZXJ3W5X.js CHANGED
@@ -4,13 +4,12 @@ import {
   StoreDefinitionService,
   createLazyServices,
   createLogger,
-   createStoreId,
   destroyServices,
   isFileStoreDefinition,
   isRepoStoreDefinition,
   isWebStoreDefinition,
   summarizePayload
- } from "./chunk-WMALVLFW.js";
+ } from "./chunk-RDDGZIDL.js";
 
 // src/mcp/server.ts
 import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -670,10 +669,15 @@ var GetStoreInfoArgsSchema = z.object({
 });
 var CreateStoreArgsSchema = z.object({
   name: z.string().min(1, "Store name must be a non-empty string"),
-   type: z.enum(["file", "repo"]),
+   type: z.enum(["file", "repo", "web"]),
   source: z.string().min(1, "Source path or URL must be a non-empty string"),
   branch: z.string().optional(),
-   description: z.string().optional()
+   description: z.string().optional(),
+   tags: z.array(z.string()).optional(),
+   depth: z.number().int().positive().optional(),
+   maxPages: z.number().int().positive().optional(),
+   crawlInstructions: z.string().optional(),
+   extractInstructions: z.string().optional()
 });
 var IndexStoreArgsSchema = z.object({
   store: z.string().min(1, "Store name or ID must be a non-empty string")
@@ -965,12 +969,13 @@ var logger3 = createLogger("spawn-worker");
 function spawnBackgroundWorker(jobId, dataDir) {
   const currentFilePath = fileURLToPath(import.meta.url);
   const currentDir = path.dirname(currentFilePath);
-   const isProduction = currentFilePath.includes("/dist/");
+   const distPattern = `${path.sep}dist${path.sep}`;
+   const isProduction = currentFilePath.includes(distPattern);
   let command;
   let args;
   if (isProduction) {
-     const distIndex = currentFilePath.indexOf("/dist/");
-     const distDir = currentFilePath.substring(0, distIndex + 6);
+     const distIndex = currentFilePath.indexOf(distPattern);
+     const distDir = currentFilePath.substring(0, distIndex + distPattern.length);
     const workerScript = path.join(distDir, "workers", "background-worker-cli.js");
     command = process.execPath;
     args = [workerScript, jobId];
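The `path.sep` change matters because on Windows `fileURLToPath(import.meta.url)` yields a backslash-separated path, so the hard-coded `"/dist/"` check could never match and production installs were misclassified. A small illustration (the Windows path is hypothetical):

```ts
// Why the separator-aware check fixes Windows: "\\" is what path.sep
// evaluates to there, so `${path.sep}dist${path.sep}` builds "\\dist\\".
const winSep = "\\";
const winPath = ["C:", "app", "dist", "workers", "background-worker-cli.js"].join(winSep);

console.log(winPath.includes("/dist/"));                 // false: the old hard-coded check
console.log(winPath.includes(`${winSep}dist${winSep}`)); // true: the new pattern
```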
@@ -1022,6 +1027,10 @@ var handleListStores = async (args, context) => {
   type: s.type,
   path: "path" in s ? s.path : void 0,
   url: "url" in s && s.url !== void 0 ? s.url : void 0,
+   branch: "branch" in s ? s.branch : void 0,
+   depth: "depth" in s ? s.depth : void 0,
+   maxPages: "maxPages" in s ? s.maxPages : void 0,
+   tags: "tags" in s ? s.tags : void 0,
   description: s.description,
   createdAt: s.createdAt.toISOString()
 }))
@@ -1037,7 +1046,7 @@ var handleGetStoreInfo = async (args, context) => {
 const validated = GetStoreInfoArgsSchema.parse(args);
 logger4.info({ store: validated.store }, "Get store info started");
 const { services } = context;
- const store = await services.store.getByIdOrName(createStoreId(validated.store));
+ const store = await services.store.getByIdOrName(validated.store);
 if (store === void 0) {
   logger4.warn({ store: validated.store }, "Store not found");
   throw new Error(`Store not found: ${validated.store}`);
@@ -1055,6 +1064,11 @@ var handleGetStoreInfo = async (args, context) => {
   path: "path" in store ? store.path : void 0,
   url: "url" in store && store.url !== void 0 ? store.url : void 0,
   branch: "branch" in store ? store.branch : void 0,
+   depth: "depth" in store ? store.depth : void 0,
+   maxPages: "maxPages" in store ? store.maxPages : void 0,
+   crawlInstructions: "crawlInstructions" in store ? store.crawlInstructions : void 0,
+   extractInstructions: "extractInstructions" in store ? store.extractInstructions : void 0,
+   tags: "tags" in store ? store.tags : void 0,
   description: store.description,
   status: store.status,
   createdAt: store.createdAt.toISOString(),
@@ -1080,7 +1094,12 @@ var handleCreateStore = async (args, context) => {
   type: validated.type,
   ...isUrl ? { url: validated.source } : { path: validated.source },
   ...validated.branch !== void 0 ? { branch: validated.branch } : {},
-   ...validated.description !== void 0 ? { description: validated.description } : {}
+   ...validated.description !== void 0 ? { description: validated.description } : {},
+   ...validated.tags !== void 0 ? { tags: validated.tags } : {},
+   ...validated.depth !== void 0 ? { depth: validated.depth } : {},
+   ...validated.maxPages !== void 0 ? { maxPages: validated.maxPages } : {},
+   ...validated.crawlInstructions !== void 0 ? { crawlInstructions: validated.crawlInstructions } : {},
+   ...validated.extractInstructions !== void 0 ? { extractInstructions: validated.extractInstructions } : {}
 });
 if (!result.success) {
   logger4.error({ name: validated.name, error: result.error.message }, "Create store failed");
@@ -1097,10 +1116,21 @@ var handleCreateStore = async (args, context) => {
 if ("path" in result.data && result.data.path) {
   jobDetails["path"] = result.data.path;
 }
+ if (validated.type === "web") {
+   if (validated.maxPages !== void 0) {
+     jobDetails["maxPages"] = validated.maxPages;
+   }
+   if (validated.crawlInstructions !== void 0) {
+     jobDetails["crawlInstruction"] = validated.crawlInstructions;
+   }
+   if (validated.extractInstructions !== void 0) {
+     jobDetails["extractInstruction"] = validated.extractInstructions;
+   }
+ }
 const job = jobService.createJob({
-   type: validated.type === "repo" && isUrl ? "clone" : "index",
+   type: validated.type === "web" ? "crawl" : validated.type === "repo" && isUrl ? "clone" : "index",
   details: jobDetails,
-   message: `Indexing ${result.data.name}...`
+   message: validated.type === "web" ? `Crawling ${result.data.name}...` : `Indexing ${result.data.name}...`
 });
 spawnBackgroundWorker(job.id, options.dataDir);
 logger4.info(
@@ -1137,7 +1167,7 @@ var handleIndexStore = async (args, context) => {
 const validated = IndexStoreArgsSchema.parse(args);
 logger4.info({ store: validated.store }, "Index store started");
 const { services, options } = context;
- const store = await services.store.getByIdOrName(createStoreId(validated.store));
+ const store = await services.store.getByIdOrName(validated.store);
 if (store === void 0) {
   logger4.warn({ store: validated.store }, "Store not found for indexing");
   throw new Error(`Store not found: ${validated.store}`);
@@ -1185,7 +1215,7 @@ var handleDeleteStore = async (args, context) => {
 const validated = DeleteStoreArgsSchema.parse(args);
 logger4.info({ store: validated.store }, "Delete store started");
 const { services, options } = context;
- const store = await services.store.getByIdOrName(createStoreId(validated.store));
+ const store = await services.store.getByIdOrName(validated.store);
 if (store === void 0) {
   logger4.warn({ store: validated.store }, "Store not found for deletion");
   throw new Error(`Store not found: ${validated.store}`);
@@ -1194,6 +1224,8 @@ var handleDeleteStore = async (args, context) => {
 await services.lance.deleteStore(store.id);
 logger4.debug({ storeId: store.id }, "Deleting code graph");
 await services.codeGraph.deleteGraph(store.id);
+ logger4.debug({ storeId: store.id }, "Deleting manifest");
+ await services.manifest.delete(store.id);
 if (store.type === "repo" && "url" in store && store.url !== void 0) {
   if (options.dataDir === void 0) {
     throw new Error("dataDir is required to delete cloned repository files");
@@ -1254,13 +1286,18 @@ var storeCommands = [
 },
 {
   name: "store:create",
-   description: "Create a new knowledge store from git URL or local path",
+   description: "Create a new knowledge store from git URL, local path, or web URL",
   argsSchema: z5.object({
     name: z5.string().min(1).describe("Store name"),
-     type: z5.enum(["file", "repo"]).describe("Store type"),
-     source: z5.string().min(1).describe("Git URL or local path"),
+     type: z5.enum(["file", "repo", "web"]).describe("Store type"),
+     source: z5.string().min(1).describe("Git URL, local path, or web URL"),
     branch: z5.string().optional().describe("Git branch (for repo type)"),
-     description: z5.string().optional().describe("Store description")
+     description: z5.string().optional().describe("Store description"),
+     tags: z5.array(z5.string()).optional().describe("Tags for categorizing the store"),
+     depth: z5.number().int().positive().optional().describe("Git clone depth (for repo type)"),
+     maxPages: z5.number().int().positive().optional().describe("Maximum pages to crawl (for web type)"),
+     crawlInstructions: z5.string().optional().describe("Instructions for crawler (for web type)"),
+     extractInstructions: z5.string().optional().describe("Instructions for content extraction (for web type)")
   }),
   handler: (args, context) => handleCreateStore(args, context)
 },
1283
1320
  ];
1284
1321
 
1285
1322
  // src/mcp/commands/sync.commands.ts
1323
+ import { rm as rm2 } from "fs/promises";
1324
+ import { join as join2 } from "path";
1286
1325
  import { z as z6 } from "zod";
1287
1326
  var logger5 = createLogger("mcp-sync");
1288
1327
  async function handleStoresSync(args, context) {
@@ -1340,6 +1379,14 @@ async function handleStoresSync(args, context) {
1340
1379
  for (const orphanName of result.orphans) {
1341
1380
  const store = await services.store.getByName(orphanName);
1342
1381
  if (store !== void 0) {
1382
+ await services.lance.deleteStore(store.id);
1383
+ await services.codeGraph.deleteGraph(store.id);
1384
+ await services.manifest.delete(store.id);
1385
+ if (store.type === "repo" && "url" in store && store.url !== void 0) {
1386
+ const dataDir = services.config.resolveDataDir();
1387
+ const repoPath = join2(dataDir, "repos", store.id);
1388
+ await rm2(repoPath, { recursive: true, force: true });
1389
+ }
1343
1390
  const deleteResult = await services.store.delete(store.id, { skipDefinitionSync: true });
1344
1391
  if (deleteResult.success) {
1345
1392
  result.pruned.push(orphanName);
@@ -1353,10 +1400,7 @@ async function handleStoresSync(args, context) {
1353
1400
  result.wouldReindex = [...result.skipped];
1354
1401
  } else {
1355
1402
  result.reindexJobs = [];
1356
- const dataDir = options.dataDir;
1357
- if (dataDir === void 0) {
1358
- throw new Error("dataDir is required for reindexing");
1359
- }
1403
+ const dataDir = options.dataDir ?? services.config.resolveDataDir();
1360
1404
  const jobService = new JobService(dataDir);
1361
1405
  for (const storeName of result.skipped) {
1362
1406
  const store = await services.store.getByName(storeName);
@@ -1486,9 +1530,9 @@ import { z as z7 } from "zod";
1486
1530
 
1487
1531
  // src/mcp/handlers/uninstall.handler.ts
1488
1532
  import { existsSync } from "fs";
1489
- import { readdir, rm as rm2 } from "fs/promises";
1533
+ import { readdir, rm as rm3 } from "fs/promises";
1490
1534
  import { homedir } from "os";
1491
- import { join as join2 } from "path";
1535
+ import { join as join3 } from "path";
1492
1536
  var logger6 = createLogger("uninstall-handler");
1493
1537
  var handleUninstall = async (args, context) => {
1494
1538
  const { global: includeGlobal = false, keepDefinitions = true } = args;
@@ -1496,20 +1540,20 @@ var handleUninstall = async (args, context) => {
1496
1540
  const kept = [];
1497
1541
  const errors = [];
1498
1542
  const projectRoot = context.options.projectRoot ?? process.cwd();
1499
- const projectDataDir = join2(projectRoot, ".bluera", "bluera-knowledge");
1543
+ const projectDataDir = join3(projectRoot, ".bluera", "bluera-knowledge");
1500
1544
  logger6.info({ projectDataDir, includeGlobal, keepDefinitions }, "Starting uninstall");
1501
1545
  if (existsSync(projectDataDir)) {
1502
1546
  if (keepDefinitions) {
1503
1547
  try {
1504
1548
  const entries = await readdir(projectDataDir, { withFileTypes: true });
1505
1549
  for (const entry of entries) {
1506
- const entryPath = join2(projectDataDir, entry.name);
1550
+ const entryPath = join3(projectDataDir, entry.name);
1507
1551
  if (entry.name === "stores.config.json") {
1508
1552
  kept.push(entryPath);
1509
1553
  continue;
1510
1554
  }
1511
1555
  try {
1512
- await rm2(entryPath, { recursive: true, force: true });
1556
+ await rm3(entryPath, { recursive: true, force: true });
1513
1557
  deleted.push(entryPath);
1514
1558
  } catch (err) {
1515
1559
  const msg = err instanceof Error ? err.message : String(err);
@@ -1524,7 +1568,7 @@ var handleUninstall = async (args, context) => {
1524
1568
  }
1525
1569
  } else {
1526
1570
  try {
1527
- await rm2(projectDataDir, { recursive: true, force: true });
1571
+ await rm3(projectDataDir, { recursive: true, force: true });
1528
1572
  deleted.push(projectDataDir);
1529
1573
  } catch (err) {
1530
1574
  const msg = err instanceof Error ? err.message : String(err);
@@ -1534,10 +1578,10 @@ var handleUninstall = async (args, context) => {
1534
1578
  }
1535
1579
  }
1536
1580
  if (includeGlobal) {
1537
- const globalDir = join2(homedir(), ".local", "share", "bluera-knowledge");
1581
+ const globalDir = join3(homedir(), ".local", "share", "bluera-knowledge");
1538
1582
  if (existsSync(globalDir)) {
1539
1583
  try {
1540
- await rm2(globalDir, { recursive: true, force: true });
1584
+ await rm3(globalDir, { recursive: true, force: true });
1541
1585
  deleted.push(globalDir);
1542
1586
  } catch (err) {
1543
1587
  const msg = err instanceof Error ? err.message : String(err);
@@ -1756,6 +1800,7 @@ var handleSearch = async (args, context) => {
1756
1800
  })
1757
1801
  ) : (await services.store.list()).map((s) => s.id);
1758
1802
  try {
1803
+ services.lance.setDimensions(await services.embeddings.ensureDimensions());
1759
1804
  for (const storeId of storeIds) {
1760
1805
  await services.lance.initialize(storeId);
1761
1806
  }
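Both search paths now push the embedding dimension into the Lance layer before any table is opened. A minimal sketch of the ordering constraint in isolation; the interfaces below are stand-ins for the real services in this chunk, and 384 is just a typical small-model dimension, not confirmed by the diff:

```ts
// Sketch of the ordering the fix enforces: LanceDB needs a fixed vector
// dimension in its table schema, so derive it from the model first.
interface EmbeddingsService {
  ensureDimensions(): Promise<number>; // e.g. 384, derived from model output
}
interface LanceService {
  setDimensions(dims: number): void;
  initialize(storeId: string): Promise<void>;
}

async function openStore(
  embeddings: EmbeddingsService,
  lance: LanceService,
  storeId: string
): Promise<void> {
  lance.setDimensions(await embeddings.ensureDimensions()); // before initialize
  await lance.initialize(storeId); // table schema now has the right dimension
}
```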
@@ -1771,7 +1816,8 @@ var handleSearch = async (args, context) => {
   limit: validated.limit,
   detail: validated.detail,
   threshold: validated.threshold,
-   minRelevance: validated.minRelevance
+   minRelevance: validated.minRelevance,
+   intent: validated.intent
 };
 const results = await services.search.search(searchQuery);
 for (const result of results.results) {
@@ -1873,6 +1919,7 @@ var handleGetFullContext = async (args, context) => {
 if (!store) {
   throw new Error(`Store not found: ${cachedResult.metadata.storeId}`);
 }
+ services.lance.setDimensions(await services.embeddings.ensureDimensions());
 await services.lance.initialize(store.id);
 const searchQuery = {
   query: cachedResult.content.substring(0, 100),
@@ -2046,7 +2093,7 @@ function createMCPServer(options, services) {
 // Meta-tool for store and job management (consolidates 8 tools into 1)
 {
   name: "execute",
-   description: "Execute store/job management commands. Commands: stores, store:info, store:create, store:index, store:delete, jobs, job:status, job:cancel, help, commands",
+   description: "Execute store/job management commands. Commands: stores, store:info, store:create, store:index, store:delete, jobs, job:status, job:cancel, stores:sync, uninstall, help, commands",
   inputSchema: {
     type: "object",
     properties: {
@@ -2155,4 +2202,4 @@ export {
 createMCPServer,
 runMCPServer
 };
- //# sourceMappingURL=chunk-YMDXPECI.js.map
+ //# sourceMappingURL=chunk-EZXJ3W5X.js.map