bluera-knowledge 0.9.30 → 0.9.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,7 +3,7 @@ import {
  createLogger,
  summarizePayload,
  truncateForLog
- } from "./chunk-NJUMU4X2.js";
+ } from "./chunk-6PBP5DVD.js";

  // src/crawl/intelligent-crawler.ts
  import { EventEmitter } from "events";
@@ -718,4 +718,4 @@ var IntelligentCrawler = class extends EventEmitter {
  export {
  IntelligentCrawler
  };
- //# sourceMappingURL=chunk-DNOIM7BO.js.map
+ //# sourceMappingURL=chunk-RST4XGRL.js.map
@@ -4,7 +4,7 @@ import {
  createServices,
  createStoreId,
  summarizePayload
- } from "./chunk-NJUMU4X2.js";
+ } from "./chunk-6PBP5DVD.js";

  // src/mcp/server.ts
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -1020,4 +1020,4 @@ export {
  createMCPServer,
  runMCPServer
  };
- //# sourceMappingURL=chunk-SZNTYLYT.js.map
+ //# sourceMappingURL=chunk-WT2DAEO7.js.map
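
Note that every chunk rename above follows from one underlying edit: the shared chunk (chunk-NJUMU4X2.js → chunk-6PBP5DVD.js) changed, and with content-hashed chunk names that rename cascades into every chunk that imports it, which is why chunk-DNOIM7BO.js and chunk-SZNTYLYT.js also got new hashes. A minimal sketch of the kind of build setup that produces this behavior, assuming an esbuild-style code-splitting config (the package's actual build configuration is not part of this diff):

// A minimal sketch, assuming an esbuild-based build with code splitting.
import { build } from "esbuild";

await build({
  entryPoints: ["src/index.ts"],
  bundle: true,
  format: "esm",
  splitting: true,            // shared code is hoisted into chunk-*.js files
  outdir: "dist",
  chunkNames: "chunk-[hash]", // content-addressed names: editing the shared
                              // chunk changes its hash, and every chunk that
                              // imports it is rewritten to the new filename
});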
package/dist/index.js CHANGED
@@ -1,10 +1,10 @@
  #!/usr/bin/env node
  import {
  runMCPServer
- } from "./chunk-SZNTYLYT.js";
+ } from "./chunk-WT2DAEO7.js";
  import {
  IntelligentCrawler
- } from "./chunk-DNOIM7BO.js";
+ } from "./chunk-RST4XGRL.js";
  import {
  ASTParser,
  ChunkingService,
@@ -16,7 +16,7 @@ import {
  err,
  extractRepoName,
  ok
- } from "./chunk-NJUMU4X2.js";
+ } from "./chunk-6PBP5DVD.js";
  import "./chunk-L2YVNC63.js";

  // src/index.ts
@@ -87,12 +87,14 @@ function createStoreCommand(getOptions) {
  store.command("create <name>").description("Create a new store pointing to a local path or URL").requiredOption("-t, --type <type>", "Store type: file (local dir), repo (git), web (crawled site)").requiredOption("-s, --source <path>", "Local path for file/repo stores, URL for web stores").option("-d, --description <desc>", "Optional description for the store").option("--tags <tags>", "Comma-separated tags for filtering").action(async (name, options) => {
  const globalOpts = getOptions();
  const services = await createServices(globalOpts.config, globalOpts.dataDir);
+ let exitCode = 0;
  try {
+ const isUrl = options.source.startsWith("http://") || options.source.startsWith("https://");
  const result = await services.store.create({
  name,
  type: options.type,
- path: options.type !== "web" ? options.source : void 0,
- url: options.type === "web" ? options.source : void 0,
+ path: options.type === "file" || options.type === "repo" && !isUrl ? options.source : void 0,
+ url: options.type === "web" || options.type === "repo" && isUrl ? options.source : void 0,
  description: options.description,
  tags: options.tags?.split(",").map((t) => t.trim())
  });
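
The substantive change in this hunk is how repo stores route their source. Since && binds tighter than ||, the new conditions group as `"file" || ("repo" && !isUrl)` for path and `"web" || ("repo" && isUrl)` for url: a repo store now sends a git URL to url and a local checkout to path, where 0.9.30 sent every non-web source to path. A standalone paraphrase of that logic with the implicit grouping made explicit (routeSource and StoreType are illustrative names, not exports of the package):

type StoreType = "file" | "repo" | "web";

// Paraphrase of the new routing in services.store.create().
function routeSource(type: StoreType, source: string) {
  const isUrl = source.startsWith("http://") || source.startsWith("https://");
  return {
    path: type === "file" || (type === "repo" && !isUrl) ? source : undefined,
    url: type === "web" || (type === "repo" && isUrl) ? source : undefined,
  };
}

// routeSource("repo", "/home/me/project")
//   -> { path: "/home/me/project", url: undefined }
// routeSource("repo", "https://github.com/org/repo.git")
//   -> { path: undefined, url: "https://github.com/org/repo.git" }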
@@ -106,11 +108,14 @@ Created store: ${result.data.name} (${result.data.id})
  }
  } else {
  console.error(`Error: ${result.error.message}`);
- process.exit(1);
+ exitCode = 1;
  }
  } finally {
  await destroyServices(services);
  }
+ if (exitCode !== 0) {
+ process.exit(exitCode);
+ }
  });
  store.command("info <store>").description("Show store details: ID, type, path/URL, timestamps").action(async (storeIdOrName) => {
  const globalOpts = getOptions();
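
The exitCode refactor fixes a cleanup bug: process.exit() terminates the process immediately without unwinding the stack, so calling it inside the try block meant the finally block, and its await destroyServices(services), never ran. Recording the code and exiting only after the finally block guarantees cleanup. A minimal sketch of the before/after behavior (cleanup is a stand-in for destroyServices):

async function before() {
  try {
    process.exit(1); // terminates the process here; finally is never reached
  } finally {
    await cleanup(); // skipped in 0.9.30
  }
}

async function after() {
  let exitCode = 0;
  try {
    exitCode = 1;    // record the failure instead of exiting immediately
  } finally {
    await cleanup(); // always runs
  }
  if (exitCode !== 0) {
    process.exit(exitCode); // exit only once cleanup has finished
  }
}

async function cleanup() {
  // stand-in for destroyServices(services)
}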
@@ -510,6 +515,7 @@ function createCrawlCommand(getOptions) {
  const webChunker = ChunkingService.forContentType("web");
  let pagesIndexed = 0;
  let chunksCreated = 0;
+ let exitCode = 0;
  crawler.on("progress", (progress) => {
  if (spinner) {
  if (progress.type === "strategy") {
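
The context lines show the listener this hunk threads exitCode past: the crawler reports progress through EventEmitter events that the CLI narrows on progress.type. A minimal sketch of that pattern, assuming a discriminated-union event shape (the real progress payload is not visible in this diff):

import { EventEmitter } from "events";

// Illustrative shape only; the package's actual progress type is not shown.
type CrawlProgress =
  | { type: "strategy"; name: string }
  | { type: "page"; url: string; chunks: number };

const crawler = new EventEmitter();

crawler.on("progress", (progress: CrawlProgress) => {
  if (progress.type === "strategy") {
    console.log(`strategy: ${progress.name}`);
  } else {
    console.log(`indexed ${progress.url} (${progress.chunks} chunks)`);
  }
});

crawler.emit("progress", { type: "strategy", name: "sitemap" });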
@@ -595,11 +601,14 @@ function createCrawlCommand(getOptions) {
  } else {
  console.error(`Error: ${message}`);
  }
- process.exit(6);
+ exitCode = 6;
  } finally {
  await crawler.stop();
  await destroyServices(services);
  }
+ if (exitCode !== 0) {
+ process.exit(exitCode);
+ }
  });
  }
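
The crawl command keeps its distinct exit code (6) under the new pattern, so scripted callers can still branch on the status while cleanup (crawler.stop(), destroyServices) is guaranteed to finish first. A sketch of a caller observing that code, assuming the installed binary is named bluera-knowledge (the bin name and arguments are not shown in this diff):

import { spawnSync } from "node:child_process";

// Hypothetical invocation; command shape is an assumption for illustration.
const result = spawnSync("bluera-knowledge", ["crawl", "https://example.com"], {
  stdio: "inherit",
});

if (result.status === 6) {
  // Crawl failed, but the finally block already stopped the crawler and
  // destroyed services before the process exited.
  console.error("crawl failed (exit code 6)");
}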