bluera-knowledge 0.9.32 → 0.9.36

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (198)
  1. package/.claude/hooks/post-edit-check.sh +5 -3
  2. package/.claude/skills/atomic-commits/SKILL.md +3 -1
  3. package/.husky/pre-commit +3 -2
  4. package/.prettierrc +9 -0
  5. package/.versionrc.json +1 -1
  6. package/CHANGELOG.md +70 -0
  7. package/CLAUDE.md +6 -0
  8. package/README.md +25 -13
  9. package/bun.lock +277 -33
  10. package/dist/{chunk-L2YVNC63.js → chunk-6FHWC36B.js} +9 -1
  11. package/dist/chunk-6FHWC36B.js.map +1 -0
  12. package/dist/{chunk-RST4XGRL.js → chunk-DC7CGSGT.js} +288 -241
  13. package/dist/chunk-DC7CGSGT.js.map +1 -0
  14. package/dist/{chunk-6PBP5DVD.js → chunk-WFNPNAAP.js} +3212 -3054
  15. package/dist/chunk-WFNPNAAP.js.map +1 -0
  16. package/dist/{chunk-WT2DAEO7.js → chunk-Z2KKVH45.js} +548 -482
  17. package/dist/chunk-Z2KKVH45.js.map +1 -0
  18. package/dist/index.js +871 -758
  19. package/dist/index.js.map +1 -1
  20. package/dist/mcp/server.js +3 -3
  21. package/dist/watch.service-BJV3TI3F.js +7 -0
  22. package/dist/workers/background-worker-cli.js +97 -71
  23. package/dist/workers/background-worker-cli.js.map +1 -1
  24. package/eslint.config.js +43 -1
  25. package/package.json +18 -11
  26. package/plugin.json +8 -0
  27. package/python/requirements.txt +1 -1
  28. package/src/analysis/ast-parser.test.ts +12 -11
  29. package/src/analysis/ast-parser.ts +28 -22
  30. package/src/analysis/code-graph.test.ts +52 -62
  31. package/src/analysis/code-graph.ts +9 -13
  32. package/src/analysis/dependency-usage-analyzer.test.ts +91 -271
  33. package/src/analysis/dependency-usage-analyzer.ts +52 -24
  34. package/src/analysis/go-ast-parser.test.ts +22 -22
  35. package/src/analysis/go-ast-parser.ts +18 -25
  36. package/src/analysis/parser-factory.test.ts +9 -9
  37. package/src/analysis/parser-factory.ts +3 -3
  38. package/src/analysis/python-ast-parser.test.ts +27 -27
  39. package/src/analysis/python-ast-parser.ts +2 -2
  40. package/src/analysis/repo-url-resolver.test.ts +82 -82
  41. package/src/analysis/rust-ast-parser.test.ts +19 -19
  42. package/src/analysis/rust-ast-parser.ts +17 -27
  43. package/src/analysis/tree-sitter-parser.test.ts +3 -3
  44. package/src/analysis/tree-sitter-parser.ts +10 -16
  45. package/src/cli/commands/crawl.test.ts +40 -24
  46. package/src/cli/commands/crawl.ts +186 -166
  47. package/src/cli/commands/index-cmd.test.ts +90 -90
  48. package/src/cli/commands/index-cmd.ts +52 -36
  49. package/src/cli/commands/mcp.test.ts +6 -6
  50. package/src/cli/commands/mcp.ts +2 -2
  51. package/src/cli/commands/plugin-api.test.ts +16 -18
  52. package/src/cli/commands/plugin-api.ts +9 -6
  53. package/src/cli/commands/search.test.ts +16 -7
  54. package/src/cli/commands/search.ts +124 -87
  55. package/src/cli/commands/serve.test.ts +67 -25
  56. package/src/cli/commands/serve.ts +18 -3
  57. package/src/cli/commands/setup.test.ts +176 -101
  58. package/src/cli/commands/setup.ts +140 -117
  59. package/src/cli/commands/store.test.ts +82 -53
  60. package/src/cli/commands/store.ts +56 -37
  61. package/src/cli/program.ts +2 -2
  62. package/src/crawl/article-converter.test.ts +4 -1
  63. package/src/crawl/article-converter.ts +46 -31
  64. package/src/crawl/bridge.test.ts +240 -132
  65. package/src/crawl/bridge.ts +87 -30
  66. package/src/crawl/claude-client.test.ts +124 -56
  67. package/src/crawl/claude-client.ts +7 -15
  68. package/src/crawl/intelligent-crawler.test.ts +65 -22
  69. package/src/crawl/intelligent-crawler.ts +86 -53
  70. package/src/crawl/markdown-utils.ts +1 -4
  71. package/src/db/embeddings.ts +4 -6
  72. package/src/db/lance.test.ts +4 -4
  73. package/src/db/lance.ts +16 -12
  74. package/src/index.ts +26 -17
  75. package/src/logging/index.ts +1 -5
  76. package/src/logging/logger.ts +3 -5
  77. package/src/logging/payload.test.ts +1 -1
  78. package/src/logging/payload.ts +3 -5
  79. package/src/mcp/commands/index.ts +2 -2
  80. package/src/mcp/commands/job.commands.ts +12 -18
  81. package/src/mcp/commands/meta.commands.ts +13 -13
  82. package/src/mcp/commands/registry.ts +5 -8
  83. package/src/mcp/commands/store.commands.ts +19 -19
  84. package/src/mcp/handlers/execute.handler.test.ts +10 -10
  85. package/src/mcp/handlers/execute.handler.ts +4 -5
  86. package/src/mcp/handlers/index.ts +10 -14
  87. package/src/mcp/handlers/job.handler.test.ts +10 -10
  88. package/src/mcp/handlers/job.handler.ts +22 -25
  89. package/src/mcp/handlers/search.handler.test.ts +36 -65
  90. package/src/mcp/handlers/search.handler.ts +135 -104
  91. package/src/mcp/handlers/store.handler.test.ts +41 -52
  92. package/src/mcp/handlers/store.handler.ts +108 -88
  93. package/src/mcp/schemas/index.test.ts +73 -68
  94. package/src/mcp/schemas/index.ts +18 -12
  95. package/src/mcp/server.test.ts +1 -1
  96. package/src/mcp/server.ts +59 -46
  97. package/src/plugin/commands.test.ts +230 -95
  98. package/src/plugin/commands.ts +24 -25
  99. package/src/plugin/dependency-analyzer.test.ts +52 -52
  100. package/src/plugin/dependency-analyzer.ts +85 -22
  101. package/src/plugin/git-clone.test.ts +24 -13
  102. package/src/plugin/git-clone.ts +3 -7
  103. package/src/server/app.test.ts +109 -109
  104. package/src/server/app.ts +32 -23
  105. package/src/server/index.test.ts +64 -66
  106. package/src/services/chunking.service.test.ts +32 -32
  107. package/src/services/chunking.service.ts +16 -9
  108. package/src/services/code-graph.service.test.ts +30 -36
  109. package/src/services/code-graph.service.ts +24 -10
  110. package/src/services/code-unit.service.test.ts +55 -11
  111. package/src/services/code-unit.service.ts +85 -11
  112. package/src/services/config.service.test.ts +37 -18
  113. package/src/services/config.service.ts +30 -7
  114. package/src/services/index.service.test.ts +49 -18
  115. package/src/services/index.service.ts +98 -48
  116. package/src/services/index.ts +6 -9
  117. package/src/services/job.service.test.ts +22 -22
  118. package/src/services/job.service.ts +18 -18
  119. package/src/services/project-root.service.test.ts +1 -3
  120. package/src/services/search.service.test.ts +248 -120
  121. package/src/services/search.service.ts +286 -156
  122. package/src/services/services.test.ts +1 -1
  123. package/src/services/snippet.service.test.ts +14 -6
  124. package/src/services/snippet.service.ts +7 -5
  125. package/src/services/store.service.test.ts +68 -29
  126. package/src/services/store.service.ts +41 -12
  127. package/src/services/watch.service.test.ts +34 -14
  128. package/src/services/watch.service.ts +11 -1
  129. package/src/types/brands.test.ts +3 -1
  130. package/src/types/index.ts +2 -13
  131. package/src/types/search.ts +10 -8
  132. package/src/utils/type-guards.test.ts +20 -15
  133. package/src/utils/type-guards.ts +1 -1
  134. package/src/workers/background-worker-cli.ts +28 -30
  135. package/src/workers/background-worker.test.ts +54 -40
  136. package/src/workers/background-worker.ts +76 -60
  137. package/src/workers/pid-file.test.ts +167 -0
  138. package/src/workers/pid-file.ts +82 -0
  139. package/src/workers/spawn-worker.test.ts +22 -10
  140. package/src/workers/spawn-worker.ts +6 -6
  141. package/tests/analysis/ast-parser.test.ts +3 -3
  142. package/tests/analysis/code-graph.test.ts +5 -5
  143. package/tests/fixtures/code-snippets/api/error-handling.ts +4 -15
  144. package/tests/fixtures/code-snippets/api/rest-controller.ts +3 -9
  145. package/tests/fixtures/code-snippets/auth/jwt-auth.ts +5 -21
  146. package/tests/fixtures/code-snippets/auth/oauth-flow.ts +4 -4
  147. package/tests/fixtures/code-snippets/database/repository-pattern.ts +11 -3
  148. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/aws-lambda/handler.ts +2 -2
  149. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-pages/handler.ts +1 -1
  150. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/serve-static.ts +2 -2
  151. package/tests/fixtures/corpus/oss-repos/hono/src/client/client.ts +2 -2
  152. package/tests/fixtures/corpus/oss-repos/hono/src/client/types.ts +22 -20
  153. package/tests/fixtures/corpus/oss-repos/hono/src/context.ts +13 -10
  154. package/tests/fixtures/corpus/oss-repos/hono/src/helper/accepts/accepts.ts +10 -7
  155. package/tests/fixtures/corpus/oss-repos/hono/src/helper/adapter/index.ts +2 -2
  156. package/tests/fixtures/corpus/oss-repos/hono/src/helper/css/index.ts +1 -1
  157. package/tests/fixtures/corpus/oss-repos/hono/src/helper/factory/index.ts +16 -16
  158. package/tests/fixtures/corpus/oss-repos/hono/src/helper/ssg/ssg.ts +2 -2
  159. package/tests/fixtures/corpus/oss-repos/hono/src/hono-base.ts +3 -3
  160. package/tests/fixtures/corpus/oss-repos/hono/src/hono.ts +1 -1
  161. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/css.ts +2 -2
  162. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/intrinsic-element/components.ts +1 -1
  163. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/render.ts +7 -7
  164. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/hooks/index.ts +3 -3
  165. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/intrinsic-element/components.ts +1 -1
  166. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/utils.ts +6 -6
  167. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/jsx-renderer/index.ts +3 -3
  168. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/serve-static/index.ts +1 -1
  169. package/tests/fixtures/corpus/oss-repos/hono/src/preset/quick.ts +1 -1
  170. package/tests/fixtures/corpus/oss-repos/hono/src/preset/tiny.ts +1 -1
  171. package/tests/fixtures/corpus/oss-repos/hono/src/router/pattern-router/router.ts +2 -2
  172. package/tests/fixtures/corpus/oss-repos/hono/src/router/reg-exp-router/node.ts +4 -4
  173. package/tests/fixtures/corpus/oss-repos/hono/src/router/reg-exp-router/router.ts +1 -1
  174. package/tests/fixtures/corpus/oss-repos/hono/src/router/trie-router/node.ts +1 -1
  175. package/tests/fixtures/corpus/oss-repos/hono/src/types.ts +166 -169
  176. package/tests/fixtures/corpus/oss-repos/hono/src/utils/body.ts +8 -8
  177. package/tests/fixtures/corpus/oss-repos/hono/src/utils/color.ts +3 -3
  178. package/tests/fixtures/corpus/oss-repos/hono/src/utils/cookie.ts +2 -2
  179. package/tests/fixtures/corpus/oss-repos/hono/src/utils/encode.ts +2 -2
  180. package/tests/fixtures/corpus/oss-repos/hono/src/utils/types.ts +30 -33
  181. package/tests/fixtures/corpus/oss-repos/hono/src/validator/validator.ts +2 -2
  182. package/tests/fixtures/test-server.ts +3 -2
  183. package/tests/helpers/performance-metrics.ts +8 -25
  184. package/tests/helpers/search-relevance.ts +14 -69
  185. package/tests/integration/cli-consistency.test.ts +6 -5
  186. package/tests/integration/python-bridge.test.ts +13 -3
  187. package/tests/mcp/server.test.ts +1 -1
  188. package/tests/services/code-unit.service.test.ts +48 -0
  189. package/tests/services/job.service.test.ts +124 -0
  190. package/tests/services/search.progressive-context.test.ts +2 -2
  191. package/.claude-plugin/plugin.json +0 -13
  192. package/dist/chunk-6PBP5DVD.js.map +0 -1
  193. package/dist/chunk-L2YVNC63.js.map +0 -1
  194. package/dist/chunk-RST4XGRL.js.map +0 -1
  195. package/dist/chunk-WT2DAEO7.js.map +0 -1
  196. package/dist/watch.service-YAIKKDCF.js +0 -7
  197. package/skills/atomic-commits/SKILL.md +0 -77
  198. /package/dist/{watch.service-YAIKKDCF.js.map → watch.service-BJV3TI3F.js.map} +0 -0
@@ -0,0 +1,7 @@
1
+ import {
2
+ WatchService
3
+ } from "./chunk-6FHWC36B.js";
4
+ export {
5
+ WatchService
6
+ };
7
+ //# sourceMappingURL=watch.service-BJV3TI3F.js.map
@@ -1,21 +1,21 @@
1
1
  #!/usr/bin/env node
2
2
  import {
3
3
  IntelligentCrawler
4
- } from "../chunk-RST4XGRL.js";
4
+ } from "../chunk-DC7CGSGT.js";
5
5
  import {
6
6
  JobService,
7
7
  createDocumentId,
8
8
  createServices,
9
9
  createStoreId
10
- } from "../chunk-6PBP5DVD.js";
11
- import "../chunk-L2YVNC63.js";
12
-
13
- // src/workers/background-worker-cli.ts
14
- import fs from "fs";
15
- import path from "path";
10
+ } from "../chunk-WFNPNAAP.js";
11
+ import "../chunk-6FHWC36B.js";
16
12
 
17
13
  // src/workers/background-worker.ts
18
14
  import { createHash } from "crypto";
15
+ function calculateIndexProgress(current, total, scale = 100) {
16
+ if (total === 0) return 0;
17
+ return current / total * scale;
18
+ }
19
19
  var BackgroundWorker = class {
20
20
  constructor(jobService, storeService, indexService, lanceStore, embeddingEngine) {
21
21
  this.jobService = jobService;
@@ -92,23 +92,26 @@ var BackgroundWorker = class {
92
92
  message: "Repository cloned, starting indexing...",
93
93
  progress: 30
94
94
  });
95
- const result = await this.indexService.indexStore(store, (event) => {
96
- const currentJob = this.jobService.getJob(job.id);
97
- if (currentJob?.status === "cancelled") {
98
- throw new Error("Job cancelled by user");
99
- }
100
- const indexProgress = event.current / event.total * 70;
101
- const totalProgress = 30 + indexProgress;
102
- this.jobService.updateJob(job.id, {
103
- message: `Indexed ${String(event.current)}/${String(event.total)} files`,
104
- progress: Math.min(99, totalProgress),
105
- // Cap at 99 until fully complete
106
- details: {
107
- filesProcessed: event.current,
108
- totalFiles: event.total
95
+ const result = await this.indexService.indexStore(
96
+ store,
97
+ (event) => {
98
+ const currentJob = this.jobService.getJob(job.id);
99
+ if (currentJob?.status === "cancelled") {
100
+ throw new Error("Job cancelled by user");
109
101
  }
110
- });
111
- });
102
+ const indexProgress = calculateIndexProgress(event.current, event.total, 70);
103
+ const totalProgress = 30 + indexProgress;
104
+ this.jobService.updateJob(job.id, {
105
+ message: `Indexed ${String(event.current)}/${String(event.total)} files`,
106
+ progress: Math.min(99, totalProgress),
107
+ // Cap at 99 until fully complete
108
+ details: {
109
+ filesProcessed: event.current,
110
+ totalFiles: event.total
111
+ }
112
+ });
113
+ }
114
+ );
112
115
  if (!result.success) {
113
116
  throw result.error;
114
117
  }
@@ -125,22 +128,25 @@ var BackgroundWorker = class {
125
128
  if (!store) {
126
129
  throw new Error(`Store ${storeId} not found`);
127
130
  }
128
- const result = await this.indexService.indexStore(store, (event) => {
129
- const currentJob = this.jobService.getJob(job.id);
130
- if (currentJob?.status === "cancelled") {
131
- throw new Error("Job cancelled by user");
132
- }
133
- const progress = event.current / event.total * 100;
134
- this.jobService.updateJob(job.id, {
135
- message: `Indexed ${String(event.current)}/${String(event.total)} files`,
136
- progress: Math.min(99, progress),
137
- // Cap at 99 until fully complete
138
- details: {
139
- filesProcessed: event.current,
140
- totalFiles: event.total
131
+ const result = await this.indexService.indexStore(
132
+ store,
133
+ (event) => {
134
+ const currentJob = this.jobService.getJob(job.id);
135
+ if (currentJob?.status === "cancelled") {
136
+ throw new Error("Job cancelled by user");
141
137
  }
142
- });
143
- });
138
+ const progress = calculateIndexProgress(event.current, event.total);
139
+ this.jobService.updateJob(job.id, {
140
+ message: `Indexed ${String(event.current)}/${String(event.total)} files`,
141
+ progress: Math.min(99, progress),
142
+ // Cap at 99 until fully complete
143
+ details: {
144
+ filesProcessed: event.current,
145
+ totalFiles: event.total
146
+ }
147
+ });
148
+ }
149
+ );
144
150
  if (!result.success) {
145
151
  throw result.error;
146
152
  }
@@ -149,15 +155,7 @@ var BackgroundWorker = class {
149
155
  * Execute a crawl job (web crawling + indexing)
150
156
  */
151
157
  async executeCrawlJob(job) {
152
- const {
153
- storeId,
154
- url,
155
- crawlInstruction,
156
- extractInstruction,
157
- maxPages,
158
- simple,
159
- useHeadless
160
- } = job.details;
158
+ const { storeId, url, crawlInstruction, extractInstruction, maxPages, simple, useHeadless } = job.details;
161
159
  if (storeId === void 0 || typeof storeId !== "string") {
162
160
  throw new Error("Store ID required for crawl job");
163
161
  }
@@ -165,7 +163,7 @@ var BackgroundWorker = class {
165
163
  throw new Error("URL required for crawl job");
166
164
  }
167
165
  const store = await this.storeService.get(createStoreId(storeId));
168
- if (!store || store.type !== "web") {
166
+ if (store?.type !== "web") {
169
167
  throw new Error(`Web store ${storeId} not found`);
170
168
  }
171
169
  const resolvedMaxPages = typeof maxPages === "number" ? maxPages : 50;
@@ -173,7 +171,6 @@ var BackgroundWorker = class {
173
171
  crawler.on("progress", (progress) => {
174
172
  const currentJob = this.jobService.getJob(job.id);
175
173
  if (currentJob?.status === "cancelled") {
176
- void crawler.stop();
177
174
  return;
178
175
  }
179
176
  const crawlProgress = progress.pagesVisited / resolvedMaxPages * 80;
@@ -237,6 +234,37 @@ var BackgroundWorker = class {
237
234
  }
238
235
  };
239
236
 
237
+ // src/workers/pid-file.ts
238
+ import fs from "fs";
239
+ import path from "path";
240
+ function writePidFile(pidFile, pid) {
241
+ try {
242
+ fs.writeFileSync(pidFile, pid.toString(), "utf-8");
243
+ } catch (error) {
244
+ const message = error instanceof Error ? error.message : String(error);
245
+ throw new Error(
246
+ `CRITICAL: Failed to write PID file ${pidFile}. Job cannot be cancelled without PID file. Original error: ${message}`
247
+ );
248
+ }
249
+ }
250
+ function deletePidFile(pidFile, _context) {
251
+ try {
252
+ fs.unlinkSync(pidFile);
253
+ return { success: true };
254
+ } catch (error) {
255
+ if (error instanceof Error && "code" in error && error.code === "ENOENT") {
256
+ return { success: true };
257
+ }
258
+ return {
259
+ success: false,
260
+ error: error instanceof Error ? error : new Error(String(error))
261
+ };
262
+ }
263
+ }
264
+ function buildPidFilePath(jobsDir, jobId) {
265
+ return path.join(jobsDir, `${jobId}.pid`);
266
+ }
267
+
240
268
  // src/workers/background-worker-cli.ts
241
269
  async function main() {
242
270
  const jobId = process.argv[2];
@@ -248,15 +276,16 @@ async function main() {
248
276
  }
249
277
  const jobService = new JobService(dataDir);
250
278
  const services = await createServices(void 0, dataDir);
251
- const pidFile = path.join(
279
+ const pidFile = buildPidFilePath(
252
280
  jobService["jobsDir"],
253
281
  // Access private field for PID path
254
- `${jobId}.pid`
282
+ jobId
255
283
  );
256
284
  try {
257
- fs.writeFileSync(pidFile, process.pid.toString(), "utf-8");
285
+ writePidFile(pidFile, process.pid);
258
286
  } catch (error) {
259
- console.error("Warning: Could not write PID file:", error);
287
+ console.error(error instanceof Error ? error.message : String(error));
288
+ process.exit(1);
260
289
  }
261
290
  process.on("SIGTERM", () => {
262
291
  console.log(`[${jobId}] Received SIGTERM, cancelling job...`);
@@ -264,12 +293,11 @@ async function main() {
264
293
  status: "cancelled",
265
294
  message: "Job cancelled by user"
266
295
  });
267
- try {
268
- if (fs.existsSync(pidFile)) {
269
- fs.unlinkSync(pidFile);
270
- }
271
- } catch (error) {
272
- console.error("Warning: Could not remove PID file:", error);
296
+ const deleteResult = deletePidFile(pidFile, "sigterm");
297
+ if (!deleteResult.success && deleteResult.error !== void 0) {
298
+ console.error(
299
+ `Warning: Could not remove PID file during SIGTERM: ${deleteResult.error.message}`
300
+ );
273
301
  }
274
302
  process.exit(0);
275
303
  });
@@ -282,23 +310,21 @@ async function main() {
282
310
  );
283
311
  try {
284
312
  await worker.executeJob(jobId);
285
- try {
286
- if (fs.existsSync(pidFile)) {
287
- fs.unlinkSync(pidFile);
288
- }
289
- } catch (error) {
290
- console.error("Warning: Could not remove PID file:", error);
313
+ const successCleanup = deletePidFile(pidFile, "success");
314
+ if (!successCleanup.success && successCleanup.error !== void 0) {
315
+ console.error(
316
+ `Warning: Could not remove PID file after success: ${successCleanup.error.message}`
317
+ );
291
318
  }
292
319
  console.log(`[${jobId}] Job completed successfully`);
293
320
  process.exit(0);
294
321
  } catch (error) {
295
322
  console.error(`[${jobId}] Job failed:`, error);
296
- try {
297
- if (fs.existsSync(pidFile)) {
298
- fs.unlinkSync(pidFile);
299
- }
300
- } catch (cleanupError) {
301
- console.error("Warning: Could not remove PID file:", cleanupError);
323
+ const failureCleanup = deletePidFile(pidFile, "failure");
324
+ if (!failureCleanup.success && failureCleanup.error !== void 0) {
325
+ console.error(
326
+ `Warning: Could not remove PID file after failure: ${failureCleanup.error.message}`
327
+ );
302
328
  }
303
329
  process.exit(1);
304
330
  }
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/workers/background-worker-cli.ts","../../src/workers/background-worker.ts"],"sourcesContent":["#!/usr/bin/env node\nimport fs from 'fs';\nimport path from 'path';\nimport { JobService } from '../services/job.service.js';\nimport { BackgroundWorker } from './background-worker.js';\nimport { createServices } from '../services/index.js';\n\n/**\n * Background worker CLI entry point\n *\n * Usage: background-worker-cli <job-id>\n *\n * This process runs detached from the parent and executes a single job.\n */\n\nasync function main(): Promise<void> {\n const jobId = process.argv[2];\n const dataDir = process.env['BLUERA_DATA_DIR'];\n\n if (jobId === undefined || jobId === '') {\n console.error('Error: Job ID required');\n console.error('Usage: background-worker-cli <job-id>');\n process.exit(1);\n }\n\n // Initialize services\n const jobService = new JobService(dataDir);\n const services = await createServices(undefined, dataDir);\n\n // Write PID file for job cancellation\n const pidFile = path.join(\n jobService['jobsDir'], // Access private field for PID path\n `${jobId}.pid`\n );\n\n try {\n fs.writeFileSync(pidFile, process.pid.toString(), 'utf-8');\n } catch (error) {\n console.error('Warning: Could not write PID file:', error);\n }\n\n // Handle SIGTERM for graceful shutdown\n process.on('SIGTERM', () => {\n console.log(`[${jobId}] Received SIGTERM, cancelling job...`);\n jobService.updateJob(jobId, {\n status: 'cancelled',\n message: 'Job cancelled by user'\n });\n\n // Clean up PID file\n try {\n if (fs.existsSync(pidFile)) {\n fs.unlinkSync(pidFile);\n }\n } catch (error) {\n console.error('Warning: Could not remove PID file:', error);\n }\n\n process.exit(0);\n });\n\n // Create worker and execute job\n const worker = new BackgroundWorker(\n jobService,\n services.store,\n services.index,\n services.lance,\n services.embeddings\n );\n\n try {\n await worker.executeJob(jobId);\n\n // Clean up PID file on success\n try {\n 
if (fs.existsSync(pidFile)) {\n fs.unlinkSync(pidFile);\n }\n } catch (error) {\n console.error('Warning: Could not remove PID file:', error);\n }\n\n console.log(`[${jobId}] Job completed successfully`);\n process.exit(0);\n } catch (error) {\n // Job service already updated with failure status in BackgroundWorker\n console.error(`[${jobId}] Job failed:`, error);\n\n // Clean up PID file on failure\n try {\n if (fs.existsSync(pidFile)) {\n fs.unlinkSync(pidFile);\n }\n } catch (cleanupError) {\n console.error('Warning: Could not remove PID file:', cleanupError);\n }\n\n process.exit(1);\n }\n}\n\nmain().catch((error: unknown) => {\n console.error('Fatal error in background worker:', error);\n process.exit(1);\n});\n","import { createHash } from 'node:crypto';\nimport { JobService } from '../services/job.service.js';\nimport { StoreService } from '../services/store.service.js';\nimport { IndexService } from '../services/index.service.js';\nimport type { LanceStore } from '../db/lance.js';\nimport type { EmbeddingEngine } from '../db/embeddings.js';\nimport { IntelligentCrawler, type CrawlProgress } from '../crawl/intelligent-crawler.js';\nimport type { Job } from '../types/job.js';\nimport type { Document } from '../types/document.js';\nimport { createStoreId, createDocumentId } from '../types/brands.js';\n\nexport class BackgroundWorker {\n constructor(\n private readonly jobService: JobService,\n private readonly storeService: StoreService,\n private readonly indexService: IndexService,\n private readonly lanceStore: LanceStore,\n private readonly embeddingEngine: EmbeddingEngine\n ) {}\n\n /**\n * Execute a job based on its type\n */\n async executeJob(jobId: string): Promise<void> {\n const job = this.jobService.getJob(jobId);\n\n if (!job) {\n throw new Error(`Job ${jobId} not found`);\n }\n\n try {\n // Update to running status\n this.jobService.updateJob(jobId, {\n status: 'running',\n message: `Starting ${job.type} operation...`,\n progress: 0,\n details: { 
startedAt: new Date().toISOString() }\n });\n\n // Execute based on job type\n switch (job.type) {\n case 'clone':\n await this.executeCloneJob(job);\n break;\n case 'index':\n await this.executeIndexJob(job);\n break;\n case 'crawl':\n await this.executeCrawlJob(job);\n break;\n default:\n throw new Error(`Unknown job type: ${String(job.type)}`);\n }\n\n // Mark as completed\n this.jobService.updateJob(jobId, {\n status: 'completed',\n progress: 100,\n message: `${job.type} operation completed successfully`,\n details: { completedAt: new Date().toISOString() }\n });\n } catch (error) {\n // Mark as failed\n const errorDetails: Record<string, unknown> = {\n completedAt: new Date().toISOString()\n };\n if (error instanceof Error && error.stack !== undefined) {\n errorDetails['error'] = error.stack;\n } else {\n errorDetails['error'] = String(error);\n }\n this.jobService.updateJob(jobId, {\n status: 'failed',\n message: error instanceof Error ? error.message : 'Unknown error',\n details: errorDetails\n });\n throw error;\n }\n }\n\n /**\n * Execute a clone job (git clone + initial indexing)\n */\n private async executeCloneJob(job: Job): Promise<void> {\n const { storeId } = job.details;\n\n if (storeId === undefined || typeof storeId !== 'string') {\n throw new Error('Store ID required for clone job');\n }\n\n // Get the store\n const store = await this.storeService.get(createStoreId(storeId));\n if (!store) {\n throw new Error(`Store ${storeId} not found`);\n }\n\n // Clone is already done by the time the job is created\n // (happens in StoreService.create), so we just need to index\n\n // Update progress - cloning considered done (30%)\n this.jobService.updateJob(job.id, {\n status: 'running',\n message: 'Repository cloned, starting indexing...',\n progress: 30\n });\n\n // Index the repository with progress updates\n const result = await this.indexService.indexStore(store, (event: { type: string; current: number; total: number; message: string }) => {\n // Check 
if job was cancelled\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n throw new Error('Job cancelled by user');\n }\n\n // Indexing is 70% of total progress (30-100%)\n const indexProgress = (event.current / event.total) * 70;\n const totalProgress = 30 + indexProgress;\n\n this.jobService.updateJob(job.id, {\n message: `Indexed ${String(event.current)}/${String(event.total)} files`,\n progress: Math.min(99, totalProgress), // Cap at 99 until fully complete\n details: {\n filesProcessed: event.current,\n totalFiles: event.total\n }\n });\n });\n\n if (!result.success) {\n throw result.error;\n }\n }\n\n /**\n * Execute an index job (re-indexing existing store)\n */\n private async executeIndexJob(job: Job): Promise<void> {\n const { storeId } = job.details;\n\n if (storeId === undefined || typeof storeId !== 'string') {\n throw new Error('Store ID required for index job');\n }\n\n // Get the store\n const store = await this.storeService.getByIdOrName(createStoreId(storeId));\n if (!store) {\n throw new Error(`Store ${storeId} not found`);\n }\n\n // Index with progress updates\n const result = await this.indexService.indexStore(store, (event: { type: string; current: number; total: number; message: string }) => {\n // Check if job was cancelled\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n throw new Error('Job cancelled by user');\n }\n\n const progress = (event.current / event.total) * 100;\n\n this.jobService.updateJob(job.id, {\n message: `Indexed ${String(event.current)}/${String(event.total)} files`,\n progress: Math.min(99, progress), // Cap at 99 until fully complete\n details: {\n filesProcessed: event.current,\n totalFiles: event.total\n }\n });\n });\n\n if (!result.success) {\n throw result.error;\n }\n }\n\n /**\n * Execute a crawl job (web crawling + indexing)\n */\n private async executeCrawlJob(job: Job): Promise<void> {\n const {\n storeId,\n 
url,\n crawlInstruction,\n extractInstruction,\n maxPages,\n simple,\n useHeadless,\n } = job.details;\n\n if (storeId === undefined || typeof storeId !== 'string') {\n throw new Error('Store ID required for crawl job');\n }\n if (url === undefined || typeof url !== 'string') {\n throw new Error('URL required for crawl job');\n }\n\n // Get the store\n const store = await this.storeService.get(createStoreId(storeId));\n if (!store || store.type !== 'web') {\n throw new Error(`Web store ${storeId} not found`);\n }\n\n const resolvedMaxPages = typeof maxPages === 'number' ? maxPages : 50;\n const crawler = new IntelligentCrawler();\n\n // Listen for progress events\n crawler.on('progress', (progress: CrawlProgress) => {\n // Check if job was cancelled\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n void crawler.stop();\n return;\n }\n\n // Crawling is 80% of total progress (0-80%)\n const crawlProgress = (progress.pagesVisited / resolvedMaxPages) * 80;\n\n this.jobService.updateJob(job.id, {\n message: progress.message ?? `Crawling page ${String(progress.pagesVisited)}/${String(resolvedMaxPages)}`,\n progress: Math.min(80, crawlProgress),\n details: { pagesCrawled: progress.pagesVisited }\n });\n });\n\n try {\n await this.lanceStore.initialize(store.id);\n const docs: Document[] = [];\n\n // Build crawl options, only including defined values\n const crawlOptions: {\n maxPages: number;\n simple: boolean;\n useHeadless: boolean;\n crawlInstruction?: string;\n extractInstruction?: string;\n } = {\n maxPages: resolvedMaxPages,\n simple: simple ?? false,\n useHeadless: useHeadless ?? 
false,\n };\n if (crawlInstruction !== undefined) {\n crawlOptions.crawlInstruction = crawlInstruction;\n }\n if (extractInstruction !== undefined) {\n crawlOptions.extractInstruction = extractInstruction;\n }\n\n // Crawl pages using IntelligentCrawler\n for await (const result of crawler.crawl(url, crawlOptions)) {\n // Check cancellation between pages\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n throw new Error('Job cancelled by user');\n }\n\n // Embed and index the content (use extracted if available, otherwise markdown)\n const contentToEmbed = result.extracted ?? result.markdown;\n const vector = await this.embeddingEngine.embed(contentToEmbed);\n\n docs.push({\n id: createDocumentId(`${store.id}-${createHash('md5').update(result.url).digest('hex')}`),\n content: contentToEmbed,\n vector,\n metadata: {\n type: 'web',\n storeId: store.id,\n url: result.url,\n title: result.title,\n extracted: result.extracted !== undefined,\n depth: result.depth,\n indexedAt: new Date(),\n },\n });\n }\n\n // Index all documents (remaining 20%)\n if (docs.length > 0) {\n this.jobService.updateJob(job.id, {\n message: 'Indexing crawled documents...',\n progress: 85\n });\n\n await this.lanceStore.addDocuments(store.id, docs);\n }\n\n this.jobService.updateJob(job.id, {\n message: `Crawled and indexed ${String(docs.length)} pages`,\n progress: 100,\n details: { pagesCrawled: docs.length }\n });\n } finally {\n await crawler.stop();\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;AACA,OAAO,QAAQ;AACf,OAAO,UAAU;;;ACFjB,SAAS,kBAAkB;AAWpB,IAAM,mBAAN,MAAuB;AAAA,EAC5B,YACmB,YACA,cACA,cACA,YACA,iBACjB;AALiB;AACA;AACA;AACA;AACA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKH,MAAM,WAAW,OAA8B;AAC7C,UAAM,MAAM,KAAK,WAAW,OAAO,KAAK;AAExC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,OAAO,KAAK,YAAY;AAAA,IAC1C;AAEA,QAAI;AAEF,WAAK,WAAW,UAAU,OAAO;AAAA,QAC/B,QAAQ;AAAA,QACR,SAAS,YAAY,IAAI,IAAI;AAAA,QAC7B,UAAU;AAAA,QACV,SAAS,EAAE,YAAW,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,MACjD,CAAC;AAGD,cAAQ,IAAI,MAAM;AAAA,QAChB,KAAK;AACH,gBAAM,KAAK,gBAAgB,GAAG;AAC9B;AAAA,QACF,KAAK;AACH,gBAAM,KAAK,gBAAgB,GAAG;AAC9B;AAAA,QACF,KAAK;AACH,gBAAM,KAAK,gBAAgB,GAAG;AAC9B;AAAA,QACF;AACE,gBAAM,IAAI,MAAM,qBAAqB,OAAO,IAAI,IAAI,CAAC,EAAE;AAAA,MAC3D;AAGA,WAAK,WAAW,UAAU,OAAO;AAAA,QAC/B,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,SAAS,GAAG,IAAI,IAAI;AAAA,QACpB,SAAS,EAAE,cAAa,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,MACnD,CAAC;AAAA,IACH,SAAS,OAAO;AAEd,YAAM,eAAwC;AAAA,QAC5C,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC;AACA,UAAI,iBAAiB,SAAS,MAAM,UAAU,QAAW;AACvD,qBAAa,OAAO,IAAI,MAAM;AAAA,MAChC,OAAO;AACL,qBAAa,OAAO,IAAI,OAAO,KAAK;AAAA,MACtC;AACA,WAAK,WAAW,UAAU,OAAO;AAAA,QAC/B,QAAQ;AAAA,QACR,SAAS,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAClD,SAAS;AAAA,MACX,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,KAAyB;AACrD,UAAM,EAAE,QAAQ,IAAI,IAAI;AAExB,QAAI,YAAY,UAAa,OAAO,YAAY,UAAU;AACxD,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAGA,UAAM,QAAQ,MAAM,KAAK,aAAa,IAAI,cAAc,OAAO,CAAC;AAChE,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,SAAS,OAAO,YAAY;AAAA,IAC9C;AAMA,SAAK,WAAW,UAAU,IAAI,IAAI;AAAA,MAChC,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,UAAU;AAAA,IACZ,CAAC;AAGD,UAAM,SAAS,MAAM,KAAK,aAAa,WAAW,OAAO,CAAC,UAA6E;AAErI,YAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,UAAI,YAAY,WAAW,aAAa;AACtC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAGA,YAAM,gBAAiB,MAAM,UAAU,MAAM,QAAS;AACtD,YAAM,gBAAgB,KAAK;AAE3B,WAAK,WAAW,UAAU,IAAI,IAAI;AAAA,QAChC,SAAS,WAAW,OAAO,MAAM,OAAO,CAAC,IAAI,OAAO,MAAM,KAAK,CAAC;AAAA,QAChE,UAAU,KAAK,IAAI,IAAI,aAAa;AAAA;AAAA,QACpC,SAAS;AAAA,UACP,gBAAgB,MAAM;AAAA,UACtB,YA
AY,MAAM;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,OAAO;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,KAAyB;AACrD,UAAM,EAAE,QAAQ,IAAI,IAAI;AAExB,QAAI,YAAY,UAAa,OAAO,YAAY,UAAU;AACxD,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAGA,UAAM,QAAQ,MAAM,KAAK,aAAa,cAAc,cAAc,OAAO,CAAC;AAC1E,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,SAAS,OAAO,YAAY;AAAA,IAC9C;AAGA,UAAM,SAAS,MAAM,KAAK,aAAa,WAAW,OAAO,CAAC,UAA6E;AAErI,YAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,UAAI,YAAY,WAAW,aAAa;AACtC,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAEA,YAAM,WAAY,MAAM,UAAU,MAAM,QAAS;AAEjD,WAAK,WAAW,UAAU,IAAI,IAAI;AAAA,QAChC,SAAS,WAAW,OAAO,MAAM,OAAO,CAAC,IAAI,OAAO,MAAM,KAAK,CAAC;AAAA,QAChE,UAAU,KAAK,IAAI,IAAI,QAAQ;AAAA;AAAA,QAC/B,SAAS;AAAA,UACP,gBAAgB,MAAM;AAAA,UACtB,YAAY,MAAM;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,OAAO;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,KAAyB;AACrD,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI,IAAI;AAER,QAAI,YAAY,UAAa,OAAO,YAAY,UAAU;AACxD,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AACA,QAAI,QAAQ,UAAa,OAAO,QAAQ,UAAU;AAChD,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAGA,UAAM,QAAQ,MAAM,KAAK,aAAa,IAAI,cAAc,OAAO,CAAC;AAChE,QAAI,CAAC,SAAS,MAAM,SAAS,OAAO;AAClC,YAAM,IAAI,MAAM,aAAa,OAAO,YAAY;AAAA,IAClD;AAEA,UAAM,mBAAmB,OAAO,aAAa,WAAW,WAAW;AACnE,UAAM,UAAU,IAAI,mBAAmB;AAGvC,YAAQ,GAAG,YAAY,CAAC,aAA4B;AAElD,YAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,UAAI,YAAY,WAAW,aAAa;AACtC,aAAK,QAAQ,KAAK;AAClB;AAAA,MACF;AAGA,YAAM,gBAAiB,SAAS,eAAe,mBAAoB;AAEnE,WAAK,WAAW,UAAU,IAAI,IAAI;AAAA,QAChC,SAAS,SAAS,WAAW,iBAAiB,OAAO,SAAS,YAAY,CAAC,IAAI,OAAO,gBAAgB,CAAC;AAAA,QACvG,UAAU,KAAK,IAAI,IAAI,aAAa;AAAA,QACpC,SAAS,EAAE,cAAc,SAAS,aAAa;AAAA,MACjD,CAAC;AAAA,IACH,CAAC;AAED,QAAI;AACF,YAAM,KAAK,WAAW,WAAW,MAAM,EAAE;AACzC,YAAM,OAAmB,CAAC;AAG1B,YAAM,eAMF;AAAA,QACF,UAAU;AAAA,QACV,QAAQ,UAAU;AAAA,QAClB,aAAa,eAAe;AAAA,MAC9B;AACA,UAAI,qBAAqB,QAAW;AAClC,qBAAa,mBAAmB;AAAA,MAClC;AACA,UAAI,uBAAuB,QAAW;AACpC,qBAAa,qBAAqB;AAAA,MACpC;AAGA,uBAAi
B,UAAU,QAAQ,MAAM,KAAK,YAAY,GAAG;AAE3D,cAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,YAAI,YAAY,WAAW,aAAa;AACtC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,cAAM,iBAAiB,OAAO,aAAa,OAAO;AAClD,cAAM,SAAS,MAAM,KAAK,gBAAgB,MAAM,cAAc;AAE9D,aAAK,KAAK;AAAA,UACR,IAAI,iBAAiB,GAAG,MAAM,EAAE,IAAI,WAAW,KAAK,EAAE,OAAO,OAAO,GAAG,EAAE,OAAO,KAAK,CAAC,EAAE;AAAA,UACxF,SAAS;AAAA,UACT;AAAA,UACA,UAAU;AAAA,YACR,MAAM;AAAA,YACN,SAAS,MAAM;AAAA,YACf,KAAK,OAAO;AAAA,YACZ,OAAO,OAAO;AAAA,YACd,WAAW,OAAO,cAAc;AAAA,YAChC,OAAO,OAAO;AAAA,YACd,WAAW,oBAAI,KAAK;AAAA,UACtB;AAAA,QACF,CAAC;AAAA,MACH;AAGA,UAAI,KAAK,SAAS,GAAG;AACnB,aAAK,WAAW,UAAU,IAAI,IAAI;AAAA,UAChC,SAAS;AAAA,UACT,UAAU;AAAA,QACZ,CAAC;AAED,cAAM,KAAK,WAAW,aAAa,MAAM,IAAI,IAAI;AAAA,MACnD;AAEA,WAAK,WAAW,UAAU,IAAI,IAAI;AAAA,QAChC,SAAS,uBAAuB,OAAO,KAAK,MAAM,CAAC;AAAA,QACnD,UAAU;AAAA,QACV,SAAS,EAAE,cAAc,KAAK,OAAO;AAAA,MACvC,CAAC;AAAA,IACH,UAAE;AACA,YAAM,QAAQ,KAAK;AAAA,IACrB;AAAA,EACF;AACF;;;ADtRA,eAAe,OAAsB;AACnC,QAAM,QAAQ,QAAQ,KAAK,CAAC;AAC5B,QAAM,UAAU,QAAQ,IAAI,iBAAiB;AAE7C,MAAI,UAAU,UAAa,UAAU,IAAI;AACvC,YAAQ,MAAM,wBAAwB;AACtC,YAAQ,MAAM,uCAAuC;AACrD,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,aAAa,IAAI,WAAW,OAAO;AACzC,QAAM,WAAW,MAAM,eAAe,QAAW,OAAO;AAGxD,QAAM,UAAU,KAAK;AAAA,IACnB,WAAW,SAAS;AAAA;AAAA,IACpB,GAAG,KAAK;AAAA,EACV;AAEA,MAAI;AACF,OAAG,cAAc,SAAS,QAAQ,IAAI,SAAS,GAAG,OAAO;AAAA,EAC3D,SAAS,OAAO;AACd,YAAQ,MAAM,sCAAsC,KAAK;AAAA,EAC3D;AAGA,UAAQ,GAAG,WAAW,MAAM;AAC1B,YAAQ,IAAI,IAAI,KAAK,uCAAuC;AAC5D,eAAW,UAAU,OAAO;AAAA,MAC1B,QAAQ;AAAA,MACR,SAAS;AAAA,IACX,CAAC;AAGD,QAAI;AACF,UAAI,GAAG,WAAW,OAAO,GAAG;AAC1B,WAAG,WAAW,OAAO;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,uCAAuC,KAAK;AAAA,IAC5D;AAEA,YAAQ,KAAK,CAAC;AAAA,EAChB,CAAC;AAGD,QAAM,SAAS,IAAI;AAAA,IACjB;AAAA,IACA,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA,EACX;AAEA,MAAI;AACF,UAAM,OAAO,WAAW,KAAK;AAG7B,QAAI;AACF,UAAI,GAAG,WAAW,OAAO,GAAG;AAC1B,WAAG,WAAW,OAAO;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,uCAAuC,KAAK;AAAA,IAC5D;AAEA,YAAQ,IAAI,IAAI,KAAK,8BAA8B;AACnD,YAAQ,KAAK,CAAC;AAAA,EAChB,SAAS,OAAO;AAEd,YAAQ,MAA
M,IAAI,KAAK,iBAAiB,KAAK;AAG7C,QAAI;AACF,UAAI,GAAG,WAAW,OAAO,GAAG;AAC1B,WAAG,WAAW,OAAO;AAAA,MACvB;AAAA,IACF,SAAS,cAAc;AACrB,cAAQ,MAAM,uCAAuC,YAAY;AAAA,IACnE;AAEA,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,UAAmB;AAC/B,UAAQ,MAAM,qCAAqC,KAAK;AACxD,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
1
+ {"version":3,"sources":["../../src/workers/background-worker.ts","../../src/workers/pid-file.ts","../../src/workers/background-worker-cli.ts"],"sourcesContent":["import { createHash } from 'node:crypto';\nimport { IntelligentCrawler, type CrawlProgress } from '../crawl/intelligent-crawler.js';\nimport { IndexService } from '../services/index.service.js';\nimport { JobService } from '../services/job.service.js';\nimport { StoreService } from '../services/store.service.js';\nimport { createStoreId, createDocumentId } from '../types/brands.js';\nimport type { EmbeddingEngine } from '../db/embeddings.js';\nimport type { LanceStore } from '../db/lance.js';\nimport type { Document } from '../types/document.js';\nimport type { Job } from '../types/job.js';\n\n/**\n * Calculate index progress as a percentage, handling division by zero.\n * @param current - Current number of items processed\n * @param total - Total number of items (may be 0)\n * @param scale - Scale factor for progress (default 100 for 0-100%)\n * @returns Progress value, or 0 if total is 0\n */\nexport function calculateIndexProgress(\n current: number,\n total: number,\n scale: number = 100\n): number {\n if (total === 0) return 0;\n return (current / total) * scale;\n}\n\nexport class BackgroundWorker {\n constructor(\n private readonly jobService: JobService,\n private readonly storeService: StoreService,\n private readonly indexService: IndexService,\n private readonly lanceStore: LanceStore,\n private readonly embeddingEngine: EmbeddingEngine\n ) {}\n\n /**\n * Execute a job based on its type\n */\n async executeJob(jobId: string): Promise<void> {\n const job = this.jobService.getJob(jobId);\n\n if (!job) {\n throw new Error(`Job ${jobId} not found`);\n }\n\n try {\n // Update to running status\n this.jobService.updateJob(jobId, {\n status: 'running',\n message: `Starting ${job.type} operation...`,\n progress: 0,\n details: { startedAt: new Date().toISOString() },\n });\n\n // Execute based on job 
type\n switch (job.type) {\n case 'clone':\n await this.executeCloneJob(job);\n break;\n case 'index':\n await this.executeIndexJob(job);\n break;\n case 'crawl':\n await this.executeCrawlJob(job);\n break;\n default:\n throw new Error(`Unknown job type: ${String(job.type)}`);\n }\n\n // Mark as completed\n this.jobService.updateJob(jobId, {\n status: 'completed',\n progress: 100,\n message: `${job.type} operation completed successfully`,\n details: { completedAt: new Date().toISOString() },\n });\n } catch (error) {\n // Mark as failed\n const errorDetails: Record<string, unknown> = {\n completedAt: new Date().toISOString(),\n };\n if (error instanceof Error && error.stack !== undefined) {\n errorDetails['error'] = error.stack;\n } else {\n errorDetails['error'] = String(error);\n }\n this.jobService.updateJob(jobId, {\n status: 'failed',\n message: error instanceof Error ? error.message : 'Unknown error',\n details: errorDetails,\n });\n throw error;\n }\n }\n\n /**\n * Execute a clone job (git clone + initial indexing)\n */\n private async executeCloneJob(job: Job): Promise<void> {\n const { storeId } = job.details;\n\n if (storeId === undefined || typeof storeId !== 'string') {\n throw new Error('Store ID required for clone job');\n }\n\n // Get the store\n const store = await this.storeService.get(createStoreId(storeId));\n if (!store) {\n throw new Error(`Store ${storeId} not found`);\n }\n\n // Clone is already done by the time the job is created\n // (happens in StoreService.create), so we just need to index\n\n // Update progress - cloning considered done (30%)\n this.jobService.updateJob(job.id, {\n status: 'running',\n message: 'Repository cloned, starting indexing...',\n progress: 30,\n });\n\n // Index the repository with progress updates\n const result = await this.indexService.indexStore(\n store,\n (event: { type: string; current: number; total: number; message: string }) => {\n // Check if job was cancelled\n const currentJob = 
this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n throw new Error('Job cancelled by user');\n }\n\n // Indexing is 70% of total progress (30-100%)\n const indexProgress = calculateIndexProgress(event.current, event.total, 70);\n const totalProgress = 30 + indexProgress;\n\n this.jobService.updateJob(job.id, {\n message: `Indexed ${String(event.current)}/${String(event.total)} files`,\n progress: Math.min(99, totalProgress), // Cap at 99 until fully complete\n details: {\n filesProcessed: event.current,\n totalFiles: event.total,\n },\n });\n }\n );\n\n if (!result.success) {\n throw result.error;\n }\n }\n\n /**\n * Execute an index job (re-indexing existing store)\n */\n private async executeIndexJob(job: Job): Promise<void> {\n const { storeId } = job.details;\n\n if (storeId === undefined || typeof storeId !== 'string') {\n throw new Error('Store ID required for index job');\n }\n\n // Get the store\n const store = await this.storeService.getByIdOrName(createStoreId(storeId));\n if (!store) {\n throw new Error(`Store ${storeId} not found`);\n }\n\n // Index with progress updates\n const result = await this.indexService.indexStore(\n store,\n (event: { type: string; current: number; total: number; message: string }) => {\n // Check if job was cancelled\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n throw new Error('Job cancelled by user');\n }\n\n const progress = calculateIndexProgress(event.current, event.total);\n\n this.jobService.updateJob(job.id, {\n message: `Indexed ${String(event.current)}/${String(event.total)} files`,\n progress: Math.min(99, progress), // Cap at 99 until fully complete\n details: {\n filesProcessed: event.current,\n totalFiles: event.total,\n },\n });\n }\n );\n\n if (!result.success) {\n throw result.error;\n }\n }\n\n /**\n * Execute a crawl job (web crawling + indexing)\n */\n private async executeCrawlJob(job: Job): Promise<void> {\n const { storeId, 
url, crawlInstruction, extractInstruction, maxPages, simple, useHeadless } =\n job.details;\n\n if (storeId === undefined || typeof storeId !== 'string') {\n throw new Error('Store ID required for crawl job');\n }\n if (url === undefined || typeof url !== 'string') {\n throw new Error('URL required for crawl job');\n }\n\n // Get the store\n const store = await this.storeService.get(createStoreId(storeId));\n if (store?.type !== 'web') {\n throw new Error(`Web store ${storeId} not found`);\n }\n\n const resolvedMaxPages = typeof maxPages === 'number' ? maxPages : 50;\n const crawler = new IntelligentCrawler();\n\n // Listen for progress events\n crawler.on('progress', (progress: CrawlProgress) => {\n // Check if job was cancelled - just return early, for-await loop will throw and finally will cleanup\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n return;\n }\n\n // Crawling is 80% of total progress (0-80%)\n const crawlProgress = (progress.pagesVisited / resolvedMaxPages) * 80;\n\n this.jobService.updateJob(job.id, {\n message:\n progress.message ??\n `Crawling page ${String(progress.pagesVisited)}/${String(resolvedMaxPages)}`,\n progress: Math.min(80, crawlProgress),\n details: { pagesCrawled: progress.pagesVisited },\n });\n });\n\n try {\n await this.lanceStore.initialize(store.id);\n const docs: Document[] = [];\n\n // Build crawl options, only including defined values\n const crawlOptions: {\n maxPages: number;\n simple: boolean;\n useHeadless: boolean;\n crawlInstruction?: string;\n extractInstruction?: string;\n } = {\n maxPages: resolvedMaxPages,\n simple: simple ?? false,\n useHeadless: useHeadless ?? 
false,\n };\n if (crawlInstruction !== undefined) {\n crawlOptions.crawlInstruction = crawlInstruction;\n }\n if (extractInstruction !== undefined) {\n crawlOptions.extractInstruction = extractInstruction;\n }\n\n // Crawl pages using IntelligentCrawler\n for await (const result of crawler.crawl(url, crawlOptions)) {\n // Check cancellation between pages\n const currentJob = this.jobService.getJob(job.id);\n if (currentJob?.status === 'cancelled') {\n throw new Error('Job cancelled by user');\n }\n\n // Embed and index the content (use extracted if available, otherwise markdown)\n const contentToEmbed = result.extracted ?? result.markdown;\n const vector = await this.embeddingEngine.embed(contentToEmbed);\n\n docs.push({\n id: createDocumentId(`${store.id}-${createHash('md5').update(result.url).digest('hex')}`),\n content: contentToEmbed,\n vector,\n metadata: {\n type: 'web',\n storeId: store.id,\n url: result.url,\n title: result.title,\n extracted: result.extracted !== undefined,\n depth: result.depth,\n indexedAt: new Date(),\n },\n });\n }\n\n // Index all documents (remaining 20%)\n if (docs.length > 0) {\n this.jobService.updateJob(job.id, {\n message: 'Indexing crawled documents...',\n progress: 85,\n });\n\n await this.lanceStore.addDocuments(store.id, docs);\n }\n\n this.jobService.updateJob(job.id, {\n message: `Crawled and indexed ${String(docs.length)} pages`,\n progress: 100,\n details: { pagesCrawled: docs.length },\n });\n } finally {\n await crawler.stop();\n }\n }\n}\n","import fs from 'fs';\nimport path from 'path';\n\n/**\n * Result of a PID file delete operation.\n * Delete operations are best-effort and should not throw.\n */\nexport interface PidFileResult {\n success: boolean;\n error?: Error;\n}\n\n/**\n * Context for PID file deletion - indicates when the delete is happening.\n * Used for logging/debugging purposes.\n */\nexport type PidFileDeleteContext = 'sigterm' | 'success' | 'failure';\n\n/**\n * Write PID file - CRITICAL operation 
that must succeed.\n *\n * If the PID file cannot be written, the job cannot be cancelled through\n * the job management system. This is a critical failure and the job\n * should not proceed.\n *\n * @param pidFile - Absolute path to the PID file\n * @param pid - Process ID to write\n * @throws Error if PID file cannot be written\n */\nexport function writePidFile(pidFile: string, pid: number): void {\n try {\n fs.writeFileSync(pidFile, pid.toString(), 'utf-8');\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(\n `CRITICAL: Failed to write PID file ${pidFile}. ` +\n `Job cannot be cancelled without PID file. ` +\n `Original error: ${message}`\n );\n }\n}\n\n/**\n * Delete PID file - best-effort cleanup during shutdown.\n *\n * This operation should NEVER throw. During process shutdown (SIGTERM,\n * job success, job failure), failing to delete a PID file should not\n * prevent the process from exiting cleanly.\n *\n * Stale PID files are cleaned up by JobService.cleanupOldJobs().\n *\n * @param pidFile - Absolute path to the PID file\n * @param _context - Context indicating when the delete is happening (for future logging)\n * @returns Result indicating success or failure with error details\n */\nexport function deletePidFile(pidFile: string, _context: PidFileDeleteContext): PidFileResult {\n try {\n fs.unlinkSync(pidFile);\n return { success: true };\n } catch (error) {\n // ENOENT = file doesn't exist - that's success (nothing to delete)\n if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {\n return { success: true };\n }\n // Any other error = failure (permission denied, etc.)\n return {\n success: false,\n error: error instanceof Error ? 
error : new Error(String(error)),\n };\n }\n}\n\n/**\n * Build the path to a PID file for a given job.\n *\n * @param jobsDir - Directory where job files are stored\n * @param jobId - Job identifier\n * @returns Absolute path to the PID file\n */\nexport function buildPidFilePath(jobsDir: string, jobId: string): string {\n return path.join(jobsDir, `${jobId}.pid`);\n}\n","#!/usr/bin/env node\nimport { BackgroundWorker } from './background-worker.js';\nimport { writePidFile, deletePidFile, buildPidFilePath } from './pid-file.js';\nimport { createServices } from '../services/index.js';\nimport { JobService } from '../services/job.service.js';\n\n/**\n * Background worker CLI entry point\n *\n * Usage: background-worker-cli <job-id>\n *\n * This process runs detached from the parent and executes a single job.\n */\n\nasync function main(): Promise<void> {\n const jobId = process.argv[2];\n const dataDir = process.env['BLUERA_DATA_DIR'];\n\n if (jobId === undefined || jobId === '') {\n console.error('Error: Job ID required');\n console.error('Usage: background-worker-cli <job-id>');\n process.exit(1);\n }\n\n // Initialize services\n const jobService = new JobService(dataDir);\n const services = await createServices(undefined, dataDir);\n\n // Write PID file for job cancellation - CRITICAL: must succeed or job cannot be cancelled\n const pidFile = buildPidFilePath(\n jobService['jobsDir'], // Access private field for PID path\n jobId\n );\n\n try {\n writePidFile(pidFile, process.pid);\n } catch (error) {\n // CRITICAL: Cannot proceed without PID file - job would be uncancellable\n console.error(error instanceof Error ? 
error.message : String(error));\n process.exit(1);\n }\n\n // Handle SIGTERM for graceful shutdown\n process.on('SIGTERM', () => {\n console.log(`[${jobId}] Received SIGTERM, cancelling job...`);\n jobService.updateJob(jobId, {\n status: 'cancelled',\n message: 'Job cancelled by user',\n });\n\n // Clean up PID file (best-effort - don't block shutdown)\n const deleteResult = deletePidFile(pidFile, 'sigterm');\n if (!deleteResult.success && deleteResult.error !== undefined) {\n console.error(\n `Warning: Could not remove PID file during SIGTERM: ${deleteResult.error.message}`\n );\n }\n\n process.exit(0);\n });\n\n // Create worker and execute job\n const worker = new BackgroundWorker(\n jobService,\n services.store,\n services.index,\n services.lance,\n services.embeddings\n );\n\n try {\n await worker.executeJob(jobId);\n\n // Clean up PID file on success (best-effort - don't change exit code)\n const successCleanup = deletePidFile(pidFile, 'success');\n if (!successCleanup.success && successCleanup.error !== undefined) {\n console.error(\n `Warning: Could not remove PID file after success: ${successCleanup.error.message}`\n );\n }\n\n console.log(`[${jobId}] Job completed successfully`);\n process.exit(0);\n } catch (error) {\n // Job service already updated with failure status in BackgroundWorker\n console.error(`[${jobId}] Job failed:`, error);\n\n // Clean up PID file on failure (best-effort - exit code reflects job failure)\n const failureCleanup = deletePidFile(pidFile, 'failure');\n if (!failureCleanup.success && failureCleanup.error !== undefined) {\n console.error(\n `Warning: Could not remove PID file after failure: ${failureCleanup.error.message}`\n );\n }\n\n process.exit(1);\n }\n}\n\nmain().catch((error: unknown) => {\n console.error('Fatal error in background worker:', error);\n 
process.exit(1);\n});\n"],"mappings":";;;;;;;;;;;;;AAAA,SAAS,kBAAkB;AAkBpB,SAAS,uBACd,SACA,OACA,QAAgB,KACR;AACR,MAAI,UAAU,EAAG,QAAO;AACxB,SAAQ,UAAU,QAAS;AAC7B;AAEO,IAAM,mBAAN,MAAuB;AAAA,EAC5B,YACmB,YACA,cACA,cACA,YACA,iBACjB;AALiB;AACA;AACA;AACA;AACA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKH,MAAM,WAAW,OAA8B;AAC7C,UAAM,MAAM,KAAK,WAAW,OAAO,KAAK;AAExC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,OAAO,KAAK,YAAY;AAAA,IAC1C;AAEA,QAAI;AAEF,WAAK,WAAW,UAAU,OAAO;AAAA,QAC/B,QAAQ;AAAA,QACR,SAAS,YAAY,IAAI,IAAI;AAAA,QAC7B,UAAU;AAAA,QACV,SAAS,EAAE,YAAW,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,MACjD,CAAC;AAGD,cAAQ,IAAI,MAAM;AAAA,QAChB,KAAK;AACH,gBAAM,KAAK,gBAAgB,GAAG;AAC9B;AAAA,QACF,KAAK;AACH,gBAAM,KAAK,gBAAgB,GAAG;AAC9B;AAAA,QACF,KAAK;AACH,gBAAM,KAAK,gBAAgB,GAAG;AAC9B;AAAA,QACF;AACE,gBAAM,IAAI,MAAM,qBAAqB,OAAO,IAAI,IAAI,CAAC,EAAE;AAAA,MAC3D;AAGA,WAAK,WAAW,UAAU,OAAO;AAAA,QAC/B,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,SAAS,GAAG,IAAI,IAAI;AAAA,QACpB,SAAS,EAAE,cAAa,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,MACnD,CAAC;AAAA,IACH,SAAS,OAAO;AAEd,YAAM,eAAwC;AAAA,QAC5C,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC;AACA,UAAI,iBAAiB,SAAS,MAAM,UAAU,QAAW;AACvD,qBAAa,OAAO,IAAI,MAAM;AAAA,MAChC,OAAO;AACL,qBAAa,OAAO,IAAI,OAAO,KAAK;AAAA,MACtC;AACA,WAAK,WAAW,UAAU,OAAO;AAAA,QAC/B,QAAQ;AAAA,QACR,SAAS,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAClD,SAAS;AAAA,MACX,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,KAAyB;AACrD,UAAM,EAAE,QAAQ,IAAI,IAAI;AAExB,QAAI,YAAY,UAAa,OAAO,YAAY,UAAU;AACxD,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAGA,UAAM,QAAQ,MAAM,KAAK,aAAa,IAAI,cAAc,OAAO,CAAC;AAChE,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,SAAS,OAAO,YAAY;AAAA,IAC9C;AAMA,SAAK,WAAW,UAAU,IAAI,IAAI;AAAA,MAChC,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,UAAU;AAAA,IACZ,CAAC;AAGD,UAAM,SAAS,MAAM,KAAK,aAAa;AAAA,MACrC;AAAA,MACA,CAAC,UAA6E;AAE5E,cAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,YAAI,YAAY,WAAW,aAAa;AACtC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,cAAM,gBAAgB,uBAAuB,MAAM,SAAS,MAAM,OAAO,EAAE;AAC3E,cAAM,gBAAgB,KAAK;AAE3B,aAAK,WAAW,UAAU,IAAI,IAAI;AAAA,UAChC,SAAS,WAAW,OAAO,MAAM,OAAO,CAAC,IAAI,OAAO,MAAM,KAAK
,CAAC;AAAA,UAChE,UAAU,KAAK,IAAI,IAAI,aAAa;AAAA;AAAA,UACpC,SAAS;AAAA,YACP,gBAAgB,MAAM;AAAA,YACtB,YAAY,MAAM;AAAA,UACpB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,OAAO;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,KAAyB;AACrD,UAAM,EAAE,QAAQ,IAAI,IAAI;AAExB,QAAI,YAAY,UAAa,OAAO,YAAY,UAAU;AACxD,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAGA,UAAM,QAAQ,MAAM,KAAK,aAAa,cAAc,cAAc,OAAO,CAAC;AAC1E,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,SAAS,OAAO,YAAY;AAAA,IAC9C;AAGA,UAAM,SAAS,MAAM,KAAK,aAAa;AAAA,MACrC;AAAA,MACA,CAAC,UAA6E;AAE5E,cAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,YAAI,YAAY,WAAW,aAAa;AACtC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAEA,cAAM,WAAW,uBAAuB,MAAM,SAAS,MAAM,KAAK;AAElE,aAAK,WAAW,UAAU,IAAI,IAAI;AAAA,UAChC,SAAS,WAAW,OAAO,MAAM,OAAO,CAAC,IAAI,OAAO,MAAM,KAAK,CAAC;AAAA,UAChE,UAAU,KAAK,IAAI,IAAI,QAAQ;AAAA;AAAA,UAC/B,SAAS;AAAA,YACP,gBAAgB,MAAM;AAAA,YACtB,YAAY,MAAM;AAAA,UACpB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,OAAO;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAAgB,KAAyB;AACrD,UAAM,EAAE,SAAS,KAAK,kBAAkB,oBAAoB,UAAU,QAAQ,YAAY,IACxF,IAAI;AAEN,QAAI,YAAY,UAAa,OAAO,YAAY,UAAU;AACxD,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AACA,QAAI,QAAQ,UAAa,OAAO,QAAQ,UAAU;AAChD,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAGA,UAAM,QAAQ,MAAM,KAAK,aAAa,IAAI,cAAc,OAAO,CAAC;AAChE,QAAI,OAAO,SAAS,OAAO;AACzB,YAAM,IAAI,MAAM,aAAa,OAAO,YAAY;AAAA,IAClD;AAEA,UAAM,mBAAmB,OAAO,aAAa,WAAW,WAAW;AACnE,UAAM,UAAU,IAAI,mBAAmB;AAGvC,YAAQ,GAAG,YAAY,CAAC,aAA4B;AAElD,YAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,UAAI,YAAY,WAAW,aAAa;AACtC;AAAA,MACF;AAGA,YAAM,gBAAiB,SAAS,eAAe,mBAAoB;AAEnE,WAAK,WAAW,UAAU,IAAI,IAAI;AAAA,QAChC,SACE,SAAS,WACT,iBAAiB,OAAO,SAAS,YAAY,CAAC,IAAI,OAAO,gBAAgB,CAAC;AAAA,QAC5E,UAAU,KAAK,IAAI,IAAI,aAAa;AAAA,QACpC,SAAS,EAAE,cAAc,SAAS,aAAa;AAAA,MACjD,CAAC;AAAA,IACH,CAAC;AAED,QAAI;AACF,YAAM,KAAK,WAAW,WAAW,MAAM,EAAE;AACzC,YAAM,OAAmB,CAAC;AAG1B,YAAM,eAMF;AAAA,QACF,UAAU;AAAA,QACV,QAAQ,UAAU;AAAA,QAClB,aAAa,eAAe;AAAA,MAC9B;AACA,UAAI,qBAAqB,QAAW;AAClC,qBAAa,mBAAmB;AAAA,MAClC;A
ACA,UAAI,uBAAuB,QAAW;AACpC,qBAAa,qBAAqB;AAAA,MACpC;AAGA,uBAAiB,UAAU,QAAQ,MAAM,KAAK,YAAY,GAAG;AAE3D,cAAM,aAAa,KAAK,WAAW,OAAO,IAAI,EAAE;AAChD,YAAI,YAAY,WAAW,aAAa;AACtC,gBAAM,IAAI,MAAM,uBAAuB;AAAA,QACzC;AAGA,cAAM,iBAAiB,OAAO,aAAa,OAAO;AAClD,cAAM,SAAS,MAAM,KAAK,gBAAgB,MAAM,cAAc;AAE9D,aAAK,KAAK;AAAA,UACR,IAAI,iBAAiB,GAAG,MAAM,EAAE,IAAI,WAAW,KAAK,EAAE,OAAO,OAAO,GAAG,EAAE,OAAO,KAAK,CAAC,EAAE;AAAA,UACxF,SAAS;AAAA,UACT;AAAA,UACA,UAAU;AAAA,YACR,MAAM;AAAA,YACN,SAAS,MAAM;AAAA,YACf,KAAK,OAAO;AAAA,YACZ,OAAO,OAAO;AAAA,YACd,WAAW,OAAO,cAAc;AAAA,YAChC,OAAO,OAAO;AAAA,YACd,WAAW,oBAAI,KAAK;AAAA,UACtB;AAAA,QACF,CAAC;AAAA,MACH;AAGA,UAAI,KAAK,SAAS,GAAG;AACnB,aAAK,WAAW,UAAU,IAAI,IAAI;AAAA,UAChC,SAAS;AAAA,UACT,UAAU;AAAA,QACZ,CAAC;AAED,cAAM,KAAK,WAAW,aAAa,MAAM,IAAI,IAAI;AAAA,MACnD;AAEA,WAAK,WAAW,UAAU,IAAI,IAAI;AAAA,QAChC,SAAS,uBAAuB,OAAO,KAAK,MAAM,CAAC;AAAA,QACnD,UAAU;AAAA,QACV,SAAS,EAAE,cAAc,KAAK,OAAO;AAAA,MACvC,CAAC;AAAA,IACH,UAAE;AACA,YAAM,QAAQ,KAAK;AAAA,IACrB;AAAA,EACF;AACF;;;ACrTA,OAAO,QAAQ;AACf,OAAO,UAAU;AA4BV,SAAS,aAAa,SAAiB,KAAmB;AAC/D,MAAI;AACF,OAAG,cAAc,SAAS,IAAI,SAAS,GAAG,OAAO;AAAA,EACnD,SAAS,OAAO;AACd,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,UAAM,IAAI;AAAA,MACR,sCAAsC,OAAO,+DAExB,OAAO;AAAA,IAC9B;AAAA,EACF;AACF;AAeO,SAAS,cAAc,SAAiB,UAA+C;AAC5F,MAAI;AACF,OAAG,WAAW,OAAO;AACrB,WAAO,EAAE,SAAS,KAAK;AAAA,EACzB,SAAS,OAAO;AAEd,QAAI,iBAAiB,SAAS,UAAU,SAAS,MAAM,SAAS,UAAU;AACxE,aAAO,EAAE,SAAS,KAAK;AAAA,IACzB;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,IACjE;AAAA,EACF;AACF;AASO,SAAS,iBAAiB,SAAiB,OAAuB;AACvE,SAAO,KAAK,KAAK,SAAS,GAAG,KAAK,MAAM;AAC1C;;;ACnEA,eAAe,OAAsB;AACnC,QAAM,QAAQ,QAAQ,KAAK,CAAC;AAC5B,QAAM,UAAU,QAAQ,IAAI,iBAAiB;AAE7C,MAAI,UAAU,UAAa,UAAU,IAAI;AACvC,YAAQ,MAAM,wBAAwB;AACtC,YAAQ,MAAM,uCAAuC;AACrD,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,QAAM,aAAa,IAAI,WAAW,OAAO;AACzC,QAAM,WAAW,MAAM,eAAe,QAAW,OAAO;AAGxD,QAAM,UAAU;AAAA,IACd,WAAW,SAAS;AAAA;AAAA,IACpB;AAAA,EACF;AAEA,MAAI;AACF,iBAAa,SAAS,QAAQ,GAAG;AAAA,EACnC,SAAS,OAAO;AAEd,YAAQ,MAAM,iBAAiB,QAAQ,M
AAM,UAAU,OAAO,KAAK,CAAC;AACpE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,UAAQ,GAAG,WAAW,MAAM;AAC1B,YAAQ,IAAI,IAAI,KAAK,uCAAuC;AAC5D,eAAW,UAAU,OAAO;AAAA,MAC1B,QAAQ;AAAA,MACR,SAAS;AAAA,IACX,CAAC;AAGD,UAAM,eAAe,cAAc,SAAS,SAAS;AACrD,QAAI,CAAC,aAAa,WAAW,aAAa,UAAU,QAAW;AAC7D,cAAQ;AAAA,QACN,sDAAsD,aAAa,MAAM,OAAO;AAAA,MAClF;AAAA,IACF;AAEA,YAAQ,KAAK,CAAC;AAAA,EAChB,CAAC;AAGD,QAAM,SAAS,IAAI;AAAA,IACjB;AAAA,IACA,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA,EACX;AAEA,MAAI;AACF,UAAM,OAAO,WAAW,KAAK;AAG7B,UAAM,iBAAiB,cAAc,SAAS,SAAS;AACvD,QAAI,CAAC,eAAe,WAAW,eAAe,UAAU,QAAW;AACjE,cAAQ;AAAA,QACN,qDAAqD,eAAe,MAAM,OAAO;AAAA,MACnF;AAAA,IACF;AAEA,YAAQ,IAAI,IAAI,KAAK,8BAA8B;AACnD,YAAQ,KAAK,CAAC;AAAA,EAChB,SAAS,OAAO;AAEd,YAAQ,MAAM,IAAI,KAAK,iBAAiB,KAAK;AAG7C,UAAM,iBAAiB,cAAc,SAAS,SAAS;AACvD,QAAI,CAAC,eAAe,WAAW,eAAe,UAAU,QAAW;AACjE,cAAQ;AAAA,QACN,qDAAqD,eAAe,MAAM,OAAO;AAAA,MACnF;AAAA,IACF;AAEA,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,UAAmB;AAC/B,UAAQ,MAAM,qCAAqC,KAAK;AACxD,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
package/eslint.config.js CHANGED
@@ -1,6 +1,8 @@
1
1
  import eslint from '@eslint/js';
2
2
  import tseslint from 'typescript-eslint';
3
3
  import eslintComments from 'eslint-plugin-eslint-comments';
4
+ import importPlugin from 'eslint-plugin-import';
5
+ import eslintConfigPrettier from 'eslint-config-prettier';
4
6
  import requireSkipComment from './eslint-rules/require-skip-comment.js';
5
7
 
6
8
  export default tseslint.config(
@@ -13,10 +15,19 @@ export default tseslint.config(
13
15
  tsconfigRootDir: import.meta.dirname,
14
16
  },
15
17
  },
18
+ settings: {
19
+ 'import/resolver': {
20
+ typescript: {
21
+ alwaysTryTypes: true,
22
+ project: './tsconfig.json',
23
+ },
24
+ },
25
+ },
16
26
  },
17
27
  {
18
28
  plugins: {
19
29
  'eslint-comments': eslintComments,
30
+ import: importPlugin,
20
31
  },
21
32
  rules: {
22
33
  // Type safety
@@ -30,6 +41,16 @@ export default tseslint.config(
30
41
  '@typescript-eslint/prefer-readonly': 'error',
31
42
  '@typescript-eslint/strict-boolean-expressions': 'error',
32
43
  '@typescript-eslint/consistent-type-assertions': ['error', { assertionStyle: 'never' }],
44
+ '@typescript-eslint/no-non-null-assertion': 'error',
45
+ '@typescript-eslint/no-floating-promises': 'error',
46
+ '@typescript-eslint/no-misused-promises': 'error',
47
+ '@typescript-eslint/await-thenable': 'error',
48
+ '@typescript-eslint/no-unnecessary-type-assertion': 'error',
49
+ '@typescript-eslint/prefer-nullish-coalescing': 'error',
50
+ '@typescript-eslint/prefer-optional-chain': 'error',
51
+ '@typescript-eslint/no-unsafe-argument': 'error',
52
+ '@typescript-eslint/require-await': 'error',
53
+ '@typescript-eslint/return-await': ['error', 'in-try-catch'],
33
54
 
34
55
  // Dead code detection
35
56
  '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
@@ -37,8 +58,28 @@ export default tseslint.config(
37
58
  'no-unreachable-loop': 'error',
38
59
  'no-constant-condition': ['error', { checkLoops: false }],
39
60
 
61
+ // General best practices
62
+ 'no-console': 'off', // CLI tool needs console
63
+ eqeqeq: ['error', 'always'],
64
+ 'no-var': 'error',
65
+ 'prefer-const': 'error',
66
+ 'prefer-template': 'error',
67
+ 'no-throw-literal': 'error',
68
+
40
69
  // Require explanations for ESLint disable comments (warn for now to allow gradual fix)
41
70
  'eslint-comments/require-description': 'warn',
71
+
72
+ // Import organization
73
+ 'import/order': [
74
+ 'error',
75
+ {
76
+ groups: ['builtin', 'external', 'internal', ['parent', 'sibling'], 'index', 'type'],
77
+ 'newlines-between': 'never',
78
+ alphabetize: { order: 'asc', caseInsensitive: true },
79
+ },
80
+ ],
81
+ 'import/first': 'error',
82
+ 'import/no-duplicates': 'error',
42
83
  },
43
84
  },
44
85
  {
@@ -57,5 +98,6 @@ export default tseslint.config(
57
98
  rules: {
58
99
  'custom/require-skip-comment': 'error',
59
100
  },
60
- }
101
+ },
102
+ eslintConfigPrettier,
61
103
  );
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bluera-knowledge",
3
- "version": "0.9.32",
3
+ "version": "0.9.36",
4
4
  "description": "CLI tool for managing knowledge stores with semantic search",
5
5
  "type": "module",
6
6
  "bin": {
@@ -16,6 +16,9 @@
16
16
  "test": "vitest",
17
17
  "test:run": "vitest run",
18
18
  "test:coverage": "vitest run --coverage",
19
+ "format": "prettier --write \"src/**/*.ts\" \"tests/**/*.ts\"",
20
+ "format:check": "prettier --check \"src/**/*.ts\" \"tests/**/*.ts\"",
21
+ "format:check:quiet": "prettier --check \"src/**/*.ts\" \"tests/**/*.ts\" --log-level warn && echo '✓ Format check passed'",
19
22
  "lint": "eslint src/",
20
23
  "lint:deadcode": "knip --no-exit-code",
21
24
  "lint:deadcode:strict": "knip",
@@ -58,19 +61,23 @@
58
61
  "@eslint/js": "^9.39.2",
59
62
  "@types/babel__core": "^7.20.5",
60
63
  "@types/babel__traverse": "^7.28.0",
61
- "@types/node": "^25.0.2",
64
+ "@types/node": "^25.0.3",
62
65
  "@types/turndown": "^5.0.6",
63
66
  "@vitest/coverage-v8": "^4.0.16",
64
67
  "commit-and-tag-version": "^12.6.1",
65
68
  "eslint": "^9.39.2",
69
+ "eslint-config-prettier": "^10.1.8",
70
+ "eslint-import-resolver-typescript": "^4.4.4",
66
71
  "eslint-plugin-eslint-comments": "^3.2.0",
72
+ "eslint-plugin-import": "^2.32.0",
67
73
  "husky": "^9.1.7",
68
- "knip": "^5.79.0",
74
+ "knip": "^5.80.0",
69
75
  "node-gyp": "^12.1.0",
76
+ "prettier": "^3.7.4",
70
77
  "tsup": "^8.5.1",
71
- "tsx": "^4.19.2",
78
+ "tsx": "^4.21.0",
72
79
  "typescript": "^5.9.3",
73
- "typescript-eslint": "^8.50.0",
80
+ "typescript-eslint": "^8.52.0",
74
81
  "vitest": "^4.0.16"
75
82
  },
76
83
  "dependencies": {
@@ -81,7 +88,7 @@
81
88
  "@hono/node-server": "^1.19.7",
82
89
  "@huggingface/transformers": "^3.8.1",
83
90
  "@lancedb/lancedb": "^0.23.0",
84
- "@modelcontextprotocol/sdk": "^1.25.1",
91
+ "@modelcontextprotocol/sdk": "^1.25.2",
85
92
  "apache-arrow": "^21.1.0",
86
93
  "axios": "^1.13.2",
87
94
  "chalk": "^5.6.2",
@@ -89,17 +96,17 @@
89
96
  "chokidar": "^5.0.0",
90
97
  "cli-table3": "^0.6.5",
91
98
  "commander": "^14.0.2",
92
- "hono": "^4.11.1",
99
+ "hono": "^4.11.3",
93
100
  "node-addon-api": "^8.5.0",
94
101
  "ora": "^9.0.0",
95
- "pino": "^9.6.0",
96
- "pino-roll": "^1.3.0",
102
+ "pino": "^10.1.0",
103
+ "pino-roll": "^4.0.0",
97
104
  "slurp-ai": "^1.0.6",
98
105
  "tree-sitter": "^0.25.0",
99
- "tree-sitter-go": "^0.23.1",
106
+ "tree-sitter-go": "^0.25.0",
100
107
  "tree-sitter-rust": "^0.24.0",
101
108
  "turndown": "^7.2.2",
102
109
  "turndown-plugin-gfm": "^1.0.2",
103
- "zod": "^4.3.4"
110
+ "zod": "^4.3.5"
104
111
  }
105
112
  }
package/plugin.json ADDED
@@ -0,0 +1,8 @@
1
+ {
2
+ "name": "bluera-knowledge",
3
+ "version": "0.9.36",
4
+ "description": "Clone repos, crawl docs, search locally. Fast, authoritative answers for AI coding agents.",
5
+ "commands": "./commands",
6
+ "hooks": "./hooks/hooks.json",
7
+ "mcpServers": "./mcp.plugin.json"
8
+ }
@@ -1,2 +1,2 @@
1
1
  crawl4ai==0.7.8
2
- playwright>=1.40.0
2
+ playwright>=1.57.0
@@ -14,7 +14,7 @@ describe('ASTParser', () => {
14
14
  type: 'function',
15
15
  name: 'hello',
16
16
  exported: false,
17
- async: false
17
+ async: false,
18
18
  });
19
19
  });
20
20
 
@@ -73,7 +73,7 @@ function test() {
73
73
  const nodes = parser.parse(code, 'javascript');
74
74
 
75
75
  // Should not capture arrow functions, only declarations
76
- expect(nodes.filter(n => n.type === 'function')).toHaveLength(0);
76
+ expect(nodes.filter((n) => n.type === 'function')).toHaveLength(0);
77
77
  });
78
78
  });
79
79
 
@@ -86,7 +86,7 @@ function test() {
86
86
  expect(nodes[0]).toMatchObject({
87
87
  type: 'class',
88
88
  name: 'MyClass',
89
- exported: false
89
+ exported: false,
90
90
  });
91
91
  });
92
92
 
@@ -149,7 +149,7 @@ class Test {
149
149
  expect(nodes[0]).toMatchObject({
150
150
  type: 'interface',
151
151
  name: 'User',
152
- exported: false
152
+ exported: false,
153
153
  });
154
154
  });
155
155
 
@@ -236,7 +236,7 @@ function invalid( { syntax error
236
236
  expect(imports[0]).toMatchObject({
237
237
  source: 'module',
238
238
  specifiers: ['foo', 'bar'],
239
- isType: false
239
+ isType: false,
240
240
  });
241
241
  });
242
242
 
@@ -370,7 +370,7 @@ import type { Props } from "./types";
370
370
  const nodes = parser.parse(code, 'javascript');
371
371
 
372
372
  // Anonymous classes don't have an id, should be skipped
373
- expect(nodes.filter(n => n.type === 'class')).toHaveLength(0);
373
+ expect(nodes.filter((n) => n.type === 'class')).toHaveLength(0);
374
374
  });
375
375
 
376
376
  it('handles class with computed property method (non-identifier key)', () => {
@@ -430,7 +430,7 @@ function fn2() {}
430
430
  const nodes = parser.parse(code, 'typescript');
431
431
 
432
432
  expect(nodes).toHaveLength(4);
433
- expect(nodes.map(n => n.name)).toEqual(['fn1', 'Class1', 'Interface1', 'fn2']);
433
+ expect(nodes.map((n) => n.name)).toEqual(['fn1', 'Class1', 'Interface1', 'fn2']);
434
434
  });
435
435
 
436
436
  it('handles anonymous function expressions (no id)', () => {
@@ -438,7 +438,7 @@ function fn2() {}
438
438
  const nodes = parser.parse(code, 'javascript');
439
439
 
440
440
  // Anonymous functions don't have an id, should be skipped
441
- expect(nodes.filter(n => n.name)).toHaveLength(0);
441
+ expect(nodes.filter((n) => n.name)).toHaveLength(0);
442
442
  });
443
443
 
444
444
  it('handles class with constructor', () => {
@@ -451,12 +451,13 @@ function fn2() {}
451
451
 
452
452
  expect(nodes[0]?.name).toBe('MyClass');
453
453
  // Constructor is a method
454
- expect(nodes[0]?.methods?.some(m => m.name === 'constructor')).toBe(true);
454
+ expect(nodes[0]?.methods?.some((m) => m.name === 'constructor')).toBe(true);
455
455
  });
456
456
 
457
457
  it('handles very long file with many declarations', () => {
458
- const functions = Array.from({ length: 50 }, (_, i) =>
459
- `function fn${i}() { return ${i}; }`
458
+ const functions = Array.from(
459
+ { length: 50 },
460
+ (_, i) => `function fn${i}() { return ${i}; }`
460
461
  ).join('\n');
461
462
 
462
463
  const nodes = parser.parse(functions, 'javascript');