@vpxa/kb 0.1.12 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (383)
  1. package/README.md +56 -39
  2. package/package.json +10 -7
  3. package/packages/analyzers/dist/blast-radius-analyzer.d.ts +17 -20
  4. package/packages/analyzers/dist/blast-radius-analyzer.js +6 -12
  5. package/packages/analyzers/dist/dependency-analyzer.d.ts +31 -27
  6. package/packages/analyzers/dist/dependency-analyzer.js +7 -9
  7. package/packages/analyzers/dist/diagram-generator.d.ts +12 -8
  8. package/packages/analyzers/dist/diagram-generator.js +3 -4
  9. package/packages/analyzers/dist/entry-point-analyzer.d.ts +39 -17
  10. package/packages/analyzers/dist/entry-point-analyzer.js +5 -5
  11. package/packages/analyzers/dist/index.d.ts +12 -14
  12. package/packages/analyzers/dist/index.js +1 -1
  13. package/packages/analyzers/dist/knowledge-producer.d.ts +29 -25
  14. package/packages/analyzers/dist/knowledge-producer.js +16 -15
  15. package/packages/analyzers/dist/pattern-analyzer.d.ts +14 -10
  16. package/packages/analyzers/dist/pattern-analyzer.js +3 -5
  17. package/packages/analyzers/dist/regex-call-graph.d.ts +6 -12
  18. package/packages/analyzers/dist/regex-call-graph.js +2 -1
  19. package/packages/analyzers/dist/structure-analyzer.d.ts +13 -9
  20. package/packages/analyzers/dist/structure-analyzer.js +3 -4
  21. package/packages/analyzers/dist/symbol-analyzer.d.ts +12 -8
  22. package/packages/analyzers/dist/symbol-analyzer.js +8 -13
  23. package/packages/analyzers/dist/ts-call-graph.d.ts +16 -13
  24. package/packages/analyzers/dist/ts-call-graph.js +2 -1
  25. package/packages/analyzers/dist/types.d.ts +82 -79
  26. package/packages/analyzers/dist/types.js +1 -0
  27. package/packages/chunker/dist/call-graph-extractor.d.ts +13 -10
  28. package/packages/chunker/dist/call-graph-extractor.js +2 -1
  29. package/packages/chunker/dist/chunker-factory.d.ts +6 -2
  30. package/packages/chunker/dist/chunker-factory.js +2 -1
  31. package/packages/chunker/dist/chunker.interface.d.ts +8 -4
  32. package/packages/chunker/dist/chunker.interface.js +1 -0
  33. package/packages/chunker/dist/code-chunker.d.ts +16 -12
  34. package/packages/chunker/dist/code-chunker.js +12 -14
  35. package/packages/chunker/dist/generic-chunker.d.ts +14 -10
  36. package/packages/chunker/dist/generic-chunker.js +6 -5
  37. package/packages/chunker/dist/index.d.ts +8 -8
  38. package/packages/chunker/dist/index.js +1 -1
  39. package/packages/chunker/dist/markdown-chunker.d.ts +16 -12
  40. package/packages/chunker/dist/markdown-chunker.js +4 -10
  41. package/packages/chunker/dist/treesitter-chunker.d.ts +28 -31
  42. package/packages/chunker/dist/treesitter-chunker.js +7 -8
  43. package/packages/cli/dist/commands/analyze.d.ts +6 -2
  44. package/packages/cli/dist/commands/analyze.js +3 -3
  45. package/packages/cli/dist/commands/context-cmds.d.ts +6 -2
  46. package/packages/cli/dist/commands/context-cmds.js +2 -1
  47. package/packages/cli/dist/commands/environment.d.ts +6 -2
  48. package/packages/cli/dist/commands/environment.js +2 -2
  49. package/packages/cli/dist/commands/execution.d.ts +6 -2
  50. package/packages/cli/dist/commands/execution.js +2 -1
  51. package/packages/cli/dist/commands/graph.d.ts +6 -2
  52. package/packages/cli/dist/commands/graph.js +6 -6
  53. package/packages/cli/dist/commands/init/adapters.d.ts +27 -0
  54. package/packages/cli/dist/commands/init/adapters.js +2 -0
  55. package/packages/cli/dist/commands/init/config.d.ts +11 -0
  56. package/packages/cli/dist/commands/init/config.js +4 -0
  57. package/packages/cli/dist/commands/init/curated.d.ts +8 -0
  58. package/packages/cli/dist/commands/init/curated.js +2 -0
  59. package/packages/cli/dist/commands/init/index.d.ts +23 -0
  60. package/packages/cli/dist/commands/init/index.js +3 -0
  61. package/packages/cli/dist/commands/init/scaffold.d.ts +24 -0
  62. package/packages/cli/dist/commands/init/scaffold.js +2 -0
  63. package/packages/cli/dist/commands/init/templates.d.ts +10 -0
  64. package/packages/cli/dist/commands/init/templates.js +302 -0
  65. package/packages/cli/dist/commands/init.d.ts +9 -3
  66. package/packages/cli/dist/commands/init.js +253 -197
  67. package/packages/cli/dist/commands/knowledge.d.ts +6 -2
  68. package/packages/cli/dist/commands/knowledge.js +2 -1
  69. package/packages/cli/dist/commands/search.d.ts +6 -2
  70. package/packages/cli/dist/commands/search.js +2 -8
  71. package/packages/cli/dist/commands/system.d.ts +6 -2
  72. package/packages/cli/dist/commands/system.js +5 -4
  73. package/packages/cli/dist/commands/workspace.d.ts +6 -2
  74. package/packages/cli/dist/commands/workspace.js +2 -2
  75. package/packages/cli/dist/context.d.ts +7 -4
  76. package/packages/cli/dist/context.js +2 -1
  77. package/packages/cli/dist/helpers.d.ts +51 -47
  78. package/packages/cli/dist/helpers.js +6 -3
  79. package/packages/cli/dist/index.d.ts +4 -1
  80. package/packages/cli/dist/index.js +3 -2
  81. package/packages/cli/dist/kb-init.d.ts +48 -50
  82. package/packages/cli/dist/kb-init.js +2 -1
  83. package/packages/cli/dist/types.d.ts +8 -5
  84. package/packages/cli/dist/types.js +1 -0
  85. package/packages/core/dist/constants.d.ts +36 -33
  86. package/packages/core/dist/constants.js +2 -1
  87. package/packages/core/dist/content-detector.d.ts +10 -5
  88. package/packages/core/dist/content-detector.js +2 -1
  89. package/packages/core/dist/errors.d.ts +15 -12
  90. package/packages/core/dist/errors.js +2 -1
  91. package/packages/core/dist/index.d.ts +6 -6
  92. package/packages/core/dist/index.js +1 -1
  93. package/packages/core/dist/logger.d.ts +16 -7
  94. package/packages/core/dist/logger.js +2 -1
  95. package/packages/core/dist/types.d.ts +108 -90
  96. package/packages/core/dist/types.js +2 -0
  97. package/packages/embeddings/dist/embedder.interface.d.ts +22 -19
  98. package/packages/embeddings/dist/embedder.interface.js +1 -0
  99. package/packages/embeddings/dist/index.d.ts +3 -3
  100. package/packages/embeddings/dist/index.js +1 -1
  101. package/packages/embeddings/dist/onnx-embedder.d.ts +21 -22
  102. package/packages/embeddings/dist/onnx-embedder.js +2 -1
  103. package/packages/enterprise-bridge/dist/cache.d.ts +29 -0
  104. package/packages/enterprise-bridge/dist/cache.js +2 -0
  105. package/packages/enterprise-bridge/dist/er-client.d.ts +38 -0
  106. package/packages/enterprise-bridge/dist/er-client.js +2 -0
  107. package/packages/enterprise-bridge/dist/evolution-collector.d.ts +63 -0
  108. package/packages/enterprise-bridge/dist/evolution-collector.js +2 -0
  109. package/packages/enterprise-bridge/dist/index.d.ts +8 -0
  110. package/packages/enterprise-bridge/dist/index.js +1 -0
  111. package/packages/enterprise-bridge/dist/policy-store.d.ts +46 -0
  112. package/packages/enterprise-bridge/dist/policy-store.js +2 -0
  113. package/packages/enterprise-bridge/dist/push-adapter.d.ts +24 -0
  114. package/packages/enterprise-bridge/dist/push-adapter.js +2 -0
  115. package/packages/enterprise-bridge/dist/result-merger.d.ts +15 -0
  116. package/packages/enterprise-bridge/dist/result-merger.js +2 -0
  117. package/packages/enterprise-bridge/dist/types.d.ts +82 -0
  118. package/packages/enterprise-bridge/dist/types.js +2 -0
  119. package/packages/indexer/dist/file-hasher.d.ts +5 -2
  120. package/packages/indexer/dist/file-hasher.js +2 -1
  121. package/packages/indexer/dist/filesystem-crawler.d.ts +23 -20
  122. package/packages/indexer/dist/filesystem-crawler.js +2 -1
  123. package/packages/indexer/dist/graph-extractor.d.ts +9 -12
  124. package/packages/indexer/dist/graph-extractor.js +2 -1
  125. package/packages/indexer/dist/incremental-indexer.d.ts +49 -43
  126. package/packages/indexer/dist/incremental-indexer.js +2 -1
  127. package/packages/indexer/dist/index.d.ts +5 -5
  128. package/packages/indexer/dist/index.js +1 -1
  129. package/packages/server/dist/api.d.ts +3 -8
  130. package/packages/server/dist/api.js +1 -1
  131. package/packages/server/dist/config.d.ts +6 -2
  132. package/packages/server/dist/config.js +2 -1
  133. package/packages/server/dist/curated-manager.d.ts +79 -76
  134. package/packages/server/dist/curated-manager.js +6 -10
  135. package/packages/server/dist/index.d.ts +1 -2
  136. package/packages/server/dist/index.js +2 -1
  137. package/packages/server/dist/replay-interceptor.d.ts +6 -6
  138. package/packages/server/dist/replay-interceptor.js +2 -1
  139. package/packages/server/dist/resources/resources.d.ts +7 -3
  140. package/packages/server/dist/resources/resources.js +3 -2
  141. package/packages/server/dist/server.d.ts +34 -24
  142. package/packages/server/dist/server.js +2 -1
  143. package/packages/server/dist/tools/analyze.tools.d.ts +14 -10
  144. package/packages/server/dist/tools/analyze.tools.js +2 -1
  145. package/packages/server/dist/tools/audit.tool.d.ts +9 -0
  146. package/packages/server/dist/tools/audit.tool.js +2 -0
  147. package/packages/server/dist/tools/bridge.tools.d.ts +35 -0
  148. package/packages/server/dist/tools/bridge.tools.js +16 -0
  149. package/packages/server/dist/tools/evolution.tools.d.ts +8 -0
  150. package/packages/server/dist/tools/evolution.tools.js +6 -0
  151. package/packages/server/dist/tools/forge.tools.d.ts +13 -11
  152. package/packages/server/dist/tools/forge.tools.js +11 -13
  153. package/packages/server/dist/tools/forget.tool.d.ts +7 -3
  154. package/packages/server/dist/tools/forget.tool.js +2 -7
  155. package/packages/server/dist/tools/graph.tool.d.ts +7 -3
  156. package/packages/server/dist/tools/graph.tool.js +5 -5
  157. package/packages/server/dist/tools/list.tool.d.ts +7 -3
  158. package/packages/server/dist/tools/list.tool.js +3 -8
  159. package/packages/server/dist/tools/lookup.tool.d.ts +7 -3
  160. package/packages/server/dist/tools/lookup.tool.js +3 -9
  161. package/packages/server/dist/tools/onboard.tool.d.ts +8 -4
  162. package/packages/server/dist/tools/onboard.tool.js +3 -2
  163. package/packages/server/dist/tools/policy.tools.d.ts +8 -0
  164. package/packages/server/dist/tools/policy.tools.js +3 -0
  165. package/packages/server/dist/tools/produce.tool.d.ts +6 -2
  166. package/packages/server/dist/tools/produce.tool.js +3 -2
  167. package/packages/server/dist/tools/read.tool.d.ts +7 -3
  168. package/packages/server/dist/tools/read.tool.js +3 -6
  169. package/packages/server/dist/tools/reindex.tool.d.ts +10 -6
  170. package/packages/server/dist/tools/reindex.tool.js +3 -2
  171. package/packages/server/dist/tools/remember.tool.d.ts +8 -3
  172. package/packages/server/dist/tools/remember.tool.js +4 -5
  173. package/packages/server/dist/tools/replay.tool.d.ts +6 -2
  174. package/packages/server/dist/tools/replay.tool.js +3 -6
  175. package/packages/server/dist/tools/search.tool.d.ts +10 -4
  176. package/packages/server/dist/tools/search.tool.js +7 -18
  177. package/packages/server/dist/tools/status.tool.d.ts +7 -3
  178. package/packages/server/dist/tools/status.tool.js +3 -3
  179. package/packages/server/dist/tools/toolkit.tools.d.ts +36 -34
  180. package/packages/server/dist/tools/toolkit.tools.js +20 -21
  181. package/packages/server/dist/tools/update.tool.d.ts +7 -3
  182. package/packages/server/dist/tools/update.tool.js +2 -6
  183. package/packages/server/dist/tools/utility.tools.d.ts +15 -14
  184. package/packages/server/dist/tools/utility.tools.js +11 -23
  185. package/packages/server/dist/version-check.d.ts +5 -1
  186. package/packages/server/dist/version-check.js +2 -1
  187. package/packages/store/dist/graph-store.interface.d.ts +89 -86
  188. package/packages/store/dist/graph-store.interface.js +1 -0
  189. package/packages/store/dist/index.d.ts +6 -6
  190. package/packages/store/dist/index.js +1 -1
  191. package/packages/store/dist/lance-store.d.ts +37 -30
  192. package/packages/store/dist/lance-store.js +2 -1
  193. package/packages/store/dist/sqlite-graph-store.d.ts +43 -46
  194. package/packages/store/dist/sqlite-graph-store.js +14 -13
  195. package/packages/store/dist/store-factory.d.ts +11 -7
  196. package/packages/store/dist/store-factory.js +2 -1
  197. package/packages/store/dist/store.interface.d.ts +47 -44
  198. package/packages/store/dist/store.interface.js +1 -0
  199. package/packages/tools/dist/audit.d.ts +66 -0
  200. package/packages/tools/dist/audit.js +7 -0
  201. package/packages/tools/dist/batch.d.ts +20 -17
  202. package/packages/tools/dist/batch.js +2 -1
  203. package/packages/tools/dist/changelog.d.ts +29 -26
  204. package/packages/tools/dist/changelog.js +3 -2
  205. package/packages/tools/dist/check.d.ts +45 -22
  206. package/packages/tools/dist/check.js +3 -2
  207. package/packages/tools/dist/checkpoint.d.ts +17 -14
  208. package/packages/tools/dist/checkpoint.js +2 -2
  209. package/packages/tools/dist/codemod.d.ts +35 -32
  210. package/packages/tools/dist/codemod.js +3 -2
  211. package/packages/tools/dist/compact.d.ts +34 -35
  212. package/packages/tools/dist/compact.js +3 -2
  213. package/packages/tools/dist/data-transform.d.ts +10 -7
  214. package/packages/tools/dist/data-transform.js +2 -1
  215. package/packages/tools/dist/dead-symbols.d.ts +29 -17
  216. package/packages/tools/dist/dead-symbols.js +3 -2
  217. package/packages/tools/dist/delegate.d.ts +26 -23
  218. package/packages/tools/dist/delegate.js +2 -5
  219. package/packages/tools/dist/diff-parse.d.ts +24 -21
  220. package/packages/tools/dist/diff-parse.js +4 -3
  221. package/packages/tools/dist/digest.d.ts +43 -45
  222. package/packages/tools/dist/digest.js +5 -5
  223. package/packages/tools/dist/encode.d.ts +11 -8
  224. package/packages/tools/dist/encode.js +2 -1
  225. package/packages/tools/dist/env-info.d.ts +25 -22
  226. package/packages/tools/dist/env-info.js +2 -1
  227. package/packages/tools/dist/eval.d.ts +13 -10
  228. package/packages/tools/dist/eval.js +3 -3
  229. package/packages/tools/dist/evidence-map.d.ts +64 -61
  230. package/packages/tools/dist/evidence-map.js +3 -3
  231. package/packages/tools/dist/file-cache.d.ts +42 -0
  232. package/packages/tools/dist/file-cache.js +4 -0
  233. package/packages/tools/dist/file-summary.d.ts +34 -29
  234. package/packages/tools/dist/file-summary.js +3 -2
  235. package/packages/tools/dist/file-walk.d.ts +6 -3
  236. package/packages/tools/dist/file-walk.js +2 -1
  237. package/packages/tools/dist/find-examples.d.ts +26 -21
  238. package/packages/tools/dist/find-examples.js +4 -3
  239. package/packages/tools/dist/find.d.ts +39 -40
  240. package/packages/tools/dist/find.js +2 -1
  241. package/packages/tools/dist/forge-classify.d.ts +35 -38
  242. package/packages/tools/dist/forge-classify.js +3 -2
  243. package/packages/tools/dist/forge-ground.d.ts +58 -60
  244. package/packages/tools/dist/forge-ground.js +2 -1
  245. package/packages/tools/dist/git-context.d.ts +22 -19
  246. package/packages/tools/dist/git-context.js +4 -3
  247. package/packages/tools/dist/graph-query.d.ts +75 -78
  248. package/packages/tools/dist/graph-query.js +2 -1
  249. package/packages/tools/dist/guide.d.ts +26 -0
  250. package/packages/tools/dist/guide.js +2 -0
  251. package/packages/tools/dist/health.d.ts +13 -10
  252. package/packages/tools/dist/health.js +3 -1
  253. package/packages/tools/dist/http-request.d.ts +20 -17
  254. package/packages/tools/dist/http-request.js +2 -1
  255. package/packages/tools/dist/index.d.ts +54 -49
  256. package/packages/tools/dist/index.js +1 -1
  257. package/packages/tools/dist/lane.d.ts +28 -25
  258. package/packages/tools/dist/lane.js +7 -7
  259. package/packages/tools/dist/measure.d.ts +32 -29
  260. package/packages/tools/dist/measure.js +3 -2
  261. package/packages/tools/dist/onboard.d.ts +29 -26
  262. package/packages/tools/dist/onboard.js +18 -41
  263. package/packages/tools/dist/parse-output.d.ts +48 -45
  264. package/packages/tools/dist/parse-output.js +3 -2
  265. package/packages/tools/dist/path-resolver.d.ts +15 -0
  266. package/packages/tools/dist/path-resolver.js +2 -0
  267. package/packages/tools/dist/process-manager.d.ts +18 -15
  268. package/packages/tools/dist/process-manager.js +2 -1
  269. package/packages/tools/dist/queue.d.ts +28 -25
  270. package/packages/tools/dist/queue.js +2 -2
  271. package/packages/tools/dist/regex-test.d.ts +26 -23
  272. package/packages/tools/dist/regex-test.js +2 -1
  273. package/packages/tools/dist/rename.d.ts +28 -25
  274. package/packages/tools/dist/rename.js +3 -2
  275. package/packages/tools/dist/replay.d.ts +33 -30
  276. package/packages/tools/dist/replay.js +5 -6
  277. package/packages/tools/dist/response-envelope.d.ts +44 -0
  278. package/packages/tools/dist/response-envelope.js +2 -0
  279. package/packages/tools/dist/schema-validate.d.ts +15 -12
  280. package/packages/tools/dist/schema-validate.js +2 -1
  281. package/packages/tools/dist/scope-map.d.ts +45 -45
  282. package/packages/tools/dist/scope-map.js +2 -1
  283. package/packages/tools/dist/snippet.d.ts +26 -24
  284. package/packages/tools/dist/snippet.js +2 -1
  285. package/packages/tools/dist/stash.d.ts +13 -10
  286. package/packages/tools/dist/stash.js +2 -2
  287. package/packages/tools/dist/stratum-card.d.ts +27 -27
  288. package/packages/tools/dist/stratum-card.js +4 -5
  289. package/packages/tools/dist/symbol.d.ts +29 -25
  290. package/packages/tools/dist/symbol.js +4 -3
  291. package/packages/tools/dist/test-run.d.ts +19 -15
  292. package/packages/tools/dist/test-run.js +3 -2
  293. package/packages/tools/dist/text-utils.d.ts +6 -3
  294. package/packages/tools/dist/text-utils.js +3 -2
  295. package/packages/tools/dist/time-utils.d.ts +15 -12
  296. package/packages/tools/dist/time-utils.js +2 -1
  297. package/packages/tools/dist/trace.d.ts +24 -20
  298. package/packages/tools/dist/trace.js +3 -2
  299. package/packages/tools/dist/truncation.d.ts +14 -2
  300. package/packages/tools/dist/truncation.js +8 -14
  301. package/packages/tools/dist/watch.d.ts +28 -25
  302. package/packages/tools/dist/watch.js +2 -1
  303. package/packages/tools/dist/web-fetch.d.ts +35 -32
  304. package/packages/tools/dist/web-fetch.js +7 -12
  305. package/packages/tools/dist/web-search.d.ts +16 -13
  306. package/packages/tools/dist/web-search.js +2 -1
  307. package/packages/tools/dist/workset.d.ts +19 -16
  308. package/packages/tools/dist/workset.js +2 -2
  309. package/packages/tui/dist/App-BAlmxCCw.js +3 -0
  310. package/packages/tui/dist/App.d.ts +11 -5
  311. package/packages/tui/dist/App.js +1 -450
  312. package/packages/tui/dist/CuratedPanel-sYdZAICX.js +3 -0
  313. package/packages/tui/dist/LogPanel-DVB8Sv46.js +4 -0
  314. package/packages/tui/dist/SearchPanel-DREo6zgt.js +3 -0
  315. package/packages/tui/dist/StatusPanel-2ex8fLOO.js +3 -0
  316. package/packages/tui/dist/chunk-D6axbAb-.js +2 -0
  317. package/packages/tui/dist/devtools-DUyj952l.js +8 -0
  318. package/packages/tui/dist/embedder.interface-D4ew0HPW.d.ts +29 -0
  319. package/packages/tui/dist/index-B9VpfVPP.d.ts +14 -0
  320. package/packages/tui/dist/index.d.ts +3 -19
  321. package/packages/tui/dist/index.js +2 -476
  322. package/packages/tui/dist/jsx-runtime-Cof-kwFn.js +317 -0
  323. package/packages/tui/dist/panels/CuratedPanel.d.ts +11 -5
  324. package/packages/tui/dist/panels/CuratedPanel.js +1 -371
  325. package/packages/tui/dist/panels/LogPanel.d.ts +7 -2
  326. package/packages/tui/dist/panels/LogPanel.js +1 -449
  327. package/packages/tui/dist/panels/SearchPanel.d.ts +14 -7
  328. package/packages/tui/dist/panels/SearchPanel.js +1 -372
  329. package/packages/tui/dist/panels/StatusPanel.d.ts +11 -5
  330. package/packages/tui/dist/panels/StatusPanel.js +1 -371
  331. package/packages/tui/dist/store.interface-CnY6SPOH.d.ts +151 -0
  332. package/scaffold/adapters/claude-code.mjs +20 -0
  333. package/scaffold/adapters/copilot.mjs +320 -0
  334. package/scaffold/copilot/agents/Architect-Reviewer-Alpha.agent.md +14 -0
  335. package/scaffold/copilot/agents/Architect-Reviewer-Beta.agent.md +14 -0
  336. package/scaffold/copilot/agents/Code-Reviewer-Alpha.agent.md +12 -0
  337. package/scaffold/copilot/agents/Code-Reviewer-Beta.agent.md +12 -0
  338. package/scaffold/copilot/agents/Debugger.agent.md +31 -0
  339. package/scaffold/copilot/agents/Documenter.agent.md +35 -0
  340. package/scaffold/copilot/agents/Explorer.agent.md +50 -0
  341. package/scaffold/copilot/agents/Frontend.agent.md +29 -0
  342. package/scaffold/copilot/agents/Implementer.agent.md +31 -0
  343. package/scaffold/copilot/agents/Orchestrator.agent.md +96 -0
  344. package/scaffold/copilot/agents/Planner.agent.md +45 -0
  345. package/scaffold/copilot/agents/README.md +57 -0
  346. package/scaffold/copilot/agents/Refactor.agent.md +30 -0
  347. package/scaffold/copilot/agents/Researcher-Alpha.agent.md +12 -0
  348. package/scaffold/copilot/agents/Researcher-Beta.agent.md +12 -0
  349. package/scaffold/copilot/agents/Researcher-Delta.agent.md +12 -0
  350. package/scaffold/copilot/agents/Researcher-Gamma.agent.md +12 -0
  351. package/scaffold/copilot/agents/Security.agent.md +42 -0
  352. package/scaffold/copilot/agents/_shared/adr-protocol.md +91 -0
  353. package/scaffold/copilot/agents/_shared/architect-reviewer-base.md +50 -0
  354. package/scaffold/copilot/agents/_shared/code-agent-base.md +70 -0
  355. package/scaffold/copilot/agents/_shared/code-reviewer-base.md +54 -0
  356. package/scaffold/copilot/agents/_shared/decision-protocol.md +27 -0
  357. package/scaffold/copilot/agents/_shared/forge-protocol.md +46 -0
  358. package/scaffold/copilot/agents/_shared/researcher-base.md +61 -0
  359. package/scaffold/copilot/agents/templates/adr-template.md +27 -0
  360. package/scaffold/copilot/agents/templates/execution-state.md +25 -0
  361. package/scaffold/copilot/prompts/ask.prompt.md +20 -0
  362. package/scaffold/copilot/prompts/debug.prompt.md +25 -0
  363. package/scaffold/copilot/prompts/design.prompt.md +22 -0
  364. package/scaffold/copilot/prompts/implement.prompt.md +26 -0
  365. package/scaffold/copilot/prompts/plan.prompt.md +24 -0
  366. package/scaffold/copilot/prompts/review.prompt.md +31 -0
  367. package/scaffold/definitions/agents.mjs +165 -0
  368. package/scaffold/definitions/bodies.mjs +292 -0
  369. package/scaffold/definitions/hooks.mjs +43 -0
  370. package/scaffold/definitions/models.mjs +56 -0
  371. package/scaffold/definitions/plugins.mjs +24 -0
  372. package/scaffold/definitions/prompts.mjs +145 -0
  373. package/scaffold/definitions/protocols.mjs +322 -0
  374. package/scaffold/definitions/tools.mjs +176 -0
  375. package/scaffold/generate.mjs +74 -0
  376. package/skills/brainstorming/SKILL.md +259 -0
  377. package/skills/brainstorming/scripts/frame-template.html +365 -0
  378. package/skills/brainstorming/scripts/helper.js +216 -0
  379. package/skills/brainstorming/scripts/server.cjs +9 -0
  380. package/skills/brainstorming/scripts/server.src.cjs +249 -0
  381. package/skills/brainstorming/spec-document-reviewer-prompt.md +49 -0
  382. package/skills/brainstorming/visual-companion.md +430 -0
  383. package/skills/knowledge-base/SKILL.md +34 -21
@@ -0,0 +1,38 @@
1
+ import { ERBridgeConfig, ERPushRequest, ERPushResponse, ERSearchResult } from "./types.js";
2
+
3
+ //#region packages/enterprise-bridge/src/er-client.d.ts
4
+ /**
+  * HTTP client for the Enterprise Repository (ER) service.
+  * Per er-client.js: every request gets an AbortController timeout, up to
+  * 3 attempts with jittered exponential backoff (numeric Retry-After honored),
+  * and a circuit breaker that opens after FAILURE_THRESHOLD consecutive
+  * failures for RESET_TIMEOUT_MS.
+  */
+ declare class ERClient {
5
+ private readonly baseUrl;
6
+ private readonly apiKey;
7
+ private readonly timeoutMs;
8
+ /** Circuit state: "closed" | "open" | "half-open" (see er-client.js) */
+ private circuitState;
9
+ private consecutiveFailures;
10
+ /** Epoch ms after which an open circuit allows a half-open probe */
+ private openUntil;
11
+ private halfOpenProbeInFlight;
12
+ /** Consecutive failures that open the circuit (3 in er-client.js) */
+ private static readonly FAILURE_THRESHOLD;
13
+ /** How long the circuit stays open (60 000 ms in er-client.js) */
+ private static readonly RESET_TIMEOUT_MS;
14
+ /** Throws on non-http(s) URLs and on plain http to non-localhost hosts (see er-client.js) */
+ constructor(config: ERBridgeConfig);
15
+ /** Search ER knowledge base */
16
+ search(query: string, maxResults?: number): Promise<ERSearchResult[]>;
17
+ /** Push curated knowledge to ER via MCP tools endpoint */
18
+ push(request: ERPushRequest): Promise<ERPushResponse>;
19
+ /** Explicit pull from ER (bypasses local search, for cross-repo context) */
20
+ pull(query: string, maxResults?: number): Promise<ERSearchResult[]>;
21
+ /** Check ER health */
22
+ health(): Promise<{
23
+ healthy: boolean;
24
+ status?: number;
25
+ }>;
26
+ /** Check available MCP tools (verify curated_remember exists) */
27
+ listTools(): Promise<string[]>;
28
+ private fetch;
29
+ private checkCircuit;
30
+ private recordSuccess;
31
+ private recordFailure;
32
+ private backoffMs;
33
+ private parseRetryAfter;
34
+ private sleep;
35
+ }
36
+ //#endregion
37
+ export { ERClient };
38
+ //# sourceMappingURL=er-client.d.ts.map
@@ -0,0 +1,2 @@
1
import { ERTransientError } from "./types.js";

/**
 * HTTP client for the Enterprise Repository (ER) service.
 *
 * Every request gets:
 *  - a timeout via AbortController (`timeoutMs`),
 *  - up to 3 attempts with jittered exponential backoff (a numeric
 *    Retry-After header, capped at 30s, overrides the computed backoff),
 *  - a circuit breaker: opens after 3 consecutive failures, stays open for
 *    60s, then lets a single half-open probe through.
 */
class ERClient {
  baseUrl;
  apiKey;
  timeoutMs;
  // "closed" | "open" | "half-open"
  circuitState = `closed`;
  consecutiveFailures = 0;
  // Epoch ms after which an open circuit allows a probe.
  openUntil = 0;
  halfOpenProbeInFlight = false;
  static FAILURE_THRESHOLD = 3;
  static RESET_TIMEOUT_MS = 6e4;

  /**
   * @param {object} config - { baseUrl, apiKey, timeoutMs }
   * @throws {Error} for non-http(s) protocols, or plain http to a
   *   non-loopback host (would leak the API key in cleartext).
   */
  constructor(config) {
    const url = new URL(config.baseUrl);
    if (url.protocol !== `http:` && url.protocol !== `https:`) {
      throw new Error(`Unsupported protocol: ${url.protocol} — only http/https allowed`);
    }
    const isLoopback =
      url.hostname === `localhost` || url.hostname === `127.0.0.1` || url.hostname === `::1`;
    if (url.protocol === `http:` && !isLoopback) {
      throw new Error(
        `Non-TLS (http://) ER endpoints are only allowed for localhost. Use https:// to avoid leaking API keys.`
      );
    }
    this.baseUrl = config.baseUrl.replace(/\/+$/, ``);
    this.apiKey = config.apiKey;
    this.timeoutMs = config.timeoutMs;
  }

  /**
   * Search the ER knowledge base.
   * Normalizes each hit to { content, sourcePath, score, metadata }; score
   * falls back to `confidence`, then 0.
   * @throws {Error} on a non-OK HTTP response.
   */
  async search(query, maxResults = 5) {
    const res = await this.fetch(`/api/v1/search`, {
      method: `POST`,
      body: JSON.stringify({ query, maxResults, generateResponse: false }),
    });
    if (!res.ok) {
      throw new Error(`ER search failed: ${res.status} ${res.statusText}`);
    }
    const payload = await res.json();
    const sources = payload.sources ?? [];
    return sources.map((src) => {
      const score = typeof src.score === `number` ? src.score : undefined;
      const confidence = typeof src.confidence === `number` ? src.confidence : undefined;
      return {
        content: src.content,
        sourcePath: src.metadata?.sourceUri ?? `unknown`,
        score: score ?? confidence ?? 0,
        metadata: src.metadata,
      };
    });
  }

  /**
   * Push curated knowledge to ER through its MCP tools endpoint.
   * Never throws on HTTP failure — returns a { pushed: false } envelope instead.
   */
  async push(request) {
    const res = await this.fetch(`/mcp/tools`, {
      method: `POST`,
      body: JSON.stringify({
        name: `curated_remember`,
        arguments: {
          title: request.title,
          content: request.content,
          category: request.category ?? `conventions`,
          tags: [...(request.tags ?? []), `pushed-from-kb`],
        },
      }),
    });
    if (!res.ok) {
      return {
        pushed: false,
        status: `failed`,
        timestamp: new Date().toISOString(),
        error: `ER push failed: ${res.status} ${res.statusText}`,
      };
    }
    const body = await res.json();
    return {
      pushed: true,
      status: `stored`,
      remotePath: typeof body.path === `string` ? body.path : undefined,
      timestamp: new Date().toISOString(),
    };
  }

  /** Explicit pull from ER — identical to search(), larger default page. */
  async pull(query, maxResults = 10) {
    return this.search(query, maxResults);
  }

  /** Liveness probe; never throws. */
  async health() {
    try {
      const res = await this.fetch(`/api/v1/health`, { method: `GET` });
      return { healthy: res.ok, status: res.status };
    } catch {
      return { healthy: false };
    }
  }

  /** List tool names exposed by the ER MCP endpoint; [] on any failure. */
  async listTools() {
    try {
      const res = await this.fetch(`/mcp/tools`, { method: `GET` });
      if (!res.ok) return [];
      const body = await res.json();
      return (body.tools ?? []).map((tool) => tool.name);
    } catch {
      return [];
    }
  }

  /**
   * Core transport: global fetch + auth headers + timeout + retry.
   * 2xx and non-retryable 4xx (anything except 429) resolve immediately and
   * count as circuit success; 5xx, 429, and network errors are retried up to
   * twice, then recorded as circuit failure.
   */
  async fetch(path, init) {
    this.checkCircuit();
    let lastError;
    for (let attempt = 0; attempt <= 2; attempt++) {
      const controller = new AbortController();
      const timer = setTimeout(() => controller.abort(), this.timeoutMs);
      try {
        const res = await fetch(`${this.baseUrl}${path}`, {
          ...init,
          headers: {
            "Content-Type": `application/json`,
            "X-Api-Key": this.apiKey,
            "User-Agent": `kb-enterprise-bridge/1.0`,
            ...init.headers,
          },
          signal: controller.signal,
        });
        const nonRetryable4xx = res.status >= 400 && res.status < 500 && res.status !== 429;
        if (res.ok || nonRetryable4xx) {
          this.recordSuccess();
          return res;
        }
        if (attempt < 2) {
          await this.sleep(this.parseRetryAfter(res) ?? this.backoffMs(attempt));
          continue;
        }
        this.recordFailure();
        return res;
      } catch (err) {
        lastError = err;
        if (attempt < 2) await this.sleep(this.backoffMs(attempt));
      } finally {
        clearTimeout(timer);
      }
    }
    this.recordFailure();
    throw lastError ?? new Error(`Fetch failed after retries`);
  }

  /** Gate every request on circuit state; throws ERTransientError while open. */
  checkCircuit() {
    if (this.circuitState !== `open`) return;
    if (Date.now() >= this.openUntil) {
      if (this.halfOpenProbeInFlight) {
        throw new ERTransientError(`ER circuit breaker is half-open — probe in progress`);
      }
      this.circuitState = `half-open`;
      this.halfOpenProbeInFlight = true;
    } else {
      throw new ERTransientError(`ER circuit breaker is open — skipping request`);
    }
  }

  recordSuccess() {
    this.consecutiveFailures = 0;
    this.circuitState = `closed`;
    this.halfOpenProbeInFlight = false;
  }

  recordFailure() {
    this.consecutiveFailures++;
    this.halfOpenProbeInFlight = false;
    if (this.consecutiveFailures >= ERClient.FAILURE_THRESHOLD) {
      this.circuitState = `open`;
      this.openUntil = Date.now() + ERClient.RESET_TIMEOUT_MS;
    }
  }

  /** 500ms * 3^attempt with ±25% uniform jitter, rounded. */
  backoffMs(attempt) {
    const base = 500 * 3 ** attempt;
    const jitter = base * 0.25 * (Math.random() * 2 - 1);
    return Math.round(base + jitter);
  }

  /**
   * Numeric Retry-After header (seconds) → ms, capped at 30s.
   * Returns undefined when absent or non-numeric (HTTP-date form is not parsed).
   */
  parseRetryAfter(res) {
    const header = res.headers.get(`Retry-After`);
    if (!header) return;
    const seconds = Number(header);
    if (!Number.isNaN(seconds) && seconds >= 0) return Math.min(seconds * 1e3, 3e4);
  }

  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
}

export { ERClient };
2
+ //# sourceMappingURL=er-client.js.map
@@ -0,0 +1,63 @@
1
+ //#region packages/enterprise-bridge/src/evolution-collector.d.ts
2
+ /** Evolution data collector — tracks usage metrics for LLM review */
3
+ interface EvolutionMetrics {
4
+ /** Search metrics */
5
+ search: {
6
+ totalSearches: number;
7
+ erFallbackCount: number;
8
+ erFallbackRate: number;
9
+ erCacheHitCount: number;
10
+ /** NOTE(review): per evolution-collector.js this is hits / fallback count, not hits / total searches */
+ erCacheHitRate: number;
11
+ topMissedQueries: Array<{
12
+ query: string;
13
+ count: number;
14
+ }>;
15
+ };
16
+ /** Push metrics */
17
+ push: {
18
+ totalPushes: number;
19
+ successCount: number;
20
+ failCount: number;
21
+ pushRate: number;
22
+ /** Classification match rate — how often rules fired vs knowledge stored */
+ classificationMatchRate: number;
23
+ /** Push acceptance rate — how often LLM decided to push after recommendation */
+ pushAcceptanceRate: number;
24
+ };
25
+ /** Rule effectiveness */
26
+ rules: {
27
+ matchCounts: Record<string, number>;
28
+ pushCounts: Record<string, number>;
29
+ /** Rules that fire often but rarely lead to pushes (potential false positives) */
+ lowConversionRules: Array<{
30
+ ruleId: string;
31
+ matchCount: number;
32
+ pushCount: number;
33
+ conversionRate: number;
34
+ }>;
35
+ };
36
+ /** Summary period */
37
+ period: {
38
+ startedAt: string;
39
+ queriedAt: string;
40
+ totalEvents: number;
41
+ };
42
+ }
43
+ /**
+  * In-memory event collector; aggregates search / classification / push
+  * events into EvolutionMetrics. Per evolution-collector.js each event list
+  * is capped at the most recent 500 entries.
+  */
+ declare class EvolutionCollector {
44
+ private searchEvents;
45
+ private classificationEvents;
46
+ private pushEvents;
47
+ /** ISO timestamp captured at construction; survives reset() */
+ private readonly startedAt;
48
+ constructor();
49
+ /** Record a search event */
50
+ recordSearch(query: string, erFallbackTriggered: boolean, erCacheHit: boolean): void;
51
+ /** Record a classification event (when remember returns classification signals) */
52
+ recordClassification(entryTitle: string, matchingRuleIds: string[], pushRecommended: boolean): void;
53
+ /** Record a push event */
54
+ recordPush(entryId: string, success: boolean, ruleId?: string): void;
55
+ /** Get aggregated metrics for LLM review */
56
+ getMetrics(): EvolutionMetrics;
57
+ /** Reset all collected data */
58
+ reset(): void;
59
+ /** Caps each event list at 500 entries (see evolution-collector.js) */
+ private trimEvents;
60
+ }
61
+ //#endregion
62
+ export { EvolutionCollector, EvolutionMetrics };
63
+ //# sourceMappingURL=evolution-collector.d.ts.map
@@ -0,0 +1,2 @@
1
+ var e=class{searchEvents=[];classificationEvents=[];pushEvents=[];startedAt;constructor(){this.startedAt=new Date().toISOString()}recordSearch(e,t,n){this.searchEvents.push({query:e.toLowerCase().trim(),erFallbackTriggered:t,erCacheHit:n,timestamp:Date.now()}),this.trimEvents()}recordClassification(e,t,n){this.classificationEvents.push({entryTitle:e,matchingRuleIds:t,pushRecommended:n,timestamp:Date.now()}),this.trimEvents()}recordPush(e,t,n){this.pushEvents.push({entryId:e,ruleId:n,success:t,timestamp:Date.now()}),this.trimEvents()}getMetrics(){let e=this.searchEvents.length,t=this.searchEvents.filter(e=>e.erFallbackTriggered).length,n=this.searchEvents.filter(e=>e.erCacheHit).length,r=new Map;for(let e of this.searchEvents)e.erFallbackTriggered&&r.set(e.query,(r.get(e.query)??0)+1);let i=[...r.entries()].sort((e,t)=>t[1]-e[1]).slice(0,20).map(([e,t])=>({query:e,count:t})),a=this.pushEvents.length,o=this.pushEvents.filter(e=>e.success).length,s=this.classificationEvents.length,c=this.classificationEvents.filter(e=>e.pushRecommended).length,l={},u={};for(let e of this.classificationEvents)for(let t of e.matchingRuleIds)l[t]=(l[t]??0)+1;for(let e of this.pushEvents)e.ruleId&&(u[e.ruleId]=(u[e.ruleId]??0)+1);let d=Object.entries(l).map(([e,t])=>{let n=u[e]??0;return{ruleId:e,matchCount:t,pushCount:n,conversionRate:t>0?n/t:0}}).filter(e=>e.matchCount>=3&&e.conversionRate<.3).sort((e,t)=>e.conversionRate-t.conversionRate);return{search:{totalSearches:e,erFallbackCount:t,erFallbackRate:e>0?t/e:0,erCacheHitCount:n,erCacheHitRate:t>0?n/t:0,topMissedQueries:i},push:{totalPushes:a,successCount:o,failCount:a-o,pushRate:s>0?a/s:0,classificationMatchRate:s>0?c/s:0,pushAcceptanceRate:c>0?a/c:0},rules:{matchCounts:l,pushCounts:u,lowConversionRules:d},period:{startedAt:this.startedAt,queriedAt:new 
Date().toISOString(),totalEvents:this.searchEvents.length+this.classificationEvents.length+this.pushEvents.length}}}reset(){this.searchEvents=[],this.classificationEvents=[],this.pushEvents=[]}trimEvents(){this.searchEvents.length>500&&(this.searchEvents=this.searchEvents.slice(-500)),this.classificationEvents.length>500&&(this.classificationEvents=this.classificationEvents.slice(-500)),this.pushEvents.length>500&&(this.pushEvents=this.pushEvents.slice(-500))}};export{e as EvolutionCollector};
2
+ //# sourceMappingURL=evolution-collector.js.map
@@ -0,0 +1,8 @@
1
+ import { ERBridgeConfig, ERCacheEntry, ERPushRequest, ERPushResponse, ERSearchResponse, ERSearchResult, ERSyncEntry, ERTransientError, MergedSearchResult } from "./types.js";
2
+ import { ERCache } from "./cache.js";
3
+ import { ERClient } from "./er-client.js";
4
+ import { EvolutionCollector, EvolutionMetrics } from "./evolution-collector.js";
5
+ import { ClassificationResult, ClassificationRule, PolicyStore } from "./policy-store.js";
6
+ import { PushAdapter } from "./push-adapter.js";
7
+ import { mergeResults } from "./result-merger.js";
8
+ export { type ClassificationResult, type ClassificationRule, type ERBridgeConfig, ERCache, type ERCacheEntry, ERClient, type ERPushRequest, type ERPushResponse, type ERSearchResponse, type ERSearchResult, type ERSyncEntry, ERTransientError, EvolutionCollector, type EvolutionMetrics, type MergedSearchResult, PolicyStore, PushAdapter, mergeResults };
@@ -0,0 +1 @@
1
+ import{ERCache as e}from"./cache.js";import{ERTransientError as t}from"./types.js";import{ERClient as n}from"./er-client.js";import{EvolutionCollector as r}from"./evolution-collector.js";import{PolicyStore as i}from"./policy-store.js";import{PushAdapter as a}from"./push-adapter.js";import{mergeResults as o}from"./result-merger.js";export{e as ERCache,n as ERClient,t as ERTransientError,r as EvolutionCollector,i as PolicyStore,a as PushAdapter,o as mergeResults};
@@ -0,0 +1,46 @@
1
+ //#region packages/enterprise-bridge/src/policy-store.d.ts
2
+ /** A single classification rule */
3
+ interface ClassificationRule {
4
+ id: string;
5
+ patterns: string[];
6
+ category: string;
7
+ pushWeight: number;
8
+ description: string;
9
+ examples: string[];
10
+ autoPush: false;
11
+ enabled: boolean;
12
+ createdAt: string;
13
+ updatedAt: string;
14
+ }
15
+ /** Classification result for a knowledge entry */
16
+ interface ClassificationResult {
17
+ matchingRules: Array<{
18
+ ruleId: string;
19
+ category: string;
20
+ pushWeight: number;
21
+ matchedPatterns: string[];
22
+ }>;
23
+ pushRecommended: boolean;
24
+ maxPushWeight: number;
25
+ }
26
+ declare class PolicyStore {
27
+ private rules;
28
+ private readonly rulesPath;
29
+ constructor(curatedPath: string);
30
+ /** Classify a knowledge entry against all enabled rules */
31
+ classify(title: string, content: string, tags: string[]): ClassificationResult;
32
+ /** Get all rules */
33
+ getRules(): ClassificationRule[];
34
+ /** Get a single rule by ID */
35
+ getRule(ruleId: string): ClassificationRule | undefined;
36
+ /** Update an existing rule */
37
+ updateRule(ruleId: string, changes: Partial<Omit<ClassificationRule, 'id' | 'createdAt' | 'autoPush'>>): ClassificationRule | undefined;
38
+ /** Add a new rule */
39
+ addRule(rule: Omit<ClassificationRule, 'createdAt' | 'updatedAt' | 'autoPush'>): ClassificationRule;
40
+ /** Delete a rule */
41
+ deleteRule(ruleId: string): boolean;
42
+ private save;
43
+ }
44
+ //#endregion
45
+ export { ClassificationResult, ClassificationRule, PolicyStore };
46
+ //# sourceMappingURL=policy-store.d.ts.map
@@ -0,0 +1,2 @@
1
+ import{existsSync as e,mkdirSync as t,readFileSync as n,writeFileSync as r}from"node:fs";import{join as i}from"node:path";import{createLogger as a}from"../../core/dist/index.js";const o=a(`bridge`),s=()=>new Date().toISOString(),c=[{id:`cross-repo-contract`,patterns:[`contract`,`interface`,`api`,`event schema`,`publishes`,`subscribes`],category:`contract`,pushWeight:.8,description:`Knowledge describing cross-service interfaces`,examples:[`Service X publishes OrderCreated event with fields...`],autoPush:!1,enabled:!0,createdAt:s(),updatedAt:s()},{id:`architecture-decision`,patterns:[`decided`,`chose`,`tradeoff`,`adr`,`because we need`],category:`decision`,pushWeight:.7,description:`Architecture decisions that affect the ecosystem`,examples:[`We decided to use event sourcing because...`],autoPush:!1,enabled:!0,createdAt:s(),updatedAt:s()},{id:`shared-pattern`,patterns:[`pattern`,`convention`,`standard`,`guideline`,`always use`],category:`pattern`,pushWeight:.6,description:`Patterns and conventions that maintain consistency across repos`,examples:[`Always use the Result type for error handling...`],autoPush:!1,enabled:!0,createdAt:s(),updatedAt:s()},{id:`company-term`,patterns:[`means`,`defined as`,`acronym`,`refers to`],category:`glossary`,pushWeight:.7,description:`Shared vocabulary and terminology definitions`,examples:[`DDD means Domain-Driven Design in our context...`],autoPush:!1,enabled:!0,createdAt:s(),updatedAt:s()},{id:`implementation-detail`,patterns:[`private`,`internal`,`helper`,`todo`,`hack`,`workaround`],category:`local-only`,pushWeight:0,description:`Local-only implementation details — never push`,examples:[`TODO: refactor this helper`,`Internal workaround for...`],autoPush:!1,enabled:!0,createdAt:s(),updatedAt:s()}];var l=class{rules=[];rulesPath;constructor(r){let a=i(r,`_policy`);if(this.rulesPath=i(a,`rules.json`),e(a)||t(a,{recursive:!0}),e(this.rulesPath))try{let e=n(this.rulesPath,`utf-8`),t=JSON.parse(e);if(!Array.isArray(t))throw 
Error(`rules.json must be an array`);for(let e of t)if(typeof e!=`object`||!e||typeof e.id!=`string`||!Array.isArray(e.patterns)||typeof e.category!=`string`||typeof e.pushWeight!=`number`)throw Error(`Invalid rule: ${JSON.stringify(e).slice(0,100)}`);this.rules=t}catch{o.warn(`Failed to parse policy rules, resetting to starter set`),this.rules=[...c],this.save()}else this.rules=[...c],this.save()}classify(e,t,n){let r=`${e} ${t} ${n.join(` `)}`.toLowerCase(),i=[];for(let e of this.rules){if(!e.enabled)continue;let t=e.patterns.filter(e=>r.includes(e.toLowerCase()));t.length>0&&i.push({ruleId:e.id,category:e.category,pushWeight:e.pushWeight,matchedPatterns:t})}let a=i.length>0?Math.max(...i.map(e=>e.pushWeight)):0;return{matchingRules:i,pushRecommended:a>=.5,maxPushWeight:a}}getRules(){return[...this.rules]}getRule(e){return this.rules.find(t=>t.id===e)}updateRule(e,t){let n=this.rules.find(t=>t.id===e);if(n)return Object.assign(n,t,{updatedAt:new Date().toISOString(),autoPush:!1}),this.save(),n}addRule(e){if(this.rules.find(t=>t.id===e.id))throw Error(`Rule with id '${e.id}' already exists`);let t=new Date().toISOString(),n={...e,autoPush:!1,createdAt:t,updatedAt:t};return this.rules.push(n),this.save(),n}deleteRule(e){let t=this.rules.findIndex(t=>t.id===e);return t===-1?!1:(this.rules.splice(t,1),this.save(),!0)}save(){r(this.rulesPath,JSON.stringify(this.rules,null,2),`utf-8`)}};export{l as PolicyStore};
2
+ //# sourceMappingURL=policy-store.js.map
@@ -0,0 +1,24 @@
1
+ import { ERPushRequest, ERPushResponse, ERSyncEntry } from "./types.js";
2
+ import { ERClient } from "./er-client.js";
3
+
4
+ //#region packages/enterprise-bridge/src/push-adapter.d.ts
5
+ declare class PushAdapter {
6
+ private readonly client;
7
+ private readonly syncHistory;
8
+ private static readonly MAX_HISTORY;
9
+ constructor(client: ERClient);
10
+ /** Push a knowledge entry to ER */
11
+ push(entryId: string, request: ERPushRequest): Promise<ERPushResponse>;
12
+ /** Get sync history */
13
+ getHistory(): ERSyncEntry[];
14
+ /** Get sync status summary */
15
+ getStatus(): {
16
+ totalPushed: number;
17
+ successCount: number;
18
+ failCount: number;
19
+ lastPush?: ERSyncEntry;
20
+ };
21
+ }
22
+ //#endregion
23
+ export { PushAdapter };
24
+ //# sourceMappingURL=push-adapter.d.ts.map
@@ -0,0 +1,2 @@
1
+ var e=class e{client;syncHistory=[];static MAX_HISTORY=100;constructor(e){this.client=e}async push(t,n){let r=await this.client.push(n);for(this.syncHistory.push({entryId:t,title:n.title,pushedAt:r.timestamp,status:r.status,remotePath:r.remotePath});this.syncHistory.length>e.MAX_HISTORY;)this.syncHistory.shift();return r}getHistory(){return[...this.syncHistory]}getStatus(){let e=this.syncHistory.filter(e=>e.status===`stored`).length;return{totalPushed:this.syncHistory.length,successCount:e,failCount:this.syncHistory.length-e,lastPush:this.syncHistory.at(-1)}}};export{e as PushAdapter};
2
+ //# sourceMappingURL=push-adapter.js.map
@@ -0,0 +1,15 @@
1
+ import { ERSearchResult, MergedSearchResult } from "./types.js";
2
+ import { SearchResult } from "@kb/core";
3
+
4
+ //#region packages/enterprise-bridge/src/result-merger.d.ts
5
+ /**
6
+ * Merge local and ER results using local-first interleaving.
7
+ *
8
+ * Local RRF scores (~0.01-0.05) and ER raw similarity scores (~0.3-0.9) are on
9
+ * incomparable scales. Instead of sorting both together (which would always rank
10
+ * ER above local), we preserve local ordering, deduplicate, then append ER results.
11
+ */
12
+ declare function mergeResults(localResults: SearchResult[], erResults: ERSearchResult[], limit: number): MergedSearchResult[];
13
+ //#endregion
14
+ export { mergeResults };
15
+ //# sourceMappingURL=result-merger.d.ts.map
@@ -0,0 +1,2 @@
1
+ function e(e){return{content:e.record.content,sourcePath:e.record.sourcePath,score:e.score,source:`local`,startLine:e.record.startLine,endLine:e.record.endLine,contentType:e.record.contentType,headingPath:e.record.headingPath,origin:e.record.origin,category:e.record.category,tags:e.record.tags}}function t(e){return{content:e.content,sourcePath:e.sourcePath,score:e.score,source:`er`,metadata:e.metadata}}function n(e){return`${e.slice(0,200).replace(/\s+/g,` `)}:${e.length}`}function r(r,i,a){let o=new Set,s=[];for(let t of r){let r=n(t.record.content);o.has(r)||(o.add(r),s.push(e(t)))}let c=[...i].sort((e,t)=>t.score-e.score);for(let e of c){let r=n(e.content);o.has(r)||(o.add(r),s.push(t(e)))}return s.slice(0,a)}export{r as mergeResults};
2
+ //# sourceMappingURL=result-merger.js.map
@@ -0,0 +1,82 @@
1
+ //#region packages/enterprise-bridge/src/types.d.ts
2
+ /** ER bridge configuration (subset of KBConfig.er) */
3
+ interface ERBridgeConfig {
4
+ enabled: boolean;
5
+ baseUrl: string;
6
+ apiKey: string;
7
+ timeoutMs: number;
8
+ cacheTtlMs: number;
9
+ cacheMaxEntries: number;
10
+ fallbackThreshold: number;
11
+ }
12
+ /** Result from ER search API */
13
+ interface ERSearchResult {
14
+ content: string;
15
+ sourcePath: string;
16
+ score: number;
17
+ metadata?: Record<string, unknown>;
18
+ }
19
+ /** Response from ER search API (POST /api/v1/search) */
20
+ interface ERSearchResponse {
21
+ sources: Array<{
22
+ content: string;
23
+ metadata: Record<string, string>;
24
+ score?: number;
25
+ confidence?: number;
26
+ }>;
27
+ query?: string;
28
+ searchMode?: string;
29
+ }
30
+ /** Push request to ER */
31
+ interface ERPushRequest {
32
+ title: string;
33
+ content: string;
34
+ category?: string;
35
+ tags?: string[];
36
+ }
37
+ /** Push response from ER */
38
+ interface ERPushResponse {
39
+ pushed: boolean;
40
+ status: 'stored' | 'failed';
41
+ remotePath?: string;
42
+ timestamp: string;
43
+ error?: string;
44
+ }
45
+ /** Transient ER error (429, 5xx, network) — retries exhausted but not a permanent failure */
46
+ declare class ERTransientError extends Error {
47
+ readonly statusCode?: number | undefined;
48
+ constructor(message: string, statusCode?: number | undefined);
49
+ }
50
+ /** Sync status entry for tracking push history */
51
+ interface ERSyncEntry {
52
+ entryId: string;
53
+ title: string;
54
+ pushedAt: string;
55
+ status: 'stored' | 'failed';
56
+ remotePath?: string;
57
+ }
58
+ /** Cache entry wrapping ER search results */
59
+ interface ERCacheEntry {
60
+ results: ERSearchResult[];
61
+ query: string;
62
+ cachedAt: number;
63
+ ttl: number;
64
+ }
65
+ /** Merged search result with source provenance */
66
+ interface MergedSearchResult {
67
+ content: string;
68
+ sourcePath: string;
69
+ score: number;
70
+ source: 'local' | 'er';
71
+ startLine?: number;
72
+ endLine?: number;
73
+ contentType?: string;
74
+ headingPath?: string;
75
+ origin?: string;
76
+ category?: string;
77
+ tags?: string[];
78
+ metadata?: Record<string, unknown>;
79
+ }
80
+ //#endregion
81
+ export { ERBridgeConfig, ERCacheEntry, ERPushRequest, ERPushResponse, ERSearchResponse, ERSearchResult, ERSyncEntry, ERTransientError, MergedSearchResult };
82
+ //# sourceMappingURL=types.d.ts.map
@@ -0,0 +1,2 @@
1
+ var e=class extends Error{constructor(e,t){super(e),this.statusCode=t,this.name=`ERTransientError`}};export{e as ERTransientError};
2
+ //# sourceMappingURL=types.js.map
@@ -1,11 +1,14 @@
1
+ //#region packages/indexer/src/file-hasher.d.ts
1
2
  /**
2
3
  * Generate a deterministic hash for a file's content.
3
4
  * Used for incremental indexing -- skip files that haven't changed.
4
5
  */
5
- export declare function hashContent(content: string): string;
6
+ declare function hashContent(content: string): string;
6
7
  /**
7
8
  * Generate a deterministic ID for a knowledge record.
8
9
  * Based on source path + chunk index to ensure idempotent upserts.
9
10
  */
10
- export declare function generateRecordId(sourcePath: string, chunkIndex: number): string;
11
+ declare function generateRecordId(sourcePath: string, chunkIndex: number): string;
12
+ //#endregion
13
+ export { generateRecordId, hashContent };
11
14
  //# sourceMappingURL=file-hasher.d.ts.map
@@ -1 +1,2 @@
1
- import{createHash as e}from"node:crypto";function i(t){return e("sha256").update(t).digest("hex").slice(0,16)}function o(t,n){const r=`${t}:${n}`;return e("sha256").update(r).digest("hex").slice(0,16)}export{o as generateRecordId,i as hashContent};
1
+ import{createHash as e}from"node:crypto";function t(t){return e(`sha256`).update(t).digest(`hex`).slice(0,16)}function n(t,n){let r=`${t}:${n}`;return e(`sha256`).update(r).digest(`hex`).slice(0,16)}export{n as generateRecordId,t as hashContent};
2
+ //# sourceMappingURL=file-hasher.js.map
@@ -1,27 +1,30 @@
1
- export interface CrawlResult {
2
- /** File path relative to workspace root */
3
- relativePath: string;
4
- /** Absolute file path */
5
- absolutePath: string;
6
- /** File content */
7
- content: string;
8
- /** File extension (lowercase, with dot) */
9
- extension: string;
1
+ //#region packages/indexer/src/filesystem-crawler.d.ts
2
+ interface CrawlResult {
3
+ /** File path relative to workspace root */
4
+ relativePath: string;
5
+ /** Absolute file path */
6
+ absolutePath: string;
7
+ /** File content */
8
+ content: string;
9
+ /** File extension (lowercase, with dot) */
10
+ extension: string;
10
11
  }
11
- export interface CrawlOptions {
12
- /** Glob patterns to exclude */
13
- excludePatterns: string[];
14
- /** Root directory to crawl */
15
- rootDir: string;
12
+ interface CrawlOptions {
13
+ /** Glob patterns to exclude */
14
+ excludePatterns: string[];
15
+ /** Root directory to crawl */
16
+ rootDir: string;
16
17
  }
17
18
  /**
18
19
  * Crawl a directory tree and yield files that should be indexed.
19
20
  */
20
- export declare class FilesystemCrawler {
21
- /** Binary file extensions that should always be skipped */
22
- private static readonly BINARY_EXTENSIONS;
23
- crawl(options: CrawlOptions): Promise<CrawlResult[]>;
24
- private walkDir;
25
- private isExcluded;
21
+ declare class FilesystemCrawler {
22
+ /** Binary file extensions that should always be skipped */
23
+ private static readonly BINARY_EXTENSIONS;
24
+ crawl(options: CrawlOptions): Promise<CrawlResult[]>;
25
+ private walkDir;
26
+ private isExcluded;
26
27
  }
28
+ //#endregion
29
+ export { CrawlOptions, CrawlResult, FilesystemCrawler };
27
30
  //# sourceMappingURL=filesystem-crawler.d.ts.map
@@ -1 +1,2 @@
1
- import{lstat as u,readdir as w,readFile as d,stat as g}from"node:fs/promises";import{extname as h,join as x,relative as S}from"node:path";import{FILE_LIMITS as y}from"../../core/dist/index.js";import{minimatch as E}from"minimatch";class p{static BINARY_EXTENSIONS=new Set([".node",".so",".dylib",".dll",".wasm",".bin",".exe",".png",".jpg",".jpeg",".gif",".bmp",".ico",".webp",".svg",".mp3",".mp4",".wav",".avi",".mov",".flac",".zip",".gz",".tar",".bz2",".7z",".rar",".pdf",".doc",".docx",".xls",".xlsx",".ppt",".pptx",".ttf",".otf",".woff",".woff2",".eot",".pyc",".class",".o",".obj",".a",".lib"]);async crawl(e){const i=[],n=new Set;return await this.walkDir(e.rootDir,e.rootDir,e.excludePatterns,i,n),i}async walkDir(e,i,n,s,o){let c;try{c=await w(e,{withFileTypes:!0})}catch(r){const t=r.code;(t==="EACCES"||t==="EPERM")&&console.error(`[KB] Permission denied, skipping directory: ${e}`);return}for(const r of c){const t=x(e,r.name),l=S(i,t).replace(/\\/g,"/");if(!this.isExcluded(l,n)){if(r.isDirectory()){if(r.name.startsWith("."))continue;try{if((await u(t)).isSymbolicLink())continue}catch{continue}const a=t;if(o.has(a))continue;o.add(a),await this.walkDir(t,i,n,s,o)}else if(r.isFile()){const a=h(r.name).toLowerCase();if(p.BINARY_EXTENSIONS.has(a))continue;try{if((await g(t)).size>y.maxFileSizeBytes)continue;const f=await d(t,"utf-8");if(f.includes("\0"))continue;s.push({relativePath:l,absolutePath:t,content:f,extension:a})}catch{}}}}}isExcluded(e,i){return i.some(n=>E(e,n,{dot:!0}))}}export{p as FilesystemCrawler};
1
+ import{lstat as e,readFile as t,readdir as n,stat as r}from"node:fs/promises";import{extname as i,join as a,relative as o}from"node:path";import{FILE_LIMITS as s,createLogger as c}from"../../core/dist/index.js";import{minimatch as l}from"minimatch";const u=c(`indexer`);var d=class c{static BINARY_EXTENSIONS=new Set(`.node,.so,.dylib,.dll,.wasm,.bin,.exe,.png,.jpg,.jpeg,.gif,.bmp,.ico,.webp,.svg,.mp3,.mp4,.wav,.avi,.mov,.flac,.zip,.gz,.tar,.bz2,.7z,.rar,.pdf,.doc,.docx,.xls,.xlsx,.ppt,.pptx,.ttf,.otf,.woff,.woff2,.eot,.pyc,.class,.o,.obj,.a,.lib`.split(`,`));async crawl(e){let t=[],n=new Set;return await this.walkDir(e.rootDir,e.rootDir,e.excludePatterns,t,n),t}async walkDir(l,d,f,p,m){let h;try{h=await n(l,{withFileTypes:!0})}catch(e){let t=e.code;(t===`EACCES`||t===`EPERM`)&&u.warn(`Permission denied, skipping directory`,{dir:l});return}for(let n of h){let u=a(l,n.name),h=o(d,u).replace(/\\/g,`/`);if(!this.isExcluded(h,f)){if(n.isDirectory()){if(n.name.startsWith(`.`))continue;try{if((await e(u)).isSymbolicLink())continue}catch{continue}let t=u;if(m.has(t))continue;m.add(t),await this.walkDir(u,d,f,p,m)}else if(n.isFile()){let e=i(n.name).toLowerCase();if(c.BINARY_EXTENSIONS.has(e))continue;try{if((await r(u)).size>s.maxFileSizeBytes)continue;let n=await t(u,`utf-8`);if(n.includes(`\0`))continue;p.push({relativePath:h,absolutePath:u,content:n,extension:e})}catch{}}}}}isExcluded(e,t){return t.some(t=>l(e,t,{dot:!0}))}};export{d as FilesystemCrawler};
2
+ //# sourceMappingURL=filesystem-crawler.js.map
@@ -1,14 +1,9 @@
1
- /**
2
- * Lightweight graph extraction for automatic population during indexing.
3
- * Extracts symbols (functions, classes, interfaces, etc.) and import relationships
4
- * from source code, producing GraphNode[] and GraphEdge[] for the graph store.
5
- *
6
- * Focuses on TS/JS — the primary use case. Non-code files return empty graphs.
7
- */
8
- import type { GraphEdge, GraphNode } from '@kb/store';
9
- export interface ExtractedGraph {
10
- nodes: GraphNode[];
11
- edges: GraphEdge[];
1
+ import { GraphEdge, GraphNode } from "@kb/store";
2
+
3
+ //#region packages/indexer/src/graph-extractor.d.ts
4
+ interface ExtractedGraph {
5
+ nodes: GraphNode[];
6
+ edges: GraphEdge[];
12
7
  }
13
8
  /**
14
9
  * Extract graph nodes and edges from a single file's content.
@@ -18,5 +13,7 @@ export interface ExtractedGraph {
18
13
  * - "defines" edges from module → symbol
19
14
  * - "imports" edges from module → imported module
20
15
  */
21
- export declare function extractGraph(content: string, sourcePath: string): ExtractedGraph;
16
+ declare function extractGraph(content: string, sourcePath: string): ExtractedGraph;
17
+ //#endregion
18
+ export { ExtractedGraph, extractGraph };
22
19
  //# sourceMappingURL=graph-extractor.d.ts.map
@@ -1 +1,2 @@
1
- import{createHash as y}from"node:crypto";import{dirname as k,extname as j,join as I}from"node:path";const S=new Set([".ts",".tsx",".js",".jsx",".mjs",".cjs"]),$=[{pattern:/^export\s+(?:async\s+)?function\s+(\w+)/gm,kind:"function",exported:!0},{pattern:/^export\s+(?:default\s+)?class\s+(\w+)/gm,kind:"class",exported:!0},{pattern:/^export\s+interface\s+(\w+)/gm,kind:"interface",exported:!0},{pattern:/^export\s+type\s+(\w+)/gm,kind:"type",exported:!0},{pattern:/^export\s+(?:const|let)\s+(\w+)/gm,kind:"const",exported:!0},{pattern:/^export\s+enum\s+(\w+)/gm,kind:"enum",exported:!0},{pattern:/^(?:async\s+)?function\s+(\w+)/gm,kind:"function",exported:!1},{pattern:/^class\s+(\w+)/gm,kind:"class",exported:!1},{pattern:/^interface\s+(\w+)/gm,kind:"interface",exported:!1},{pattern:/^type\s+(\w+)/gm,kind:"type",exported:!1},{pattern:/^enum\s+(\w+)/gm,kind:"enum",exported:!1}],G=[/import\s+(?:(?:type\s+)?(?:(?:\{[^}]*\}|[\w*]+)\s+from\s+)?)['"]([^'"]+)['"]/g,/import\(\s*['"]([^'"]+)['"]\s*\)/g,/require\(\s*['"]([^'"]+)['"]\s*\)/g];function x(t,e,s){return y("sha256").update(`${t}:${e}:${s}`).digest("hex").slice(0,16)}function E(t,e,s){return y("sha256").update(`${t}-${s}-${e}`).digest("hex").slice(0,16)}function R(t,e){const s=k(e);return I(s,t).replace(/\\/g,"/").replace(/\.(js|jsx|ts|tsx|mjs|cjs)$/,"")}function N(t){return t.replace(/\.(js|jsx|ts|tsx|mjs|cjs)$/,"")}function O(t,e){const s=j(e).toLowerCase();if(!S.has(s))return{nodes:[],edges:[]};const c=[],g=[],l=new Date().toISOString(),u=new Set,f=N(e),o=x("module",f,f);c.push({id:o,type:"module",name:e,properties:{ext:s},sourcePath:e,createdAt:l});for(const{pattern:i,kind:p,exported:d}of $){const n=new RegExp(i.source,i.flags);let a;for(;(a=n.exec(t))!==null;){const r=a[1],h=`${p}:${r}`;if(u.has(h))continue;u.add(h);const m=x(p,r,e);c.push({id:m,type:p,name:r,properties:{exported:d},sourcePath:e,createdAt:l}),g.push({id:E(o,m,"defines"),fromId:o,toId:m,type:"defines",weight:d?1:.5})}}const w=new Set;for(const i of 
G){const p=new RegExp(i.source,i.flags);let d;for(;(d=p.exec(t))!==null;){const n=d[1];if(!n.startsWith(".")||w.has(n))continue;w.add(n);const a=R(n,e),r=x("module",a,a);g.push({id:E(o,r,"imports"),fromId:o,toId:r,type:"imports",properties:{source:n}})}}return{nodes:c,edges:g}}export{O as extractGraph};
1
+ import{createHash as e}from"node:crypto";import{dirname as t,extname as n,join as r}from"node:path";const i=new Set([`.ts`,`.tsx`,`.js`,`.jsx`,`.mjs`,`.cjs`]),a=[{pattern:/^export\s+(?:async\s+)?function\s+(\w+)/gm,kind:`function`,exported:!0},{pattern:/^export\s+(?:default\s+)?class\s+(\w+)/gm,kind:`class`,exported:!0},{pattern:/^export\s+interface\s+(\w+)/gm,kind:`interface`,exported:!0},{pattern:/^export\s+type\s+(\w+)/gm,kind:`type`,exported:!0},{pattern:/^export\s+(?:const|let)\s+(\w+)/gm,kind:`const`,exported:!0},{pattern:/^export\s+enum\s+(\w+)/gm,kind:`enum`,exported:!0},{pattern:/^(?:async\s+)?function\s+(\w+)/gm,kind:`function`,exported:!1},{pattern:/^class\s+(\w+)/gm,kind:`class`,exported:!1},{pattern:/^interface\s+(\w+)/gm,kind:`interface`,exported:!1},{pattern:/^type\s+(\w+)/gm,kind:`type`,exported:!1},{pattern:/^enum\s+(\w+)/gm,kind:`enum`,exported:!1}],o=[/import\s+(?:(?:type\s+)?(?:(?:\{[^}]*\}|[\w*]+)\s+from\s+)?)['"]([^'"]+)['"]/g,/import\(\s*['"]([^'"]+)['"]\s*\)/g,/require\(\s*['"]([^'"]+)['"]\s*\)/g];function s(t,n,r){return e(`sha256`).update(`${t}:${n}:${r}`).digest(`hex`).slice(0,16)}function c(t,n,r){return e(`sha256`).update(`${t}-${r}-${n}`).digest(`hex`).slice(0,16)}function l(e,n){return r(t(n),e).replace(/\\/g,`/`).replace(/\.(js|jsx|ts|tsx|mjs|cjs)$/,``)}function u(e){return e.replace(/\.(js|jsx|ts|tsx|mjs|cjs)$/,``)}function d(e,t){let r=n(t).toLowerCase();if(!i.has(r))return{nodes:[],edges:[]};let d=[],f=[],p=new Date().toISOString(),m=new Set,h=u(t),g=s(`module`,h,h);d.push({id:g,type:`module`,name:t,properties:{ext:r},sourcePath:t,createdAt:p});for(let{pattern:n,kind:r,exported:i}of a){let a=new RegExp(n.source,n.flags),o;for(;(o=a.exec(e))!==null;){let e=o[1],n=`${r}:${e}`;if(m.has(n))continue;m.add(n);let a=s(r,e,t);d.push({id:a,type:r,name:e,properties:{exported:i},sourcePath:t,createdAt:p}),f.push({id:c(g,a,`defines`),fromId:g,toId:a,type:`defines`,weight:i?1:.5})}}let _=new Set;for(let n of o){let r=new 
RegExp(n.source,n.flags),i;for(;(i=r.exec(e))!==null;){let e=i[1];if(!e.startsWith(`.`)||_.has(e))continue;_.add(e);let n=l(e,t),r=s(`module`,n,n);f.push({id:c(g,r,`imports`),fromId:g,toId:r,type:`imports`,properties:{source:e}})}}return{nodes:d,edges:f}}export{d as extractGraph};
2
+ //# sourceMappingURL=graph-extractor.js.map
@@ -1,47 +1,53 @@
1
- import type { IndexStats, KBConfig } from '@kb/core';
2
- import type { IEmbedder } from '@kb/embeddings';
3
- import type { IGraphStore, IKnowledgeStore } from '@kb/store';
4
- export interface IndexProgress {
5
- phase: 'crawling' | 'chunking' | 'embedding' | 'storing' | 'cleanup' | 'done';
6
- filesTotal: number;
7
- filesProcessed: number;
8
- chunksTotal: number;
9
- chunksProcessed: number;
10
- /** The file currently being processed */
11
- currentFile?: string;
1
+ import { IGraphStore, IKnowledgeStore } from "@kb/store";
2
+ import { IndexStats, KBConfig } from "@kb/core";
3
+ import { IEmbedder } from "@kb/embeddings";
4
+
5
+ //#region packages/indexer/src/incremental-indexer.d.ts
6
+ interface IndexProgress {
7
+ phase: 'crawling' | 'chunking' | 'embedding' | 'storing' | 'cleanup' | 'done';
8
+ filesTotal: number;
9
+ filesProcessed: number;
10
+ chunksTotal: number;
11
+ chunksProcessed: number;
12
+ /** The file currently being processed */
13
+ currentFile?: string;
12
14
  }
13
- export type ProgressCallback = (progress: IndexProgress) => void;
14
- export interface IndexResult {
15
- filesProcessed: number;
16
- filesSkipped: number;
17
- chunksCreated: number;
18
- filesRemoved: number;
19
- durationMs: number;
15
+ type ProgressCallback = (progress: IndexProgress) => void;
16
+ interface IndexResult {
17
+ filesProcessed: number;
18
+ filesSkipped: number;
19
+ chunksCreated: number;
20
+ filesRemoved: number;
21
+ durationMs: number;
20
22
  }
21
- export declare class IncrementalIndexer {
22
- private readonly embedder;
23
- private readonly store;
24
- private readonly crawler;
25
- private indexing;
26
- private graphStore?;
27
- constructor(embedder: IEmbedder, store: IKnowledgeStore);
28
- /** Set the graph store for auto-population during indexing and cleanup on re-index. */
29
- setGraphStore(graphStore: IGraphStore): void;
30
- /**
31
- * Index all configured sources. Only re-indexes files that have changed.
32
- * Sources are crawled in parallel, and file processing runs concurrently
33
- * up to `config.indexing.concurrency` (default: half of CPU cores).
34
- */
35
- index(config: KBConfig, onProgress?: ProgressCallback): Promise<IndexResult>;
36
- private doIndex;
37
- /**
38
- * Force re-index all files (ignoring hashes).
39
- */
40
- reindexAll(config: KBConfig, onProgress?: ProgressCallback): Promise<IndexResult>;
41
- private doReindex;
42
- /**
43
- * Get current index statistics.
44
- */
45
- getStats(): Promise<IndexStats>;
23
+ declare class IncrementalIndexer {
24
+ private readonly embedder;
25
+ private readonly store;
26
+ private readonly crawler;
27
+ private indexing;
28
+ private graphStore?;
29
+ /** Whether an index operation is currently in progress. */
30
+ get isIndexing(): boolean;
31
+ constructor(embedder: IEmbedder, store: IKnowledgeStore);
32
+ /** Set the graph store for auto-population during indexing and cleanup on re-index. */
33
+ setGraphStore(graphStore: IGraphStore): void;
34
+ /**
35
+ * Index all configured sources. Only re-indexes files that have changed.
36
+ * Sources are crawled in parallel, and file processing runs concurrently
37
+ * up to `config.indexing.concurrency` (default: half of CPU cores).
38
+ */
39
+ index(config: KBConfig, onProgress?: ProgressCallback): Promise<IndexResult>;
40
+ private doIndex;
41
+ /**
42
+ * Force re-index all files (ignoring hashes).
43
+ */
44
+ reindexAll(config: KBConfig, onProgress?: ProgressCallback): Promise<IndexResult>;
45
+ private doReindex;
46
+ /**
47
+ * Get current index statistics.
48
+ */
49
+ getStats(): Promise<IndexStats>;
46
50
  }
51
+ //#endregion
52
+ export { IncrementalIndexer, IndexProgress, IndexResult, ProgressCallback };
47
53
  //# sourceMappingURL=incremental-indexer.d.ts.map
@@ -1 +1,2 @@
1
- import{availableParallelism as G}from"node:os";import{createChunker as F}from"../../chunker/dist/index.js";import{detectContentType as B}from"../../core/dist/index.js";import{generateRecordId as K,hashContent as I}from"./file-hasher.js";import{FilesystemCrawler as H}from"./filesystem-crawler.js";import{extractGraph as A}from"./graph-extractor.js";async function S(c,s,r,o){let g=0;async function l(){for(;g<c.length;){const d=g++;try{await s(c[d])}catch(a){o?.(c[d],a)}}}await Promise.all(Array.from({length:Math.min(r,c.length)},()=>l()))}const L=Math.max(1,Math.floor(G()/2));class O{constructor(s,r){this.embedder=s;this.store=r;this.crawler=new H}crawler;indexing=!1;graphStore;setGraphStore(s){this.graphStore=s}async index(s,r){if(this.indexing)throw new Error("Indexing is already in progress");this.indexing=!0;try{return await this.doIndex(s,r,{})}finally{this.indexing=!1}}async doIndex(s,r,o={}){const g=Date.now();let l=0,d=0,a=0,C=0;const w=s.indexing.concurrency??L;r?.({phase:"crawling",filesTotal:0,filesProcessed:0,chunksTotal:0,chunksProcessed:0});const y=(await Promise.all(s.sources.map(e=>this.crawler.crawl({rootDir:e.path,excludePatterns:e.excludePatterns})))).flat();let p,f;if(o.skipHashCheck)p=y,f=[];else{const e=await this.store.listSourcePaths(),h=new Set(y.map(n=>n.relativePath));f=e.filter(n=>!h.has(n)&&!n.startsWith("curated/")),p=[],await S(y,async n=>{const i=I(n.content),P=await this.store.getBySourcePath(n.relativePath);if(P.length>0&&P[0].fileHash===i){d++;return}p.push(n)},w,(n,i)=>console.error(`[indexer] hash check failed for ${n.relativePath}:`,i))}const u=p.length,b=50;let m=[],x=[],k=0;const v=async()=>{if(this.graphStore){try{m.length>0&&await this.graphStore.upsertNodes(m),x.length>0&&await this.graphStore.upsertEdges(x)}catch(e){console.error("[indexer] graph batch flush failed:",e)}m=[],x=[],k=0}};return await S(p,async 
e=>{r?.({phase:"chunking",filesTotal:u,filesProcessed:l,chunksTotal:a,chunksProcessed:a,currentFile:e.relativePath});const h=B(e.relativePath),i=F(e.extension).chunk(e.content,{sourcePath:e.relativePath,contentType:h});if(i.length===0)return;r?.({phase:"embedding",filesTotal:u,filesProcessed:l,chunksTotal:a+i.length,chunksProcessed:a,currentFile:e.relativePath});const P=await this.embedder.embedBatch(i.map(t=>t.text)),T=I(e.content),R=i.map((t,E)=>({id:K(e.relativePath,E),content:t.text,sourcePath:t.sourcePath,contentType:t.contentType,headingPath:t.headingPath,chunkIndex:t.chunkIndex,totalChunks:t.totalChunks,startLine:t.startLine,endLine:t.endLine,fileHash:T,indexedAt:new Date().toISOString(),origin:"indexed",tags:[],version:1}));if(r?.({phase:"storing",filesTotal:u,filesProcessed:l,chunksTotal:a+i.length,chunksProcessed:a,currentFile:e.relativePath}),await this.store.upsert(R,P),this.graphStore)try{o.graphCleared||await this.graphStore.deleteBySourcePath(e.relativePath);const t=A(e.content,e.relativePath);t.nodes.length>0&&m.push(...t.nodes),t.edges.length>0&&x.push(...t.edges),k++,k>=b&&await v()}catch(t){console.error(`[indexer] graph extraction failed for ${e.relativePath}:`,t)}l++,a+=i.length},w,(e,h)=>console.error(`[indexer] processing failed for ${e.relativePath}:`,h)),await v(),f.length>0&&(r?.({phase:"cleanup",filesTotal:u,filesProcessed:l,chunksTotal:a,chunksProcessed:a}),await S(f,async e=>{await this.store.deleteBySourcePath(e),this.graphStore&&await this.graphStore.deleteBySourcePath(e).catch(h=>console.error(`[indexer] graph cleanup failed for ${e}:`,h)),C++},w,(e,h)=>console.error(`[indexer] cleanup failed for ${e}:`,h))),r?.({phase:"done",filesTotal:u,filesProcessed:l,chunksTotal:a,chunksProcessed:a}),{filesProcessed:l,filesSkipped:d,chunksCreated:a,filesRemoved:C,durationMs:Date.now()-g}}async reindexAll(s,r){if(await this.store.dropTable(),this.graphStore)try{const o=await this.graphStore.getStats();o.nodeCount>0&&(await 
this.graphStore.clear(),console.error(`[indexer] Graph store cleared (was ${o.nodeCount} nodes, ${o.edgeCount} edges)`))}catch(o){console.error("[indexer] Graph store clear failed:",o)}return this.doReindex(s,r)}async doReindex(s,r){if(this.indexing)throw new Error("Indexing is already in progress");this.indexing=!0;try{return await this.doIndex(s,r,{skipHashCheck:!0,graphCleared:!0})}finally{this.indexing=!1}}async getStats(){return this.store.getStats()}}export{O as IncrementalIndexer};
1
+ import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{createLogger as i,detectContentType as a,serializeError as o}from"../../core/dist/index.js";import{availableParallelism as s}from"node:os";import{createChunker as c}from"../../chunker/dist/index.js";const l=i(`indexer`);async function u(e,t,n,r){let i=0;async function a(){for(;i<e.length;){let n=i++;try{await t(e[n])}catch(t){r?.(e[n],t)}}}await Promise.all(Array.from({length:Math.min(n,e.length)},()=>a()))}const d=Math.max(1,Math.floor(s()/2));var f=class{crawler;indexing=!1;graphStore;get isIndexing(){return this.indexing}constructor(e,t){this.embedder=e,this.store=t,this.crawler=new n}setGraphStore(e){this.graphStore=e}async index(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{return await this.doIndex(e,t,{})}finally{this.indexing=!1}}async doIndex(n,i,s={}){let f=Date.now(),p=0,m=0,h=0,g=0,_=n.indexing.concurrency??d;i?.({phase:`crawling`,filesTotal:0,filesProcessed:0,chunksTotal:0,chunksProcessed:0});let v=(await Promise.all(n.sources.map(e=>this.crawler.crawl({rootDir:e.path,excludePatterns:e.excludePatterns})))).flat(),y,b;if(s.skipHashCheck)y=v,b=[];else{let e=await this.store.listSourcePaths(),n=new Set(v.map(e=>e.relativePath));b=e.filter(e=>!n.has(e)&&!e.startsWith(`curated/`)),y=[],await u(v,async e=>{let n=t(e.content),r=await this.store.getBySourcePath(e.relativePath);if(r.length>0&&r[0].fileHash===n){m++;return}y.push(e)},_,(e,t)=>l.error(`Hash check failed`,{sourcePath:e.relativePath,...o(t)}))}let x=y.length,S=[],C=[],w=0,T=async()=>{if(this.graphStore){try{S.length>0&&await this.graphStore.upsertNodes(S),C.length>0&&await this.graphStore.upsertEdges(C)}catch(e){l.warn(`Graph batch flush failed`,o(e))}S=[],C=[],w=0}};return await u(y,async 
n=>{i?.({phase:`chunking`,filesTotal:x,filesProcessed:p,chunksTotal:h,chunksProcessed:h,currentFile:n.relativePath});let u=a(n.relativePath),d=c(n.extension).chunk(n.content,{sourcePath:n.relativePath,contentType:u});if(d.length===0)return;i?.({phase:`embedding`,filesTotal:x,filesProcessed:p,chunksTotal:h+d.length,chunksProcessed:h,currentFile:n.relativePath});let f=await this.embedder.embedBatch(d.map(e=>e.text)),m=t(n.content),g=d.map((t,r)=>({id:e(n.relativePath,r),content:t.text,sourcePath:t.sourcePath,contentType:t.contentType,headingPath:t.headingPath,chunkIndex:t.chunkIndex,totalChunks:t.totalChunks,startLine:t.startLine,endLine:t.endLine,fileHash:m,indexedAt:new Date().toISOString(),origin:`indexed`,tags:[],version:1}));if(i?.({phase:`storing`,filesTotal:x,filesProcessed:p,chunksTotal:h+d.length,chunksProcessed:h,currentFile:n.relativePath}),await this.store.upsert(g,f),this.graphStore)try{s.graphCleared||await this.graphStore.deleteBySourcePath(n.relativePath);let e=r(n.content,n.relativePath);e.nodes.length>0&&S.push(...e.nodes),e.edges.length>0&&C.push(...e.edges),w++,w>=50&&await T()}catch(e){l.warn(`Graph extraction failed`,{sourcePath:n.relativePath,...o(e)})}p++,h+=d.length},_,(e,t)=>l.error(`Processing failed`,{sourcePath:e.relativePath,...o(t)})),await T(),b.length>0&&(i?.({phase:`cleanup`,filesTotal:x,filesProcessed:p,chunksTotal:h,chunksProcessed:h}),await u(b,async e=>{await this.store.deleteBySourcePath(e),this.graphStore&&await this.graphStore.deleteBySourcePath(e).catch(t=>l.warn(`Graph cleanup failed`,{sourcePath:e,...o(t)})),g++},_,(e,t)=>l.error(`Cleanup failed`,{sourcePath:e,...o(t)}))),i?.({phase:`done`,filesTotal:x,filesProcessed:p,chunksTotal:h,chunksProcessed:h}),{filesProcessed:p,filesSkipped:m,chunksCreated:h,filesRemoved:g,durationMs:Date.now()-f}}async reindexAll(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{if(await this.store.dropTable(),this.graphStore)try{let e=await 
this.graphStore.getStats();e.nodeCount>0&&(await this.graphStore.clear(),l.info(`Graph store cleared`,{nodeCount:e.nodeCount,edgeCount:e.edgeCount}))}catch(e){l.warn(`Graph store clear failed`,o(e))}return await this.doReindex(e,t)}catch(e){throw this.indexing=!1,e}}async doReindex(e,t){try{return await this.doIndex(e,t,{skipHashCheck:!0,graphCleared:!0})}finally{this.indexing=!1}}async getStats(){return this.store.getStats()}};export{f as IncrementalIndexer};
2
+ //# sourceMappingURL=incremental-indexer.js.map