@vpxa/kb 0.1.15 → 0.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (454)
  1. package/README.md +62 -15
  2. package/package.json +5 -1
  3. package/packages/analyzers/dist/blast-radius-analyzer.d.ts +1 -2
  4. package/packages/analyzers/dist/blast-radius-analyzer.js +1 -2
  5. package/packages/analyzers/dist/dependency-analyzer.d.ts +2 -3
  6. package/packages/analyzers/dist/dependency-analyzer.js +3 -4
  7. package/packages/analyzers/dist/diagram-generator.d.ts +1 -2
  8. package/packages/analyzers/dist/diagram-generator.js +1 -2
  9. package/packages/analyzers/dist/entry-point-analyzer.d.ts +1 -2
  10. package/packages/analyzers/dist/entry-point-analyzer.js +3 -4
  11. package/packages/analyzers/dist/knowledge-producer.d.ts +6 -2
  12. package/packages/analyzers/dist/knowledge-producer.js +4 -3
  13. package/packages/analyzers/dist/pattern-analyzer.d.ts +1 -2
  14. package/packages/analyzers/dist/pattern-analyzer.js +2 -3
  15. package/packages/analyzers/dist/regex-call-graph.d.ts +1 -2
  16. package/packages/analyzers/dist/regex-call-graph.js +1 -2
  17. package/packages/analyzers/dist/structure-analyzer.d.ts +1 -2
  18. package/packages/analyzers/dist/structure-analyzer.js +1 -2
  19. package/packages/analyzers/dist/symbol-analyzer.d.ts +3 -3
  20. package/packages/analyzers/dist/symbol-analyzer.js +8 -7
  21. package/packages/analyzers/dist/ts-call-graph.d.ts +1 -2
  22. package/packages/analyzers/dist/ts-call-graph.js +1 -2
  23. package/packages/analyzers/dist/types.d.ts +1 -2
  24. package/packages/chunker/dist/call-graph-extractor.d.ts +3 -3
  25. package/packages/chunker/dist/call-graph-extractor.js +1 -2
  26. package/packages/chunker/dist/chunker-factory.d.ts +12 -4
  27. package/packages/chunker/dist/chunker-factory.js +1 -2
  28. package/packages/chunker/dist/chunker.interface.d.ts +1 -2
  29. package/packages/chunker/dist/code-chunker.d.ts +1 -2
  30. package/packages/chunker/dist/code-chunker.js +1 -2
  31. package/packages/chunker/dist/extractors/call-extractor.d.ts +24 -0
  32. package/packages/chunker/dist/extractors/call-extractor.js +1 -0
  33. package/packages/chunker/dist/extractors/entry-point-detector.d.ts +14 -0
  34. package/packages/chunker/dist/extractors/entry-point-detector.js +1 -0
  35. package/packages/chunker/dist/extractors/import-extractor.d.ts +14 -0
  36. package/packages/chunker/dist/extractors/import-extractor.js +1 -0
  37. package/packages/chunker/dist/extractors/pattern-detector.d.ts +14 -0
  38. package/packages/chunker/dist/extractors/pattern-detector.js +1 -0
  39. package/packages/chunker/dist/extractors/scope-resolver.d.ts +26 -0
  40. package/packages/chunker/dist/extractors/scope-resolver.js +1 -0
  41. package/packages/chunker/dist/extractors/symbol-extractor.d.ts +14 -0
  42. package/packages/chunker/dist/extractors/symbol-extractor.js +1 -0
  43. package/packages/chunker/dist/extractors/types.d.ts +36 -0
  44. package/packages/chunker/dist/extractors/types.js +1 -0
  45. package/packages/chunker/dist/generic-chunker.d.ts +1 -2
  46. package/packages/chunker/dist/generic-chunker.js +1 -2
  47. package/packages/chunker/dist/index.d.ts +15 -4
  48. package/packages/chunker/dist/index.js +1 -1
  49. package/packages/chunker/dist/markdown-chunker.d.ts +1 -2
  50. package/packages/chunker/dist/markdown-chunker.js +1 -2
  51. package/packages/chunker/dist/wasm/languages.d.ts +18 -0
  52. package/packages/chunker/dist/wasm/languages.js +1 -0
  53. package/packages/chunker/dist/wasm/query-executor.d.ts +70 -0
  54. package/packages/chunker/dist/wasm/query-executor.js +1 -0
  55. package/packages/chunker/dist/wasm/runtime.d.ts +44 -0
  56. package/packages/chunker/dist/wasm/runtime.js +1 -0
  57. package/packages/chunker/dist/wasm/types.d.ts +84 -0
  58. package/packages/chunker/dist/wasm/types.js +1 -0
  59. package/packages/chunker/dist/wasm-chunker.d.ts +23 -0
  60. package/packages/chunker/dist/wasm-chunker.js +6 -0
  61. package/packages/chunker/src/queries/go/calls.scm +11 -0
  62. package/packages/chunker/src/queries/go/entry-points.scm +20 -0
  63. package/packages/chunker/src/queries/go/imports.scm +6 -0
  64. package/packages/chunker/src/queries/go/patterns.scm +25 -0
  65. package/packages/chunker/src/queries/go/symbols.scm +26 -0
  66. package/packages/chunker/src/queries/java/calls.scm +10 -0
  67. package/packages/chunker/src/queries/java/entry-points.scm +27 -0
  68. package/packages/chunker/src/queries/java/imports.scm +11 -0
  69. package/packages/chunker/src/queries/java/patterns.scm +27 -0
  70. package/packages/chunker/src/queries/java/symbols.scm +28 -0
  71. package/packages/chunker/src/queries/javascript/calls.scm +21 -0
  72. package/packages/chunker/src/queries/javascript/entry-points.scm +31 -0
  73. package/packages/chunker/src/queries/javascript/imports.scm +32 -0
  74. package/packages/chunker/src/queries/javascript/patterns.scm +28 -0
  75. package/packages/chunker/src/queries/javascript/symbols.scm +52 -0
  76. package/packages/chunker/src/queries/python/calls.scm +11 -0
  77. package/packages/chunker/src/queries/python/entry-points.scm +21 -0
  78. package/packages/chunker/src/queries/python/imports.scm +14 -0
  79. package/packages/chunker/src/queries/python/patterns.scm +25 -0
  80. package/packages/chunker/src/queries/python/symbols.scm +17 -0
  81. package/packages/chunker/src/queries/rust/calls.scm +20 -0
  82. package/packages/chunker/src/queries/rust/entry-points.scm +7 -0
  83. package/packages/chunker/src/queries/rust/imports.scm +26 -0
  84. package/packages/chunker/src/queries/rust/patterns.scm +18 -0
  85. package/packages/chunker/src/queries/rust/symbols.scm +73 -0
  86. package/packages/chunker/src/queries/typescript/calls.scm +21 -0
  87. package/packages/chunker/src/queries/typescript/entry-points.scm +48 -0
  88. package/packages/chunker/src/queries/typescript/imports.scm +35 -0
  89. package/packages/chunker/src/queries/typescript/patterns.scm +47 -0
  90. package/packages/chunker/src/queries/typescript/symbols.scm +79 -0
  91. package/packages/chunker/wasm/tree-sitter-go.wasm +0 -0
  92. package/packages/chunker/wasm/tree-sitter-java.wasm +0 -0
  93. package/packages/chunker/wasm/tree-sitter-javascript.wasm +0 -0
  94. package/packages/chunker/wasm/tree-sitter-python.wasm +0 -0
  95. package/packages/chunker/wasm/tree-sitter-rust.wasm +0 -0
  96. package/packages/chunker/wasm/tree-sitter-typescript.wasm +0 -0
  97. package/packages/chunker/wasm/tree-sitter.wasm +0 -0
  98. package/packages/cli/dist/commands/analyze.d.ts +1 -2
  99. package/packages/cli/dist/commands/analyze.js +1 -2
  100. package/packages/cli/dist/commands/context-cmds.d.ts +1 -2
  101. package/packages/cli/dist/commands/context-cmds.js +1 -2
  102. package/packages/cli/dist/commands/environment.d.ts +1 -2
  103. package/packages/cli/dist/commands/environment.js +1 -2
  104. package/packages/cli/dist/commands/execution.d.ts +1 -2
  105. package/packages/cli/dist/commands/execution.js +1 -2
  106. package/packages/cli/dist/commands/graph.d.ts +1 -2
  107. package/packages/cli/dist/commands/graph.js +1 -2
  108. package/packages/cli/dist/commands/init/adapters.d.ts +4 -3
  109. package/packages/cli/dist/commands/init/adapters.js +1 -2
  110. package/packages/cli/dist/commands/init/config.d.ts +1 -2
  111. package/packages/cli/dist/commands/init/config.js +3 -4
  112. package/packages/cli/dist/commands/init/constants.d.ts +18 -0
  113. package/packages/cli/dist/commands/init/constants.js +1 -0
  114. package/packages/cli/dist/commands/init/curated.d.ts +1 -2
  115. package/packages/cli/dist/commands/init/curated.js +1 -2
  116. package/packages/cli/dist/commands/init/global.d.ts +34 -0
  117. package/packages/cli/dist/commands/init/global.js +5 -0
  118. package/packages/cli/dist/commands/init/index.d.ts +7 -2
  119. package/packages/cli/dist/commands/init/index.js +5 -3
  120. package/packages/cli/dist/commands/init/scaffold.d.ts +1 -2
  121. package/packages/cli/dist/commands/init/scaffold.js +1 -2
  122. package/packages/cli/dist/commands/init/templates.d.ts +3 -4
  123. package/packages/cli/dist/commands/init/templates.js +106 -243
  124. package/packages/cli/dist/commands/knowledge.d.ts +1 -2
  125. package/packages/cli/dist/commands/knowledge.js +1 -2
  126. package/packages/cli/dist/commands/search.d.ts +1 -2
  127. package/packages/cli/dist/commands/search.js +1 -2
  128. package/packages/cli/dist/commands/system.d.ts +1 -2
  129. package/packages/cli/dist/commands/system.js +3 -4
  130. package/packages/cli/dist/commands/workspace.d.ts +1 -2
  131. package/packages/cli/dist/commands/workspace.js +1 -2
  132. package/packages/cli/dist/context.d.ts +1 -2
  133. package/packages/cli/dist/context.js +1 -2
  134. package/packages/cli/dist/helpers.d.ts +1 -2
  135. package/packages/cli/dist/helpers.js +1 -2
  136. package/packages/cli/dist/index.d.ts +1 -2
  137. package/packages/cli/dist/index.js +1 -2
  138. package/packages/cli/dist/kb-init.d.ts +1 -2
  139. package/packages/cli/dist/kb-init.js +1 -2
  140. package/packages/cli/dist/types.d.ts +1 -2
  141. package/packages/core/dist/constants.d.ts +23 -2
  142. package/packages/core/dist/constants.js +1 -2
  143. package/packages/core/dist/content-detector.d.ts +1 -2
  144. package/packages/core/dist/content-detector.js +1 -2
  145. package/packages/core/dist/errors.d.ts +1 -2
  146. package/packages/core/dist/errors.js +1 -2
  147. package/packages/core/dist/global-registry.d.ts +63 -0
  148. package/packages/core/dist/global-registry.js +1 -0
  149. package/packages/core/dist/index.d.ts +4 -3
  150. package/packages/core/dist/index.js +1 -1
  151. package/packages/core/dist/logger.d.ts +4 -2
  152. package/packages/core/dist/logger.js +1 -2
  153. package/packages/core/dist/types.d.ts +1 -2
  154. package/packages/core/dist/types.js +1 -2
  155. package/packages/embeddings/dist/embedder.interface.d.ts +2 -3
  156. package/packages/embeddings/dist/onnx-embedder.d.ts +2 -3
  157. package/packages/embeddings/dist/onnx-embedder.js +1 -2
  158. package/packages/enterprise-bridge/dist/cache.d.ts +1 -2
  159. package/packages/enterprise-bridge/dist/cache.js +1 -2
  160. package/packages/enterprise-bridge/dist/er-client.d.ts +1 -2
  161. package/packages/enterprise-bridge/dist/er-client.js +1 -2
  162. package/packages/enterprise-bridge/dist/evolution-collector.d.ts +1 -2
  163. package/packages/enterprise-bridge/dist/evolution-collector.js +1 -2
  164. package/packages/enterprise-bridge/dist/policy-store.d.ts +1 -2
  165. package/packages/enterprise-bridge/dist/policy-store.js +1 -2
  166. package/packages/enterprise-bridge/dist/push-adapter.d.ts +1 -2
  167. package/packages/enterprise-bridge/dist/push-adapter.js +1 -2
  168. package/packages/enterprise-bridge/dist/result-merger.d.ts +1 -2
  169. package/packages/enterprise-bridge/dist/result-merger.js +1 -2
  170. package/packages/enterprise-bridge/dist/types.d.ts +1 -2
  171. package/packages/enterprise-bridge/dist/types.js +1 -2
  172. package/packages/indexer/dist/file-hasher.d.ts +1 -2
  173. package/packages/indexer/dist/file-hasher.js +1 -2
  174. package/packages/indexer/dist/filesystem-crawler.d.ts +1 -2
  175. package/packages/indexer/dist/filesystem-crawler.js +1 -2
  176. package/packages/indexer/dist/graph-extractor.d.ts +1 -2
  177. package/packages/indexer/dist/graph-extractor.js +1 -2
  178. package/packages/indexer/dist/hash-cache.d.ts +24 -0
  179. package/packages/indexer/dist/hash-cache.js +1 -0
  180. package/packages/indexer/dist/incremental-indexer.d.ts +6 -3
  181. package/packages/indexer/dist/incremental-indexer.js +1 -2
  182. package/packages/indexer/dist/index.d.ts +2 -1
  183. package/packages/indexer/dist/index.js +1 -1
  184. package/packages/server/dist/config.d.ts +1 -2
  185. package/packages/server/dist/config.js +1 -2
  186. package/packages/server/dist/cross-workspace.d.ts +43 -0
  187. package/packages/server/dist/cross-workspace.js +1 -0
  188. package/packages/server/dist/curated-manager.d.ts +2 -3
  189. package/packages/server/dist/curated-manager.js +4 -5
  190. package/packages/server/dist/index.js +1 -2
  191. package/packages/server/dist/replay-interceptor.d.ts +1 -2
  192. package/packages/server/dist/replay-interceptor.js +1 -2
  193. package/packages/server/dist/resources/resources.d.ts +1 -2
  194. package/packages/server/dist/resources/resources.js +1 -2
  195. package/packages/server/dist/server.d.ts +4 -2
  196. package/packages/server/dist/server.js +1 -2
  197. package/packages/server/dist/tools/analyze.tools.d.ts +3 -4
  198. package/packages/server/dist/tools/analyze.tools.js +2 -2
  199. package/packages/server/dist/tools/audit.tool.d.ts +1 -2
  200. package/packages/server/dist/tools/audit.tool.js +1 -2
  201. package/packages/server/dist/tools/bridge.tools.d.ts +1 -2
  202. package/packages/server/dist/tools/bridge.tools.js +1 -2
  203. package/packages/server/dist/tools/evolution.tools.d.ts +1 -2
  204. package/packages/server/dist/tools/evolution.tools.js +1 -2
  205. package/packages/server/dist/tools/forge.tools.d.ts +1 -2
  206. package/packages/server/dist/tools/forge.tools.js +5 -6
  207. package/packages/server/dist/tools/forget.tool.d.ts +1 -2
  208. package/packages/server/dist/tools/forget.tool.js +1 -2
  209. package/packages/server/dist/tools/graph.tool.d.ts +1 -2
  210. package/packages/server/dist/tools/graph.tool.js +1 -2
  211. package/packages/server/dist/tools/list.tool.d.ts +1 -2
  212. package/packages/server/dist/tools/list.tool.js +1 -2
  213. package/packages/server/dist/tools/lookup.tool.d.ts +1 -2
  214. package/packages/server/dist/tools/lookup.tool.js +1 -2
  215. package/packages/server/dist/tools/onboard.tool.d.ts +1 -2
  216. package/packages/server/dist/tools/onboard.tool.js +1 -2
  217. package/packages/server/dist/tools/policy.tools.d.ts +1 -2
  218. package/packages/server/dist/tools/policy.tools.js +1 -2
  219. package/packages/server/dist/tools/produce.tool.d.ts +1 -2
  220. package/packages/server/dist/tools/produce.tool.js +1 -2
  221. package/packages/server/dist/tools/read.tool.d.ts +1 -2
  222. package/packages/server/dist/tools/read.tool.js +2 -3
  223. package/packages/server/dist/tools/reindex.tool.d.ts +1 -2
  224. package/packages/server/dist/tools/reindex.tool.js +3 -3
  225. package/packages/server/dist/tools/remember.tool.d.ts +1 -2
  226. package/packages/server/dist/tools/remember.tool.js +1 -2
  227. package/packages/server/dist/tools/replay.tool.d.ts +1 -2
  228. package/packages/server/dist/tools/replay.tool.js +1 -2
  229. package/packages/server/dist/tools/search.tool.d.ts +1 -2
  230. package/packages/server/dist/tools/search.tool.js +4 -5
  231. package/packages/server/dist/tools/status.tool.d.ts +7 -3
  232. package/packages/server/dist/tools/status.tool.js +2 -3
  233. package/packages/server/dist/tools/toolkit.tools.d.ts +3 -4
  234. package/packages/server/dist/tools/toolkit.tools.js +19 -19
  235. package/packages/server/dist/tools/update.tool.d.ts +1 -2
  236. package/packages/server/dist/tools/update.tool.js +1 -2
  237. package/packages/server/dist/tools/utility.tools.d.ts +1 -2
  238. package/packages/server/dist/tools/utility.tools.js +2 -3
  239. package/packages/server/dist/version-check.d.ts +1 -2
  240. package/packages/server/dist/version-check.js +1 -2
  241. package/packages/store/dist/graph-store.interface.d.ts +1 -2
  242. package/packages/store/dist/lance-store.d.ts +1 -2
  243. package/packages/store/dist/lance-store.js +1 -2
  244. package/packages/store/dist/sqlite-graph-store.d.ts +1 -2
  245. package/packages/store/dist/sqlite-graph-store.js +6 -7
  246. package/packages/store/dist/store-factory.d.ts +1 -2
  247. package/packages/store/dist/store-factory.js +1 -2
  248. package/packages/store/dist/store.interface.d.ts +1 -2
  249. package/packages/tools/dist/audit.d.ts +1 -2
  250. package/packages/tools/dist/audit.js +1 -2
  251. package/packages/tools/dist/batch.d.ts +1 -2
  252. package/packages/tools/dist/batch.js +1 -2
  253. package/packages/tools/dist/changelog.d.ts +1 -2
  254. package/packages/tools/dist/changelog.js +2 -3
  255. package/packages/tools/dist/check.d.ts +1 -2
  256. package/packages/tools/dist/check.js +2 -3
  257. package/packages/tools/dist/checkpoint.d.ts +1 -2
  258. package/packages/tools/dist/checkpoint.js +1 -2
  259. package/packages/tools/dist/codemod.d.ts +1 -2
  260. package/packages/tools/dist/codemod.js +1 -2
  261. package/packages/tools/dist/compact.d.ts +1 -2
  262. package/packages/tools/dist/compact.js +1 -2
  263. package/packages/tools/dist/data-transform.d.ts +1 -2
  264. package/packages/tools/dist/data-transform.js +1 -2
  265. package/packages/tools/dist/dead-symbols.d.ts +1 -2
  266. package/packages/tools/dist/dead-symbols.js +2 -3
  267. package/packages/tools/dist/delegate.d.ts +1 -2
  268. package/packages/tools/dist/delegate.js +1 -2
  269. package/packages/tools/dist/diff-parse.d.ts +1 -2
  270. package/packages/tools/dist/diff-parse.js +1 -2
  271. package/packages/tools/dist/digest.d.ts +1 -2
  272. package/packages/tools/dist/digest.js +1 -2
  273. package/packages/tools/dist/dogfood-log.d.ts +49 -0
  274. package/packages/tools/dist/dogfood-log.js +2 -0
  275. package/packages/tools/dist/encode.d.ts +1 -2
  276. package/packages/tools/dist/encode.js +1 -2
  277. package/packages/tools/dist/env-info.d.ts +1 -2
  278. package/packages/tools/dist/env-info.js +1 -2
  279. package/packages/tools/dist/eval.d.ts +1 -2
  280. package/packages/tools/dist/eval.js +1 -2
  281. package/packages/tools/dist/evidence-map.d.ts +1 -2
  282. package/packages/tools/dist/evidence-map.js +2 -3
  283. package/packages/tools/dist/file-cache.d.ts +1 -2
  284. package/packages/tools/dist/file-cache.js +1 -2
  285. package/packages/tools/dist/file-summary.d.ts +17 -2
  286. package/packages/tools/dist/file-summary.js +2 -3
  287. package/packages/tools/dist/file-walk.d.ts +1 -2
  288. package/packages/tools/dist/file-walk.js +1 -2
  289. package/packages/tools/dist/find-examples.d.ts +1 -2
  290. package/packages/tools/dist/find-examples.js +1 -2
  291. package/packages/tools/dist/find.d.ts +1 -2
  292. package/packages/tools/dist/find.js +1 -2
  293. package/packages/tools/dist/forge-classify.d.ts +1 -2
  294. package/packages/tools/dist/forge-classify.js +2 -3
  295. package/packages/tools/dist/forge-ground.d.ts +1 -2
  296. package/packages/tools/dist/forge-ground.js +1 -2
  297. package/packages/tools/dist/git-context.d.ts +1 -2
  298. package/packages/tools/dist/git-context.js +1 -2
  299. package/packages/tools/dist/graph-query.d.ts +1 -2
  300. package/packages/tools/dist/graph-query.js +1 -2
  301. package/packages/tools/dist/guide.d.ts +1 -2
  302. package/packages/tools/dist/guide.js +1 -2
  303. package/packages/tools/dist/health.d.ts +1 -2
  304. package/packages/tools/dist/health.js +1 -2
  305. package/packages/tools/dist/http-request.d.ts +1 -2
  306. package/packages/tools/dist/http-request.js +1 -2
  307. package/packages/tools/dist/index.d.ts +3 -2
  308. package/packages/tools/dist/index.js +1 -1
  309. package/packages/tools/dist/lane.d.ts +1 -2
  310. package/packages/tools/dist/lane.js +3 -4
  311. package/packages/tools/dist/measure.d.ts +4 -3
  312. package/packages/tools/dist/measure.js +2 -3
  313. package/packages/tools/dist/onboard.d.ts +1 -2
  314. package/packages/tools/dist/onboard.js +14 -15
  315. package/packages/tools/dist/parse-output.d.ts +1 -2
  316. package/packages/tools/dist/parse-output.js +2 -3
  317. package/packages/tools/dist/path-resolver.d.ts +1 -2
  318. package/packages/tools/dist/path-resolver.js +1 -2
  319. package/packages/tools/dist/process-manager.d.ts +1 -2
  320. package/packages/tools/dist/process-manager.js +1 -2
  321. package/packages/tools/dist/queue.d.ts +1 -2
  322. package/packages/tools/dist/queue.js +1 -2
  323. package/packages/tools/dist/regex-test.d.ts +1 -2
  324. package/packages/tools/dist/regex-test.js +1 -2
  325. package/packages/tools/dist/rename.d.ts +1 -2
  326. package/packages/tools/dist/rename.js +1 -2
  327. package/packages/tools/dist/replay.d.ts +2 -3
  328. package/packages/tools/dist/replay.js +4 -5
  329. package/packages/tools/dist/response-envelope.d.ts +1 -2
  330. package/packages/tools/dist/response-envelope.js +1 -2
  331. package/packages/tools/dist/schema-validate.d.ts +1 -2
  332. package/packages/tools/dist/schema-validate.js +1 -2
  333. package/packages/tools/dist/scope-map.d.ts +1 -2
  334. package/packages/tools/dist/scope-map.js +1 -2
  335. package/packages/tools/dist/snippet.d.ts +1 -2
  336. package/packages/tools/dist/snippet.js +1 -2
  337. package/packages/tools/dist/stash.d.ts +1 -2
  338. package/packages/tools/dist/stash.js +1 -2
  339. package/packages/tools/dist/stratum-card.d.ts +1 -2
  340. package/packages/tools/dist/stratum-card.js +1 -2
  341. package/packages/tools/dist/symbol.d.ts +16 -3
  342. package/packages/tools/dist/symbol.js +3 -4
  343. package/packages/tools/dist/test-run.d.ts +1 -2
  344. package/packages/tools/dist/test-run.js +2 -3
  345. package/packages/tools/dist/text-utils.d.ts +1 -2
  346. package/packages/tools/dist/text-utils.js +1 -2
  347. package/packages/tools/dist/time-utils.d.ts +1 -2
  348. package/packages/tools/dist/time-utils.js +1 -2
  349. package/packages/tools/dist/trace.d.ts +3 -2
  350. package/packages/tools/dist/trace.js +2 -3
  351. package/packages/tools/dist/truncation.d.ts +1 -2
  352. package/packages/tools/dist/truncation.js +1 -2
  353. package/packages/tools/dist/watch.d.ts +1 -2
  354. package/packages/tools/dist/watch.js +1 -2
  355. package/packages/tools/dist/web-fetch.d.ts +1 -2
  356. package/packages/tools/dist/web-fetch.js +1 -2
  357. package/packages/tools/dist/web-search.d.ts +1 -2
  358. package/packages/tools/dist/web-search.js +1 -2
  359. package/packages/tools/dist/workset.d.ts +1 -2
  360. package/packages/tools/dist/workset.js +1 -2
  361. package/packages/tui/dist/{App-BAlmxCCw.js → App-CYLNJLr6.js} +1 -2
  362. package/packages/tui/dist/App.d.ts +2 -3
  363. package/packages/tui/dist/App.js +1 -1
  364. package/packages/tui/dist/CuratedPanel-sYdZAICX.js +1 -2
  365. package/packages/tui/dist/LogPanel-DtMnoyXT.js +3 -0
  366. package/packages/tui/dist/SearchPanel-DREo6zgt.js +1 -2
  367. package/packages/tui/dist/StatusPanel-2ex8fLOO.js +1 -2
  368. package/packages/tui/dist/devtools-DUyj952l.js +1 -2
  369. package/packages/tui/dist/{embedder.interface-D4ew0HPW.d.ts → embedder.interface-IFCBpOlX.d.ts} +2 -3
  370. package/packages/tui/dist/{index-B9VpfVPP.d.ts → index-C8NmOF18.d.ts} +2 -3
  371. package/packages/tui/dist/index.d.ts +1 -1
  372. package/packages/tui/dist/index.js +1 -2
  373. package/packages/tui/dist/jsx-runtime-Cof-kwFn.js +1 -2
  374. package/packages/tui/dist/panels/CuratedPanel.d.ts +1 -2
  375. package/packages/tui/dist/panels/LogPanel.d.ts +1 -2
  376. package/packages/tui/dist/panels/LogPanel.js +1 -1
  377. package/packages/tui/dist/panels/SearchPanel.d.ts +2 -3
  378. package/packages/tui/dist/panels/StatusPanel.d.ts +1 -2
  379. package/packages/tui/dist/store.interface-CnY6SPOH.d.ts +1 -2
  380. package/scaffold/copilot/agents/Architect-Reviewer-Alpha.agent.md +7 -0
  381. package/scaffold/copilot/agents/Architect-Reviewer-Beta.agent.md +7 -0
  382. package/scaffold/copilot/agents/Documenter.agent.md +7 -0
  383. package/scaffold/copilot/agents/Orchestrator.agent.md +8 -0
  384. package/scaffold/copilot/agents/Planner.agent.md +9 -0
  385. package/scaffold/copilot/agents/Refactor.agent.md +6 -0
  386. package/scaffold/copilot/agents/Researcher-Alpha.agent.md +8 -0
  387. package/scaffold/copilot/agents/Researcher-Beta.agent.md +8 -0
  388. package/scaffold/copilot/agents/Researcher-Delta.agent.md +8 -0
  389. package/scaffold/copilot/agents/Researcher-Gamma.agent.md +8 -0
  390. package/scaffold/general/agents/Architect-Reviewer-Alpha.agent.md +21 -0
  391. package/scaffold/general/agents/Architect-Reviewer-Beta.agent.md +21 -0
  392. package/scaffold/general/agents/Documenter.agent.md +42 -0
  393. package/scaffold/general/agents/Orchestrator.agent.md +104 -0
  394. package/scaffold/general/agents/Planner.agent.md +55 -0
  395. package/scaffold/general/agents/Refactor.agent.md +36 -0
  396. package/scaffold/general/agents/Researcher-Alpha.agent.md +20 -0
  397. package/scaffold/general/agents/Researcher-Beta.agent.md +20 -0
  398. package/scaffold/general/agents/Researcher-Delta.agent.md +20 -0
  399. package/scaffold/general/agents/Researcher-Gamma.agent.md +20 -0
  400. package/scaffold/{copilot → general}/agents/_shared/code-agent-base.md +18 -0
  401. package/skills/adr-skill/SKILL.md +329 -0
  402. package/skills/adr-skill/assets/templates/adr-madr.md +89 -0
  403. package/skills/adr-skill/assets/templates/adr-readme.md +20 -0
  404. package/skills/adr-skill/assets/templates/adr-simple.md +46 -0
  405. package/skills/adr-skill/references/adr-conventions.md +95 -0
  406. package/skills/adr-skill/references/examples.md +193 -0
  407. package/skills/adr-skill/references/review-checklist.md +77 -0
  408. package/skills/adr-skill/references/template-variants.md +52 -0
  409. package/skills/adr-skill/scripts/bootstrap_adr.js +259 -0
  410. package/skills/adr-skill/scripts/new_adr.js +391 -0
  411. package/skills/adr-skill/scripts/set_adr_status.js +169 -0
  412. package/skills/c4-architecture/SKILL.md +295 -0
  413. package/skills/c4-architecture/references/advanced-patterns.md +552 -0
  414. package/skills/c4-architecture/references/c4-syntax.md +492 -0
  415. package/skills/c4-architecture/references/common-mistakes.md +437 -0
  416. package/skills/knowledge-base/SKILL.md +78 -1
  417. package/skills/lesson-learned/SKILL.md +105 -0
  418. package/skills/lesson-learned/references/anti-patterns.md +55 -0
  419. package/skills/lesson-learned/references/se-principles.md +109 -0
  420. package/skills/requirements-clarity/SKILL.md +324 -0
  421. package/skills/session-handoff/SKILL.md +189 -0
  422. package/skills/session-handoff/references/handoff-template.md +139 -0
  423. package/skills/session-handoff/references/resume-checklist.md +80 -0
  424. package/skills/session-handoff/scripts/check_staleness.js +269 -0
  425. package/skills/session-handoff/scripts/create_handoff.js +299 -0
  426. package/skills/session-handoff/scripts/list_handoffs.js +113 -0
  427. package/skills/session-handoff/scripts/validate_handoff.js +241 -0
  428. package/packages/chunker/dist/treesitter-chunker.d.ts +0 -44
  429. package/packages/chunker/dist/treesitter-chunker.js +0 -7
  430. package/packages/cli/dist/commands/init.d.ts +0 -15
  431. package/packages/cli/dist/commands/init.js +0 -305
  432. package/packages/tui/dist/LogPanel-DVB8Sv46.js +0 -4
  433. /package/scaffold/{copilot → general}/agents/Code-Reviewer-Alpha.agent.md +0 -0
  434. /package/scaffold/{copilot → general}/agents/Code-Reviewer-Beta.agent.md +0 -0
  435. /package/scaffold/{copilot → general}/agents/Debugger.agent.md +0 -0
  436. /package/scaffold/{copilot → general}/agents/Explorer.agent.md +0 -0
  437. /package/scaffold/{copilot → general}/agents/Frontend.agent.md +0 -0
  438. /package/scaffold/{copilot → general}/agents/Implementer.agent.md +0 -0
  439. /package/scaffold/{copilot → general}/agents/README.md +0 -0
  440. /package/scaffold/{copilot → general}/agents/Security.agent.md +0 -0
  441. /package/scaffold/{copilot → general}/agents/_shared/adr-protocol.md +0 -0
  442. /package/scaffold/{copilot → general}/agents/_shared/architect-reviewer-base.md +0 -0
  443. /package/scaffold/{copilot → general}/agents/_shared/code-reviewer-base.md +0 -0
  444. /package/scaffold/{copilot → general}/agents/_shared/decision-protocol.md +0 -0
  445. /package/scaffold/{copilot → general}/agents/_shared/forge-protocol.md +0 -0
  446. /package/scaffold/{copilot → general}/agents/_shared/researcher-base.md +0 -0
  447. /package/scaffold/{copilot → general}/agents/templates/adr-template.md +0 -0
  448. /package/scaffold/{copilot → general}/agents/templates/execution-state.md +0 -0
  449. /package/scaffold/{copilot → general}/prompts/ask.prompt.md +0 -0
  450. /package/scaffold/{copilot → general}/prompts/debug.prompt.md +0 -0
  451. /package/scaffold/{copilot → general}/prompts/design.prompt.md +0 -0
  452. /package/scaffold/{copilot → general}/prompts/implement.prompt.md +0 -0
  453. /package/scaffold/{copilot → general}/prompts/plan.prompt.md +0 -0
  454. /package/scaffold/{copilot → general}/prompts/review.prompt.md +0 -0
@@ -1,20 +1,20 @@
1
- import{CONTENT_TYPES as e,createLogger as t,serializeError as n}from"../../../core/dist/index.js";import{addToWorkset as r,batch as i,check as a,checkpointLatest as o,checkpointList as s,checkpointLoad as c,checkpointSave as l,codemod as u,compact as d,dataTransform as ee,delegate as te,delegateListModels as ne,deleteWorkset as re,diffParse as ie,evaluate as ae,fileSummary as oe,find as f,findDeadSymbols as p,findExamples as m,getWorkset as h,gitContext as g,guide as _,health as v,laneCreate as y,laneDiff as b,laneDiscard as x,laneList as S,laneMerge as C,laneStatus as w,listWorksets as T,parseOutput as E,processList as D,processLogs as O,processStart as k,processStatus as A,processStop as j,queueClear as M,queueCreate as N,queueDelete as P,queueDone as F,queueFail as I,queueGet as L,queueList as R,queueNext as z,queuePush as B,removeFromWorkset as V,rename as H,saveWorkset as U,scopeMap as W,stashClear as G,stashDelete as K,stashGet as q,stashList as se,stashSet as ce,summarizeCheckResult as le,symbol as ue,testRun as de,trace as fe,truncateToTokenBudget as J,watchList as pe,watchStart as me,watchStop as he,webFetch as ge}from"../../../tools/dist/index.js";import{z as Y}from"zod";const X=t(`tools`);function _e(e,t,r){e.registerTool(`compact`,{description:"Compress text to relevant sections using embedding similarity (no LLM). Provide either `text` or `path` (server reads the file — saves a round-trip). 
Segments by paragraph/sentence/line.",inputSchema:{text:Y.string().optional().describe(`The text to compress (provide this OR path, not both)`),path:Y.string().optional().describe(`File path to read server-side — avoids read_file round-trip + token doubling (provide this OR text)`),query:Y.string().describe(`Focus query — what are you trying to understand?`),max_chars:Y.number().min(100).max(5e4).default(3e3).describe(`Target output size in characters`),segmentation:Y.enum([`paragraph`,`sentence`,`line`]).default(`paragraph`).describe(`How to split the text for scoring`)}},async({text:e,path:i,query:a,max_chars:o,segmentation:s})=>{try{if(!e&&!i)return{content:[{type:`text`,text:`Error: Either "text" or "path" must be provided.`}],isError:!0};let n=await d(t,{text:e,path:i,query:a,maxChars:o,segmentation:s,cache:r});return{content:[{type:`text`,text:[`Compressed ${n.originalChars} → ${n.compressedChars} chars (${(n.ratio*100).toFixed(0)}%)`,`Kept ${n.segmentsKept}/${n.segmentsTotal} segments`,``,n.text].join(`
2
- `)}]}}catch(e){return X.error(`Compact failed`,n(e)),{content:[{type:`text`,text:`Compact failed. Check server logs for details.`}],isError:!0}}})}function ve(t,r,i){t.registerTool(`scope_map`,{description:`Generate a task-scoped reading plan. Given a task description, identifies which files and sections are relevant, with estimated token counts and suggested reading order.`,inputSchema:{task:Y.string().describe(`Description of the task to scope`),max_files:Y.number().min(1).max(50).default(15).describe(`Maximum files to include`),content_type:Y.enum(e).optional().describe(`Filter by content type`),max_tokens:Y.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`)}},async({task:e,max_files:t,content_type:a,max_tokens:o})=>{try{let n=await W(r,i,{task:e,maxFiles:t,contentType:a}),s=[`## Scope Map: ${e}`,`Total estimated tokens: ~${n.totalEstimatedTokens}`,``,`### Files (by relevance)`,...n.files.map((e,t)=>`${t+1}. **${e.path}** (~${e.estimatedTokens} tokens, ${(e.relevance*100).toFixed(0)}% relevant)\n ${e.reason}\n Focus: ${e.focusRanges.map(e=>`L${e.start}-${e.end}`).join(`, `)}`),``,`### Suggested Reading Order`,...n.readingOrder.map((e,t)=>`${t+1}. ${e}`),``,`### Suggested Compact Calls`,`_Estimated compressed total: ~${Math.ceil(n.totalEstimatedTokens/5)} tokens_`,...n.compactCommands.map((e,t)=>`${t+1}. ${e}`)].join(`
3
- `)+"\n\n---\n_Next: Use `search` to dive into specific files, or `compact` to compress file contents for context._";return{content:[{type:`text`,text:o?J(s,o):s}]}}catch(e){return X.error(`Scope map failed`,n(e)),{content:[{type:`text`,text:`Scope map failed. Check server logs for details.`}],isError:!0}}})}function ye(t,r,i){t.registerTool(`find`,{description:`Federated search across vector similarity, keyword (FTS), file glob, and regex pattern. Combines strategies, deduplicates, and returns unified results. Use mode "examples" to find real usage examples of a symbol or pattern.`,inputSchema:{query:Y.string().optional().describe(`Semantic/keyword search query (required for mode "examples")`),glob:Y.string().optional().describe(`File glob pattern (search mode only)`),pattern:Y.string().optional().describe(`Regex pattern to match in content (search mode only)`),limit:Y.number().min(1).max(50).default(10).describe(`Max results`),content_type:Y.enum(e).optional().describe(`Filter by content type`),mode:Y.enum([`search`,`examples`]).default(`search`).describe(`Mode: "search" (default) for federated search, "examples" to find usage examples of a symbol/pattern`),max_tokens:Y.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. 
When set, output is truncated to fit.`)}},async({query:e,glob:t,pattern:a,limit:o,content_type:s,mode:c,max_tokens:l})=>{try{if(c===`examples`){if(!e)return{content:[{type:`text`,text:`Error: "query" is required for mode "examples".`}],isError:!0};let t=await m(r,i,{query:e,limit:o,contentType:s}),n=JSON.stringify(t,null,2);return{content:[{type:`text`,text:l?J(n,l):n}]}}let n=await f(r,i,{query:e,glob:t,pattern:a,limit:o,contentType:s});if(n.results.length===0)return{content:[{type:`text`,text:`No results found.`}]};let u=[`Found ${n.totalFound} results via ${n.strategies.join(` + `)}`,``,...n.results.map(e=>{let t=e.lineRange?`:${e.lineRange.start}-${e.lineRange.end}`:``,n=e.preview?`\n ${e.preview.slice(0,100)}...`:``;return`- [${e.source}] ${e.path}${t} (${(e.score*100).toFixed(0)}%)${n}`})];return{content:[{type:`text`,text:l?J(u.join(`
4
- `),l):u.join(`
5
- `)}]}}catch(e){return X.error(`Find failed`,n(e)),{content:[{type:`text`,text:`Find failed. Check server logs for details.`}],isError:!0}}})}function be(e){e.registerTool(`parse_output`,{description:`Parse structured data from build tool output. Supports tsc, vitest, biome, and git status. Auto-detects the tool or specify explicitly.`,inputSchema:{output:Y.string().max(5e5).describe(`Raw output text from a build tool`),tool:Y.enum([`tsc`,`vitest`,`biome`,`git-status`]).optional().describe(`Tool to parse as (auto-detects if omitted)`)}},async({output:e,tool:t})=>{try{let n=E(e.replace(/\\n/g,`
6
- `).replace(/\\t/g,` `),t);return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Parse failed`,n(e)),{content:[{type:`text`,text:`Parse failed. Check server logs for details.`}],isError:!0}}})}function xe(e){e.registerTool(`workset`,{description:`Manage named file sets (worksets). Save, load, list, add/remove files. Worksets persist across sessions in .kb-state/worksets.json.`,inputSchema:{action:Y.enum([`save`,`get`,`list`,`delete`,`add`,`remove`]).describe(`Operation to perform`),name:Y.string().optional().describe(`Workset name (required for all except list)`),files:Y.array(Y.string()).optional().describe(`File paths (required for save, add, remove)`),description:Y.string().optional().describe(`Description (for save)`)}},async({action:e,name:t,files:i,description:a})=>{try{switch(e){case`save`:{if(!t||!i)throw Error(`name and files required for save`);let e=U(t,i,{description:a});return{content:[{type:`text`,text:`Saved workset "${e.name}" with ${e.files.length} files.`}]}}case`get`:{if(!t)throw Error(`name required for get`);let e=h(t);return e?{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}case`list`:{let e=T();return e.length===0?{content:[{type:`text`,text:`No worksets.`}]}:{content:[{type:`text`,text:e.map(e=>`- **${e.name}** (${e.files.length} files) — ${e.description??`no description`}`).join(`
7
- `)}]}}case`delete`:if(!t)throw Error(`name required for delete`);return{content:[{type:`text`,text:re(t)?`Deleted workset "${t}".`:`Workset "${t}" not found.`}]};case`add`:{if(!t||!i)throw Error(`name and files required for add`);let e=r(t,i);return{content:[{type:`text`,text:`Added to workset "${e.name}": now ${e.files.length} files.`}]}}case`remove`:{if(!t||!i)throw Error(`name and files required for remove`);let e=V(t,i);return e?{content:[{type:`text`,text:`Removed from workset "${e.name}": now ${e.files.length} files.`}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}}}catch(e){return X.error(`Workset operation failed`,n(e)),{content:[{type:`text`,text:`Workset operation failed. Check server logs for details.`}],isError:!0}}})}function Se(e){e.registerTool(`check`,{description:`Run incremental typecheck (tsc) and lint (biome) on the project or specific files. Returns structured error and warning lists. Default detail level is "summary" (~300 tokens).`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific files to check (if omitted, checks all)`),cwd:Y.string().optional().describe(`Working directory`),skip_types:Y.boolean().default(!1).describe(`Skip TypeScript typecheck`),skip_lint:Y.boolean().default(!1).describe(`Skip Biome lint`),detail:Y.enum([`summary`,`errors`,`full`]).default(`summary`).describe(`Output detail level: summary (default, ~300 tokens — pass/fail + counts + top errors), errors (parsed error objects), full (includes raw terminal output)`)}},async({files:e,cwd:t,skip_types:r,skip_lint:i,detail:o})=>{try{let n=await a({files:e,cwd:t,skipTypes:r,skipLint:i,detail:o===`summary`?`errors`:o});if(o===`summary`){let e=le(n),t=[];if(n.passed)t.push({tool:`test_run`,reason:`Types and lint clean — run tests next`});else{let e=n.tsc.errors[0]?.file??n.biome.errors[0]?.file;e&&t.push({tool:`symbol`,reason:`Resolve failing symbol in ${e}`,suggested_args:{name:e}}),t.push({tool:`check`,reason:`Re-check after fixing 
errors`,suggested_args:{detail:`errors`}})}return{content:[{type:`text`,text:JSON.stringify({...e,_next:t},null,2)}]}}return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Check failed`,n(e)),{content:[{type:`text`,text:`Check failed. Check server logs for details.`}],isError:!0}}})}function Ce(e,t,r){e.registerTool(`batch`,{description:`Execute multiple built-in operations in parallel with concurrency control. Supported operation types: search, find, and check.`,inputSchema:{operations:Y.array(Y.object({id:Y.string().describe(`Unique ID for this operation`),type:Y.enum([`search`,`find`,`check`]).describe(`Built-in operation type`),args:Y.record(Y.string(),Y.unknown()).describe(`Arguments for the operation`)})).min(1).max(100).describe(`Operations to execute`),concurrency:Y.number().min(1).max(20).default(4).describe(`Max concurrent operations`)}},async({operations:e,concurrency:a})=>{try{let n=await i(e,async e=>Ge(e,t,r),{concurrency:a});return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Batch failed`,n(e)),{content:[{type:`text`,text:`Batch failed. Check server logs for details.`}],isError:!0}}})}function we(e,t,r){e.registerTool(`symbol`,{description:`Resolve a symbol: find where it is defined, who imports it, and where it is referenced. Works on TypeScript and JavaScript codebases.`,inputSchema:{name:Y.string().describe(`Symbol name to look up (function, class, type, etc.)`),limit:Y.number().min(1).max(50).default(20).describe(`Max results per category`)}},async({name:e,limit:i})=>{try{return{content:[{type:`text`,text:Je(await ue(t,r,{name:e,limit:i}))}]}}catch(e){return X.error(`Symbol lookup failed`,n(e)),{content:[{type:`text`,text:`Symbol lookup failed. Check server logs for details.`}],isError:!0}}})}function Te(e){e.registerTool(`eval`,{description:`Execute a JavaScript or TypeScript snippet in a constrained VM sandbox with a timeout. 
Captures console output and returned values.`,inputSchema:{code:Y.string().max(1e5).describe(`Code snippet to execute`),lang:Y.enum([`js`,`ts`]).default(`js`).optional().describe(`Language mode: js executes directly, ts strips common type syntax first`),timeout:Y.number().min(1).max(6e4).default(5e3).optional().describe(`Execution timeout in milliseconds`)}},async({code:e,lang:t,timeout:r})=>{try{let n=ae({code:e,lang:t,timeout:r});return n.success?{content:[{type:`text`,text:`Eval succeeded in ${n.durationMs}ms\n\n${n.output}`}]}:{content:[{type:`text`,text:`Eval failed in ${n.durationMs}ms: ${n.error??`Unknown error`}`}],isError:!0}}catch(e){return X.error(`Eval failed`,n(e)),{content:[{type:`text`,text:`Eval failed. Check server logs for details.`}],isError:!0}}})}function Ee(e){e.registerTool(`test_run`,{description:`Run Vitest for the current project or a subset of files, then return a structured summary of passing and failing tests.`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific test files or patterns to run`),grep:Y.string().optional().describe(`Only run tests whose names match this pattern`),cwd:Y.string().optional().describe(`Working directory for the test run`)}},async({files:e,grep:t,cwd:r})=>{try{let n=await de({files:e,grep:t,cwd:r});return{content:[{type:`text`,text:Ye(n)}],isError:!n.passed}}catch(e){return X.error(`Test run failed`,n(e)),{content:[{type:`text`,text:`Test run failed. 
Check server logs for details.`}],isError:!0}}})}function De(e){e.registerTool(`stash`,{description:`Persist and retrieve named values in .kb-state/stash.json for intermediate results between tool calls.`,inputSchema:{action:Y.enum([`set`,`get`,`list`,`delete`,`clear`]).describe(`Operation to perform on the stash`),key:Y.string().optional().describe(`Entry key for set/get/delete operations`),value:Y.string().optional().describe(`String or JSON value for set operations`)}},async({action:e,key:t,value:r})=>{try{switch(e){case`set`:{if(!t)throw Error(`key required for set`);let e=ce(t,$e(r??``));return{content:[{type:`text`,text:`Stored stash entry "${e.key}" (${e.type}) at ${e.storedAt}.`}]}}case`get`:{if(!t)throw Error(`key required for get`);let e=q(t);return{content:[{type:`text`,text:e?JSON.stringify(e,null,2):`Stash entry "${t}" not found.`}]}}case`list`:{let e=se();return{content:[{type:`text`,text:e.length===0?`Stash is empty.`:e.map(e=>`- ${e.key} (${e.type}) — ${e.storedAt}`).join(`
8
- `)}]}}case`delete`:if(!t)throw Error(`key required for delete`);return{content:[{type:`text`,text:K(t)?`Deleted stash entry "${t}".`:`Stash entry "${t}" not found.`}]};case`clear`:{let e=G();return{content:[{type:`text`,text:`Cleared ${e} stash entr${e===1?`y`:`ies`}.`}]}}}}catch(e){return X.error(`Stash operation failed`,n(e)),{content:[{type:`text`,text:`Stash operation failed. Check server logs for details.`}],isError:!0}}})}function Oe(e){e.registerTool(`git_context`,{description:`Summarize the current Git branch, working tree state, recent commits, and optional diff statistics for the repository.`,inputSchema:{cwd:Y.string().optional().describe(`Repository root or working directory`),commit_count:Y.number().min(1).max(50).default(5).optional().describe(`How many recent commits to include`),include_diff:Y.boolean().default(!1).optional().describe(`Include diff stat for working tree changes`)}},async({cwd:e,commit_count:t,include_diff:r})=>{try{return{content:[{type:`text`,text:Xe(await g({cwd:e,commitCount:t,includeDiff:r}))}]}}catch(e){return X.error(`Git context failed`,n(e)),{content:[{type:`text`,text:`Git context failed. Check server logs for details.`}],isError:!0}}})}function ke(e){e.registerTool(`diff_parse`,{description:`Parse raw unified diff text into file-level and hunk-level structural changes.`,inputSchema:{diff:Y.string().max(1e6).describe(`Raw unified diff text`)}},async({diff:e})=>{try{return{content:[{type:`text`,text:Ze(ie({diff:e.replace(/\\n/g,`
9
- `).replace(/\\t/g,` `)}))}]}}catch(e){return X.error(`Diff parse failed`,n(e)),{content:[{type:`text`,text:`Diff parse failed. Check server logs for details.`}],isError:!0}}})}function Ae(e){e.registerTool(`rename`,{description:`Rename a symbol across files using whole-word regex matching for exports, imports, and general usage references.`,inputSchema:{old_name:Y.string().describe(`Existing symbol name to replace`),new_name:Y.string().describe(`New symbol name to use`),root_path:Y.string().describe(`Root directory to search within`),extensions:Y.array(Y.string()).optional().describe(`Optional file extensions to include, such as .ts,.tsx,.js,.jsx`),dry_run:Y.boolean().default(!0).describe(`Preview changes without writing files`)}},async({old_name:e,new_name:t,root_path:r,extensions:i,dry_run:a})=>{try{let n=await H({oldName:e,newName:t,rootPath:r,extensions:i,dryRun:a});return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Rename failed`,n(e)),{content:[{type:`text`,text:`Rename failed. Check server logs for details.`}],isError:!0}}})}function je(e){e.registerTool(`codemod`,{description:`Apply regex-based codemod rules across files and return structured before/after changes for each affected line.`,inputSchema:{root_path:Y.string().describe(`Root directory to transform within`),rules:Y.array(Y.object({description:Y.string().describe(`What the codemod rule does`),pattern:Y.string().describe(`Regex pattern in string form`),replacement:Y.string().describe(`Replacement string with optional capture groups`)})).min(1).describe(`Codemod rules to apply`),dry_run:Y.boolean().default(!0).describe(`Preview changes without writing files`)}},async({root_path:e,rules:t,dry_run:r})=>{try{let n=await u({rootPath:e,rules:t,dryRun:r});return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Codemod failed`,n(e)),{content:[{type:`text`,text:`Codemod failed. 
Check server logs for details.`}],isError:!0}}})}function Me(e,t){e.registerTool(`file_summary`,{description:`Create a concise structural summary of a source file: imports, exports, functions, classes, interfaces, and types.`,inputSchema:{path:Y.string().describe(`Absolute path to the file to summarize`)}},async({path:e})=>{try{return{content:[{type:`text`,text:Qe(await oe({path:e,content:(await t.get(e)).content}))}]}}catch(e){return X.error(`File summary failed`,n(e)),{content:[{type:`text`,text:`File summary failed. Check server logs for details.`}],isError:!0}}})}function Ne(e){e.registerTool(`checkpoint`,{description:`Save and restore lightweight session checkpoints in .kb-state/checkpoints for cross-session continuity.`,inputSchema:{action:Y.enum([`save`,`load`,`list`,`latest`]).describe(`Checkpoint action to perform`),label:Y.string().optional().describe(`Checkpoint label for save, or checkpoint id for load`),data:Y.string().max(5e5).optional().describe(`JSON object string for save actions`),notes:Y.string().max(1e4).optional().describe(`Optional notes for save actions`)}},async({action:e,label:t,data:r,notes:i})=>{try{switch(e){case`save`:if(!t)throw Error(`label required for save`);return{content:[{type:`text`,text:Q(l(t,et(r),{notes:i}))}]};case`load`:{if(!t)throw Error(`label required for load`);let e=c(t);return{content:[{type:`text`,text:e?Q(e):`Checkpoint "${t}" not found.`}]}}case`list`:{let e=s();return{content:[{type:`text`,text:e.length===0?`No checkpoints saved.`:e.map(e=>`- ${e.id} — ${e.label} (${e.createdAt})`).join(`
10
- `)}]}}case`latest`:{let e=o();return{content:[{type:`text`,text:e?Q(e):`No checkpoints saved.`}]}}}}catch(e){return X.error(`Checkpoint failed`,n(e)),{content:[{type:`text`,text:`Checkpoint failed. Check server logs for details.`}],isError:!0}}})}function Pe(e){e.registerTool(`data_transform`,{description:`Apply small jq-like transforms to JSON input for filtering, projection, grouping, and path extraction.`,inputSchema:{input:Y.string().max(5e5).describe(`Input JSON string`),expression:Y.string().max(1e4).describe(`Transform expression to apply`)}},async({input:e,expression:t})=>{try{return{content:[{type:`text`,text:ee({input:e,expression:t}).outputString}]}}catch(e){return X.error(`Data transform failed`,n(e)),{content:[{type:`text`,text:`Data transform failed. Check server logs for details.`}],isError:!0}}})}function Fe(e,t,r){e.registerTool(`trace`,{description:`Trace data flow through a codebase by following imports, call sites, and references from a starting symbol or file location.`,inputSchema:{start:Y.string().describe(`Starting point — symbol name or file:line reference`),direction:Y.enum([`forward`,`backward`,`both`]).describe(`Which direction to trace relationships`),max_depth:Y.number().min(1).max(10).default(3).optional().describe(`Maximum trace depth`)}},async({start:e,direction:i,max_depth:a})=>{try{let n=await fe(t,r,{start:e,direction:i,maxDepth:a});return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Trace failed`,n(e)),{content:[{type:`text`,text:`Trace failed. 
Check server logs for details.`}],isError:!0}}})}function Ie(e){e.registerTool(`process`,{description:`Start, stop, inspect, list, and tail logs for in-memory managed child processes.`,inputSchema:{action:Y.enum([`start`,`stop`,`status`,`list`,`logs`]).describe(`Process action to perform`),id:Y.string().optional().describe(`Managed process ID`),command:Y.string().optional().describe(`Executable to start`),args:Y.array(Y.string()).optional().describe(`Arguments for start actions`),tail:Y.number().min(1).max(500).optional().describe(`Log lines to return for logs actions`)}},async({action:e,id:t,command:r,args:i,tail:a})=>{try{switch(e){case`start`:if(!t||!r)throw Error(`id and command are required for start`);return{content:[{type:`text`,text:JSON.stringify(k(t,r,i??[]),null,2)}]};case`stop`:if(!t)throw Error(`id is required for stop`);return{content:[{type:`text`,text:JSON.stringify(j(t)??null,null,2)}]};case`status`:if(!t)throw Error(`id is required for status`);return{content:[{type:`text`,text:JSON.stringify(A(t)??null,null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(D(),null,2)}]};case`logs`:if(!t)throw Error(`id is required for logs`);return{content:[{type:`text`,text:JSON.stringify(O(t,a),null,2)}]}}}catch(e){return X.error(`Process action failed`,n(e)),{content:[{type:`text`,text:`Process action failed. 
Check server logs for details.`}],isError:!0}}})}function Le(e){e.registerTool(`watch`,{description:`Start, stop, and list in-memory filesystem watchers for a directory.`,inputSchema:{action:Y.enum([`start`,`stop`,`list`]).describe(`Watch action to perform`),path:Y.string().optional().describe(`Directory path to watch for start actions`),id:Y.string().optional().describe(`Watcher ID for stop actions`)}},async({action:e,path:t,id:r})=>{try{switch(e){case`start`:if(!t)throw Error(`path is required for start`);return{content:[{type:`text`,text:JSON.stringify(me({path:t}),null,2)}]};case`stop`:if(!r)throw Error(`id is required for stop`);return{content:[{type:`text`,text:JSON.stringify({stopped:he(r)},null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(pe(),null,2)}]}}}catch(e){return X.error(`Watch action failed`,n(e)),{content:[{type:`text`,text:`Watch action failed. Check server logs for details.`}],isError:!0}}})}function Re(e,t,r){e.registerTool(`dead_symbols`,{description:`Find exported symbols that appear to be unused because they are never imported or re-exported.`,inputSchema:{path:Y.string().optional().describe(`Root path to scope the search (default: cwd)`),limit:Y.number().min(1).max(500).default(100).optional().describe(`Maximum exported symbols to scan`)}},async({path:e,limit:i})=>{try{let n=await p(t,r,{rootPath:e,limit:i}),a=[`## Dead Symbol Analysis`,``,`**Exports scanned:** ${n.totalExports}`,`**Dead in source:** ${n.totalDeadSource} (actionable)`,`**Dead in docs:** ${n.totalDeadDocs} (informational — code samples in .md files)`,``];if(n.deadInSource.length>0){a.push(`### Dead in Source (actionable)`);for(let e of n.deadInSource)a.push(`- \`${e.name}\` (${e.kind}) — ${e.path}:${e.line}`);a.push(``)}if(n.deadInDocs.length>0){a.push(`### Dead in Docs (informational)`),a.push(`_${n.totalDeadDocs} symbol(s) found only in documentation code samples — not actionable dead code._`);for(let e of n.deadInDocs.slice(0,5))a.push(`- 
\`${e.name}\` — ${e.path}:${e.line}`);n.deadInDocs.length>5&&a.push(`- _... ${n.deadInDocs.length-5} more omitted_`)}return n.totalDeadSource>0?a.push(``,`---`,`_Next: \`codemod\` to remove ${n.totalDeadSource} unused exports | \`symbol\` to verify usage before removing_`):a.push(``,`---`,"_Next: `check` no dead symbols found, validate types and lint_"),{content:[{type:`text`,text:a.join(`
11
- `)}]}}catch(e){return X.error(`Dead symbol scan failed`,n(e)),{content:[{type:`text`,text:`Dead symbol scan failed. Check server logs for details.`}],isError:!0}}})}function ze(e){e.registerTool(`delegate`,{description:`Delegate a subtask to a local Ollama model. Use for summarization, classification, naming, or any task that can offload work from the host agent. Fails fast if Ollama is not running.`,inputSchema:{prompt:Y.string().max(2e5).describe(`The task or question to send to the local model`),model:Y.string().optional().describe(`Ollama model name (default: first available model)`),system:Y.string().optional().describe(`System prompt for the model`),context:Y.string().max(5e5).optional().describe(`Context text to include before the prompt (e.g. file contents)`),temperature:Y.number().min(0).max(2).default(.3).optional().describe(`Sampling temperature (0=deterministic, default 0.3)`),timeout:Y.number().min(1e3).max(6e5).default(12e4).optional().describe(`Timeout in milliseconds (default 120000)`),action:Y.enum([`generate`,`list_models`]).default(`generate`).optional().describe(`Action: generate a response or list available models`)}},async({prompt:e,model:t,system:r,context:i,temperature:a,timeout:o,action:s})=>{try{if(s===`list_models`){let e=await ne();return{content:[{type:`text`,text:JSON.stringify({models:e,count:e.length,_Next:`Use delegate with a model name`},null,2)}]}}let n=await te({prompt:e,model:t,system:r,context:i,temperature:a,timeout:o});return n.error?{content:[{type:`text`,text:JSON.stringify({error:n.error,model:n.model,durationMs:n.durationMs},null,2)}],isError:!0}:{content:[{type:`text`,text:JSON.stringify({model:n.model,response:n.response,durationMs:n.durationMs,tokenCount:n.tokenCount,_Next:`Use the response in your workflow. stash to save it.`},null,2)}]}}catch(e){return X.error(`Delegate failed`,n(e)),{content:[{type:`text`,text:`Delegate failed. 
Check server logs for details.`}],isError:!0}}})}function Be(e){e.registerTool(`lane`,{description:`Manage verified lanes — isolated file copies for parallel exploration. Create a lane, make changes, diff, merge back, or discard.`,inputSchema:{action:Y.enum([`create`,`list`,`status`,`diff`,`merge`,`discard`]).describe(`Lane action to perform`),name:Y.string().optional().describe(`Lane name (required for create/status/diff/merge/discard)`),files:Y.array(Y.string()).optional().describe(`File paths to copy into the lane (required for create)`)}},async({action:e,name:t,files:r})=>{try{switch(e){case`create`:{if(!t)throw Error(`name is required for create`);if(!r||r.length===0)throw Error(`files are required for create`);let e=y(t,r);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`list`:return{content:[{type:`text`,text:JSON.stringify(S(),null,2)}]};case`status`:if(!t)throw Error(`name is required for status`);return{content:[{type:`text`,text:JSON.stringify(w(t),null,2)}]};case`diff`:if(!t)throw Error(`name is required for diff`);return{content:[{type:`text`,text:JSON.stringify(b(t),null,2)}]};case`merge`:if(!t)throw Error(`name is required for merge`);return{content:[{type:`text`,text:JSON.stringify(C(t),null,2)}]};case`discard`:if(!t)throw Error(`name is required for discard`);return{content:[{type:`text`,text:JSON.stringify({discarded:x(t)},null,2)}]}}}catch(e){return X.error(`Lane action failed`,n(e)),{content:[{type:`text`,text:`Lane action failed. Check server logs for details.`}],isError:!0}}})}function Ve(e){e.registerTool(`health`,{description:`Run project health checks — verifies package.json, tsconfig, scripts, lockfile, README, LICENSE, .gitignore.`,inputSchema:{path:Y.string().optional().describe(`Root directory to check (defaults to cwd)`)}},async({path:e})=>{try{let t=v(e);return{content:[{type:`text`,text:JSON.stringify(t,null,2)}]}}catch(e){return X.error(`Health check failed`,n(e)),{content:[{type:`text`,text:`Health check failed. 
Check server logs for details.`}],isError:!0}}})}function He(e){e.registerTool(`queue`,{description:`Manage task queues for sequential agent operations. Push items, take next, mark done/failed, list queues.`,inputSchema:{action:Y.enum([`create`,`push`,`next`,`done`,`fail`,`get`,`list`,`clear`,`delete`]).describe(`Queue action`),name:Y.string().optional().describe(`Queue name (required for most actions)`),title:Y.string().optional().describe(`Item title (required for push)`),id:Y.string().optional().describe(`Item ID (required for done/fail)`),data:Y.unknown().optional().describe(`Arbitrary data to attach to a queue item`),error:Y.string().optional().describe(`Error message (required for fail)`)}},async({action:e,name:t,title:r,id:i,data:a,error:o})=>{try{switch(e){case`create`:if(!t)throw Error(`name is required for create`);return{content:[{type:`text`,text:JSON.stringify(N(t),null,2)}]};case`push`:if(!t)throw Error(`name is required for push`);if(!r)throw Error(`title is required for push`);return{content:[{type:`text`,text:JSON.stringify(B(t,r,a),null,2)}]};case`next`:{if(!t)throw Error(`name is required for next`);let e=z(t);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`done`:if(!t)throw Error(`name is required for done`);if(!i)throw Error(`id is required for done`);return{content:[{type:`text`,text:JSON.stringify(F(t,i),null,2)}]};case`fail`:if(!t)throw Error(`name is required for fail`);if(!i)throw Error(`id is required for fail`);if(!o)throw Error(`error is required for fail`);return{content:[{type:`text`,text:JSON.stringify(I(t,i,o),null,2)}]};case`get`:if(!t)throw Error(`name is required for get`);return{content:[{type:`text`,text:JSON.stringify(L(t),null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(R(),null,2)}]};case`clear`:if(!t)throw Error(`name is required for clear`);return{content:[{type:`text`,text:JSON.stringify({cleared:M(t)},null,2)}]};case`delete`:if(!t)throw Error(`name is required for 
delete`);return{content:[{type:`text`,text:JSON.stringify({deleted:P(t)},null,2)}]}}}catch(e){return X.error(`Queue action failed`,n(e)),{content:[{type:`text`,text:`Queue action failed. Check server logs for details.`}],isError:!0}}})}const Ue=Y.object({query:Y.string(),limit:Y.number().min(1).max(20).default(5).optional(),search_mode:Y.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).optional(),content_type:Y.enum(e).optional(),origin:Y.enum([`indexed`,`curated`,`produced`]).optional(),category:Y.string().optional(),tags:Y.array(Y.string()).optional(),min_score:Y.number().min(0).max(1).default(.25).optional()}),Z=Y.object({query:Y.string().optional(),glob:Y.string().optional(),pattern:Y.string().optional(),limit:Y.number().min(1).max(50).default(10).optional(),content_type:Y.enum(e).optional(),cwd:Y.string().optional()}),We=Y.object({files:Y.array(Y.string()).optional(),cwd:Y.string().optional(),skip_types:Y.boolean().optional(),skip_lint:Y.boolean().optional()});async function Ge(e,t,n){switch(e.type){case`search`:return Ke(t,n,Ue.parse(e.args));case`find`:{let r=Z.parse(e.args);if(!r.query&&!r.glob&&!r.pattern)throw Error(`find operation requires query, glob, or pattern`);return f(t,n,{query:r.query,glob:r.glob,pattern:r.pattern,limit:r.limit,contentType:r.content_type,cwd:r.cwd})}case`check`:{let t=We.parse(e.args);return a({files:t.files,cwd:t.cwd,skipTypes:t.skip_types,skipLint:t.skip_lint})}default:throw Error(`Unsupported batch operation type: ${e.type}`)}}async function Ke(e,t,n){let r=n.limit??5,i={limit:r,minScore:n.min_score??.25,contentType:n.content_type,origin:n.origin,category:n.category,tags:n.tags},a=e.embedQuery?.bind(e)??e.embed.bind(e);if(n.search_mode===`keyword`)return(await t.ftsSearch(n.query,i)).slice(0,r);let o=await a(n.query);if(n.search_mode===`semantic`)return t.search(o,i);let[s,c]=await Promise.all([t.search(o,{...i,limit:r*2}),t.ftsSearch(n.query,{...i,limit:r*2})]);return qe(s,c).slice(0,r)}function qe(e,t,n=60){let r=new 
Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let i=t[e],a=r.get(i.record.id);if(a){a.score+=1/(n+e+1);continue}r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score)}function Je(e){let t=[`Symbol: ${e.name}`];if(e.definedIn?t.push(`Defined in: ${e.definedIn.path}:${e.definedIn.line} (${e.definedIn.kind})`):t.push(`Defined in: not found`),t.push(``,`Imported by:`),e.importedBy.length===0)t.push(` none`);else for(let n of e.importedBy)t.push(` - ${n.path}:${n.line} ${n.importStatement}`);if(t.push(``,`Referenced in:`),e.referencedIn.length===0)t.push(` none`);else for(let n of e.referencedIn)t.push(` - ${n.path}:${n.line} ${n.context}`);return t.join(`
12
- `)}function Ye(e){let t=[`Vitest run: ${e.passed?`passed`:`failed`}`,`Duration: ${e.durationMs}ms`,`Passed: ${e.summary.passed}`,`Failed: ${e.summary.failed}`,`Skipped: ${e.summary.skipped}`];e.summary.suites!==void 0&&t.push(`Suites: ${e.summary.suites}`);let n=e.summary.tests.filter(e=>e.status===`fail`);if(n.length>0){t.push(``,`Failed tests:`);for(let e of n)t.push(`- ${e.name}${e.file?` (${e.file})`:``}`),e.error&&t.push(` ${e.error}`)}return t.join(`
13
- `)}function Xe(e){let t=[`Branch: ${e.branch}`,`Staged: ${e.status.staged.length}`,...e.status.staged.map(e=>` - ${e}`),`Modified: ${e.status.modified.length}`,...e.status.modified.map(e=>` - ${e}`),`Untracked: ${e.status.untracked.length}`,...e.status.untracked.map(e=>` - ${e}`),``,`Recent commits:`];if(e.recentCommits.length===0)t.push(` none`);else for(let n of e.recentCommits)t.push(` - ${n.hash} ${n.message}`),t.push(` ${n.author} @ ${n.date}`);return e.diff&&t.push(``,`Diff stat:`,e.diff),t.join(`
14
- `)}function Ze(e){if(e.length===0)return`No diff files found.`;let t=[];for(let n of e){let e=n.oldPath?` (from ${n.oldPath})`:``;t.push(`${n.path}${e} [${n.status}] +${n.additions} -${n.deletions} (${n.hunks.length} hunks)`);for(let e of n.hunks){let n=e.header?` ${e.header}`:``;t.push(` @@ -${e.oldStart},${e.oldLines} +${e.newStart},${e.newLines} @@${n}`)}}return t.join(`
15
- `)}function Qe(e){return[e.path,`Language: ${e.language}`,`Lines: ${e.lines}`,`Estimated tokens: ~${e.estimatedTokens}`,``,`Imports (${e.imports.length}):`,...$(e.imports),``,`Exports (${e.exports.length}):`,...$(e.exports),``,`Functions (${e.functions.length}):`,...$(e.functions.map(e=>`${e.name} @ line ${e.line}${e.exported?` [exported]`:``}`)),``,`Classes (${e.classes.length}):`,...$(e.classes.map(e=>`${e.name} @ line ${e.line}${e.exported?` [exported]`:``}`)),``,`Interfaces (${e.interfaces.length}):`,...$(e.interfaces.map(e=>`${e.name} @ line ${e.line}`)),``,`Types (${e.types.length}):`,...$(e.types.map(e=>`${e.name} @ line ${e.line}`))].join(`
1
+ import{fanOutSearch as e,openWorkspaceStores as t,resolveWorkspaces as n}from"../cross-workspace.js";import{CONTENT_TYPES as r,computePartitionKey as i,createLogger as a,serializeError as o}from"../../../core/dist/index.js";import{addToWorkset as s,batch as c,check as l,checkpointLatest as u,checkpointList as d,checkpointLoad as f,checkpointSave as p,codemod as m,compact as h,dataTransform as g,delegate as ee,delegateListModels as te,deleteWorkset as ne,diffParse as re,evaluate as ie,fileSummary as ae,find as _,findDeadSymbols as oe,findExamples as se,getWorkset as v,gitContext as y,guide as b,health as x,laneCreate as S,laneDiff as C,laneDiscard as w,laneList as T,laneMerge as E,laneStatus as D,listWorksets as O,parseOutput as k,processList as A,processLogs as j,processStart as M,processStatus as N,processStop as P,queueClear as F,queueCreate as I,queueDelete as L,queueDone as R,queueFail as z,queueGet as B,queueList as V,queueNext as H,queuePush as U,removeFromWorkset as W,rename as G,saveWorkset as K,scopeMap as ce,stashClear as le,stashDelete as ue,stashGet as de,stashList as fe,stashSet as pe,summarizeCheckResult as me,symbol as q,testRun as he,trace as ge,truncateToTokenBudget as J,watchList as _e,watchStart as ve,watchStop as ye,webFetch as be}from"../../../tools/dist/index.js";import{z as Y}from"zod";const X=a(`tools`);function xe(e,t,n){e.registerTool(`compact`,{description:"Compress text to relevant sections using embedding similarity (no LLM). Provide either `text` or `path` (server reads the file — saves a round-trip). 
Segments by paragraph/sentence/line.",inputSchema:{text:Y.string().optional().describe(`The text to compress (provide this OR path, not both)`),path:Y.string().optional().describe(`File path to read server-side — avoids read_file round-trip + token doubling (provide this OR text)`),query:Y.string().describe(`Focus query — what are you trying to understand?`),max_chars:Y.number().min(100).max(5e4).default(3e3).describe(`Target output size in characters`),segmentation:Y.enum([`paragraph`,`sentence`,`line`]).default(`paragraph`).describe(`How to split the text for scoring`)}},async({text:e,path:r,query:i,max_chars:a,segmentation:s})=>{try{if(!e&&!r)return{content:[{type:`text`,text:`Error: Either "text" or "path" must be provided.`}],isError:!0};let o=await h(t,{text:e,path:r,query:i,maxChars:a,segmentation:s,cache:n});return{content:[{type:`text`,text:[`Compressed ${o.originalChars} → ${o.compressedChars} chars (${(o.ratio*100).toFixed(0)}%)`,`Kept ${o.segmentsKept}/${o.segmentsTotal} segments`,``,o.text].join(`
2
+ `)}]}}catch(e){return X.error(`Compact failed`,o(e)),{content:[{type:`text`,text:`Compact failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function Se(e,t,n){e.registerTool(`scope_map`,{description:`Generate a task-scoped reading plan. Given a task description, identifies which files and sections are relevant, with estimated token counts and suggested reading order.`,inputSchema:{task:Y.string().describe(`Description of the task to scope`),max_files:Y.number().min(1).max(50).default(15).describe(`Maximum files to include`),content_type:Y.enum(r).optional().describe(`Filter by content type`),max_tokens:Y.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`)}},async({task:e,max_files:r,content_type:i,max_tokens:a})=>{try{let o=await ce(t,n,{task:e,maxFiles:r,contentType:i}),s=[`## Scope Map: ${e}`,`Total estimated tokens: ~${o.totalEstimatedTokens}`,``,`### Files (by relevance)`,...o.files.map((e,t)=>`${t+1}. **${e.path}** (~${e.estimatedTokens} tokens, ${(e.relevance*100).toFixed(0)}% relevant)\n ${e.reason}\n Focus: ${e.focusRanges.map(e=>`L${e.start}-${e.end}`).join(`, `)}`),``,`### Suggested Reading Order`,...o.readingOrder.map((e,t)=>`${t+1}. ${e}`),``,`### Suggested Compact Calls`,`_Estimated compressed total: ~${Math.ceil(o.totalEstimatedTokens/5)} tokens_`,...o.compactCommands.map((e,t)=>`${t+1}. ${e}`)].join(`
3
+ `)+"\n\n---\n_Next: Use `search` to dive into specific files, or `compact` to compress file contents for context._";return{content:[{type:`text`,text:a?J(s,a):s}]}}catch(e){return X.error(`Scope map failed`,o(e)),{content:[{type:`text`,text:`Scope map failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function Ce(a,s,c){a.registerTool(`find`,{description:`Federated search across vector similarity, keyword (FTS), file glob, and regex pattern. Combines strategies, deduplicates, and returns unified results. Use mode "examples" to find real usage examples of a symbol or pattern.`,inputSchema:{query:Y.string().optional().describe(`Semantic/keyword search query (required for mode "examples")`),glob:Y.string().optional().describe(`File glob pattern (search mode only)`),pattern:Y.string().optional().describe(`Regex pattern to match in content (search mode only)`),limit:Y.number().min(1).max(50).default(10).describe(`Max results`),content_type:Y.enum(r).optional().describe(`Filter by content type`),mode:Y.enum([`search`,`examples`]).default(`search`).describe(`Mode: "search" (default) for federated search, "examples" to find usage examples of a symbol/pattern`),max_tokens:Y.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`),workspaces:Y.array(Y.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all. 
Global mode only.`)}},async({query:r,glob:a,pattern:l,limit:u,content_type:d,mode:f,max_tokens:p,workspaces:m})=>{try{if(f===`examples`){if(!r)return{content:[{type:`text`,text:`Error: "query" is required for mode "examples".`}],isError:!0};let e=await se(s,c,{query:r,limit:u,contentType:d}),t=JSON.stringify(e,null,2);return{content:[{type:`text`,text:p?J(t,p):t}]}}let o=await _(s,c,{query:r,glob:a,pattern:l,limit:u,contentType:d}),h=``;if(m&&m.length>0&&r){let a=n(m,i(process.cwd()));if(a.length>0){let{stores:n,closeAll:i}=await t(a);try{let t=await e(n,await s.embedQuery(r),{limit:u,contentType:d});for(let e of t)o.results.push({path:`[${e.workspace}] ${e.record.sourcePath}`,score:e.score,source:`cross-workspace`,lineRange:e.record.startLine?{start:e.record.startLine,end:e.record.endLine}:void 0,preview:e.record.content.slice(0,200)});o.results.sort((e,t)=>t.score-e.score),o.results=o.results.slice(0,u),o.totalFound=o.results.length,h=` + ${a.length} workspace(s)`}finally{await i()}}}if(o.results.length===0)return{content:[{type:`text`,text:`No results found.`}]};let g=[`Found ${o.totalFound} results via ${o.strategies.join(` + `)}${h}`,``,...o.results.map(e=>{let t=e.lineRange?`:${e.lineRange.start}-${e.lineRange.end}`:``,n=e.preview?`\n ${e.preview.slice(0,100)}...`:``;return`- [${e.source}] ${e.path}${t} (${(e.score*100).toFixed(0)}%)${n}`})];return{content:[{type:`text`,text:p?J(g.join(`
4
+ `),p):g.join(`
5
+ `)}]}}catch(e){return X.error(`Find failed`,o(e)),{content:[{type:`text`,text:`Find failed. Check server logs for details.`}],isError:!0}}})}function we(e){e.registerTool(`parse_output`,{description:`Parse structured data from build tool output. Supports tsc, vitest, biome, and git status. Auto-detects the tool or specify explicitly.`,inputSchema:{output:Y.string().max(5e5).describe(`Raw output text from a build tool`),tool:Y.enum([`tsc`,`vitest`,`biome`,`git-status`]).optional().describe(`Tool to parse as (auto-detects if omitted)`)}},async({output:e,tool:t})=>{try{let n=k(e.replace(/\\n/g,`
6
+ `).replace(/\\t/g,` `),t);return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Parse failed`,o(e)),{content:[{type:`text`,text:`Parse failed. Check server logs for details.`}],isError:!0}}})}function Te(e){e.registerTool(`workset`,{description:`Manage named file sets (worksets). Save, load, list, add/remove files. Worksets persist across sessions in .kb-state/worksets.json.`,inputSchema:{action:Y.enum([`save`,`get`,`list`,`delete`,`add`,`remove`]).describe(`Operation to perform`),name:Y.string().optional().describe(`Workset name (required for all except list)`),files:Y.array(Y.string()).optional().describe(`File paths (required for save, add, remove)`),description:Y.string().optional().describe(`Description (for save)`)}},async({action:e,name:t,files:n,description:r})=>{try{switch(e){case`save`:{if(!t||!n)throw Error(`name and files required for save`);let e=K(t,n,{description:r});return{content:[{type:`text`,text:`Saved workset "${e.name}" with ${e.files.length} files.`}]}}case`get`:{if(!t)throw Error(`name required for get`);let e=v(t);return e?{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}case`list`:{let e=O();return e.length===0?{content:[{type:`text`,text:`No worksets.`}]}:{content:[{type:`text`,text:e.map(e=>`- **${e.name}** (${e.files.length} files) — ${e.description??`no description`}`).join(`
7
+ `)}]}}case`delete`:if(!t)throw Error(`name required for delete`);return{content:[{type:`text`,text:ne(t)?`Deleted workset "${t}".`:`Workset "${t}" not found.`}]};case`add`:{if(!t||!n)throw Error(`name and files required for add`);let e=s(t,n);return{content:[{type:`text`,text:`Added to workset "${e.name}": now ${e.files.length} files.`}]}}case`remove`:{if(!t||!n)throw Error(`name and files required for remove`);let e=W(t,n);return e?{content:[{type:`text`,text:`Removed from workset "${e.name}": now ${e.files.length} files.`}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}}}catch(e){return X.error(`Workset operation failed`,o(e)),{content:[{type:`text`,text:`Workset operation failed. Check server logs for details.`}],isError:!0}}})}function Ee(e){e.registerTool(`check`,{description:`Run incremental typecheck (tsc) and lint (biome) on the project or specific files. Returns structured error and warning lists. Default detail level is "summary" (~300 tokens).`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific files to check (if omitted, checks all)`),cwd:Y.string().optional().describe(`Working directory`),skip_types:Y.boolean().default(!1).describe(`Skip TypeScript typecheck`),skip_lint:Y.boolean().default(!1).describe(`Skip Biome lint`),detail:Y.enum([`summary`,`errors`,`full`]).default(`summary`).describe(`Output detail level: summary (default, ~300 tokens — pass/fail + counts + top errors), errors (parsed error objects), full (includes raw terminal output)`)}},async({files:e,cwd:t,skip_types:n,skip_lint:r,detail:i})=>{try{let a=await l({files:e,cwd:t,skipTypes:n,skipLint:r,detail:i===`summary`?`errors`:i});if(i===`summary`){let e=me(a),t=[];if(a.passed)t.push({tool:`test_run`,reason:`Types and lint clean — run tests next`});else{let e=a.tsc.errors[0]?.file??a.biome.errors[0]?.file;e&&t.push({tool:`symbol`,reason:`Resolve failing symbol in ${e}`,suggested_args:{name:e}}),t.push({tool:`check`,reason:`Re-check after fixing 
errors`,suggested_args:{detail:`errors`}})}return{content:[{type:`text`,text:JSON.stringify({...e,_next:t},null,2)}]}}return{content:[{type:`text`,text:JSON.stringify(a,null,2)}]}}catch(e){return X.error(`Check failed`,o(e)),{content:[{type:`text`,text:`Check failed. Check server logs for details.`}],isError:!0}}})}function De(e,t,n){e.registerTool(`batch`,{description:`Execute multiple built-in operations in parallel with concurrency control. Supported operation types: search, find, and check.`,inputSchema:{operations:Y.array(Y.object({id:Y.string().describe(`Unique ID for this operation`),type:Y.enum([`search`,`find`,`check`]).describe(`Built-in operation type`),args:Y.record(Y.string(),Y.unknown()).describe(`Arguments for the operation`)})).min(1).max(100).describe(`Operations to execute`),concurrency:Y.number().min(1).max(20).default(4).describe(`Max concurrent operations`)}},async({operations:e,concurrency:r})=>{try{let i=await c(e,async e=>Xe(e,t,n),{concurrency:r});return{content:[{type:`text`,text:JSON.stringify(i,null,2)}]}}catch(e){return X.error(`Batch failed`,o(e)),{content:[{type:`text`,text:`Batch failed. Check server logs for details.`}],isError:!0}}})}function Oe(e,r,a,s){e.registerTool(`symbol`,{description:`Resolve a symbol: find where it is defined, who imports it, and where it is referenced. Works on TypeScript and JavaScript codebases.`,inputSchema:{name:Y.string().describe(`Symbol name to look up (function, class, type, etc.)`),limit:Y.number().min(1).max(50).default(20).describe(`Max results per category`),workspaces:Y.array(Y.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all. 
Global mode only.`)}},async({name:e,limit:c,workspaces:l})=>{try{let o=await q(r,a,{name:e,limit:c,graphStore:s});if(l&&l.length>0){let a=n(l,i(process.cwd()));if(a.length>0){let{stores:n,closeAll:i}=await t(a);try{for(let[t,i]of n){let n=await q(r,i,{name:e,limit:c});n.definedIn&&!o.definedIn&&(o.definedIn={...n.definedIn,path:`[${t}] ${n.definedIn.path}`});for(let e of n.referencedIn)o.referencedIn.push({...e,path:`[${t}] ${e.path}`});if(n.importedBy){o.importedBy=o.importedBy??[];for(let e of n.importedBy)o.importedBy.push({...e,path:`[${t}] ${e.path}`})}}}finally{await i()}}}return{content:[{type:`text`,text:Qe(o)}]}}catch(e){return X.error(`Symbol lookup failed`,o(e)),{content:[{type:`text`,text:`Symbol lookup failed. Check server logs for details.`}],isError:!0}}})}function ke(e){e.registerTool(`eval`,{description:`Execute a JavaScript or TypeScript snippet in a constrained VM sandbox with a timeout. Captures console output and returned values.`,inputSchema:{code:Y.string().max(1e5).describe(`Code snippet to execute`),lang:Y.enum([`js`,`ts`]).default(`js`).optional().describe(`Language mode: js executes directly, ts strips common type syntax first`),timeout:Y.number().min(1).max(6e4).default(5e3).optional().describe(`Execution timeout in milliseconds`)}},async({code:e,lang:t,timeout:n})=>{try{let r=ie({code:e,lang:t,timeout:n});return r.success?{content:[{type:`text`,text:`Eval succeeded in ${r.durationMs}ms\n\n${r.output}`}]}:{content:[{type:`text`,text:`Eval failed in ${r.durationMs}ms: ${r.error??`Unknown error`}`}],isError:!0}}catch(e){return X.error(`Eval failed`,o(e)),{content:[{type:`text`,text:`Eval failed. 
Check server logs for details.`}],isError:!0}}})}function Ae(e){e.registerTool(`test_run`,{description:`Run Vitest for the current project or a subset of files, then return a structured summary of passing and failing tests.`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific test files or patterns to run`),grep:Y.string().optional().describe(`Only run tests whose names match this pattern`),cwd:Y.string().optional().describe(`Working directory for the test run`)}},async({files:e,grep:t,cwd:n})=>{try{let r=await he({files:e,grep:t,cwd:n});return{content:[{type:`text`,text:$e(r)}],isError:!r.passed}}catch(e){return X.error(`Test run failed`,o(e)),{content:[{type:`text`,text:`Test run failed. Check server logs for details.`}],isError:!0}}})}function je(e){e.registerTool(`stash`,{description:`Persist and retrieve named values in .kb-state/stash.json for intermediate results between tool calls.`,inputSchema:{action:Y.enum([`set`,`get`,`list`,`delete`,`clear`]).describe(`Operation to perform on the stash`),key:Y.string().optional().describe(`Entry key for set/get/delete operations`),value:Y.string().optional().describe(`String or JSON value for set operations`)}},async({action:e,key:t,value:n})=>{try{switch(e){case`set`:{if(!t)throw Error(`key required for set`);let e=pe(t,rt(n??``));return{content:[{type:`text`,text:`Stored stash entry "${e.key}" (${e.type}) at ${e.storedAt}.`}]}}case`get`:{if(!t)throw Error(`key required for get`);let e=de(t);return{content:[{type:`text`,text:e?JSON.stringify(e,null,2):`Stash entry "${t}" not found.`}]}}case`list`:{let e=fe();return{content:[{type:`text`,text:e.length===0?`Stash is empty.`:e.map(e=>`- ${e.key} (${e.type}) — ${e.storedAt}`).join(`
8
+ `)}]}}case`delete`:if(!t)throw Error(`key required for delete`);return{content:[{type:`text`,text:ue(t)?`Deleted stash entry "${t}".`:`Stash entry "${t}" not found.`}]};case`clear`:{let e=le();return{content:[{type:`text`,text:`Cleared ${e} stash entr${e===1?`y`:`ies`}.`}]}}}}catch(e){return X.error(`Stash operation failed`,o(e)),{content:[{type:`text`,text:`Stash operation failed. Check server logs for details.`}],isError:!0}}})}function Me(e){e.registerTool(`git_context`,{description:`Summarize the current Git branch, working tree state, recent commits, and optional diff statistics for the repository.`,inputSchema:{cwd:Y.string().optional().describe(`Repository root or working directory`),commit_count:Y.number().min(1).max(50).default(5).optional().describe(`How many recent commits to include`),include_diff:Y.boolean().default(!1).optional().describe(`Include diff stat for working tree changes`)}},async({cwd:e,commit_count:t,include_diff:n})=>{try{return{content:[{type:`text`,text:et(await y({cwd:e,commitCount:t,includeDiff:n}))}]}}catch(e){return X.error(`Git context failed`,o(e)),{content:[{type:`text`,text:`Git context failed. Check server logs for details.`}],isError:!0}}})}function Ne(e){e.registerTool(`diff_parse`,{description:`Parse raw unified diff text into file-level and hunk-level structural changes.`,inputSchema:{diff:Y.string().max(1e6).describe(`Raw unified diff text`)}},async({diff:e})=>{try{return{content:[{type:`text`,text:tt(re({diff:e.replace(/\\n/g,`
9
+ `).replace(/\\t/g,` `)}))}]}}catch(e){return X.error(`Diff parse failed`,o(e)),{content:[{type:`text`,text:`Diff parse failed. Check server logs for details.`}],isError:!0}}})}function Pe(e){e.registerTool(`rename`,{description:`Rename a symbol across files using whole-word regex matching for exports, imports, and general usage references.`,inputSchema:{old_name:Y.string().describe(`Existing symbol name to replace`),new_name:Y.string().describe(`New symbol name to use`),root_path:Y.string().describe(`Root directory to search within`),extensions:Y.array(Y.string()).optional().describe(`Optional file extensions to include, such as .ts,.tsx,.js,.jsx`),dry_run:Y.boolean().default(!0).describe(`Preview changes without writing files`)}},async({old_name:e,new_name:t,root_path:n,extensions:r,dry_run:i})=>{try{let a=await G({oldName:e,newName:t,rootPath:n,extensions:r,dryRun:i});return{content:[{type:`text`,text:JSON.stringify(a,null,2)}]}}catch(e){return X.error(`Rename failed`,o(e)),{content:[{type:`text`,text:`Rename failed. Check server logs for details.`}],isError:!0}}})}function Fe(e){e.registerTool(`codemod`,{description:`Apply regex-based codemod rules across files and return structured before/after changes for each affected line.`,inputSchema:{root_path:Y.string().describe(`Root directory to transform within`),rules:Y.array(Y.object({description:Y.string().describe(`What the codemod rule does`),pattern:Y.string().describe(`Regex pattern in string form`),replacement:Y.string().describe(`Replacement string with optional capture groups`)})).min(1).describe(`Codemod rules to apply`),dry_run:Y.boolean().default(!0).describe(`Preview changes without writing files`)}},async({root_path:e,rules:t,dry_run:n})=>{try{let r=await m({rootPath:e,rules:t,dryRun:n});return{content:[{type:`text`,text:JSON.stringify(r,null,2)}]}}catch(e){return X.error(`Codemod failed`,o(e)),{content:[{type:`text`,text:`Codemod failed. 
Check server logs for details.`}],isError:!0}}})}function Ie(e,t){e.registerTool(`file_summary`,{description:`Create a concise structural summary of a source file: imports, exports, functions, classes, interfaces, and types.`,inputSchema:{path:Y.string().describe(`Absolute path to the file to summarize`)}},async({path:e})=>{try{return{content:[{type:`text`,text:nt(await ae({path:e,content:(await t.get(e)).content}))}]}}catch(e){return X.error(`File summary failed`,o(e)),{content:[{type:`text`,text:`File summary failed. Check server logs for details.`}],isError:!0}}})}function Le(e){e.registerTool(`checkpoint`,{description:`Save and restore lightweight session checkpoints in .kb-state/checkpoints for cross-session continuity.`,inputSchema:{action:Y.enum([`save`,`load`,`list`,`latest`]).describe(`Checkpoint action to perform`),label:Y.string().optional().describe(`Checkpoint label for save, or checkpoint id for load`),data:Y.string().max(5e5).optional().describe(`JSON object string for save actions`),notes:Y.string().max(1e4).optional().describe(`Optional notes for save actions`)}},async({action:e,label:t,data:n,notes:r})=>{try{switch(e){case`save`:if(!t)throw Error(`label required for save`);return{content:[{type:`text`,text:Q(p(t,it(n),{notes:r}))}]};case`load`:{if(!t)throw Error(`label required for load`);let e=f(t);return{content:[{type:`text`,text:e?Q(e):`Checkpoint "${t}" not found.`}]}}case`list`:{let e=d();return{content:[{type:`text`,text:e.length===0?`No checkpoints saved.`:e.map(e=>`- ${e.id} — ${e.label} (${e.createdAt})`).join(`
10
+ `)}]}}case`latest`:{let e=u();return{content:[{type:`text`,text:e?Q(e):`No checkpoints saved.`}]}}}}catch(e){return X.error(`Checkpoint failed`,o(e)),{content:[{type:`text`,text:`Checkpoint failed. Check server logs for details.`}],isError:!0}}})}function Re(e){e.registerTool(`data_transform`,{description:`Apply small jq-like transforms to JSON input for filtering, projection, grouping, and path extraction.`,inputSchema:{input:Y.string().max(5e5).describe(`Input JSON string`),expression:Y.string().max(1e4).describe(`Transform expression to apply`)}},async({input:e,expression:t})=>{try{return{content:[{type:`text`,text:g({input:e,expression:t}).outputString}]}}catch(e){return X.error(`Data transform failed`,o(e)),{content:[{type:`text`,text:`Data transform failed. Check server logs for details.`}],isError:!0}}})}function ze(e,t,n){e.registerTool(`trace`,{description:`Trace data flow through a codebase by following imports, call sites, and references from a starting symbol or file location.`,inputSchema:{start:Y.string().describe(`Starting point — symbol name or file:line reference`),direction:Y.enum([`forward`,`backward`,`both`]).describe(`Which direction to trace relationships`),max_depth:Y.number().min(1).max(10).default(3).optional().describe(`Maximum trace depth`)}},async({start:e,direction:r,max_depth:i})=>{try{let a=await ge(t,n,{start:e,direction:r,maxDepth:i}),o=[`## Trace: ${a.start}`,`Direction: ${a.direction} | Depth: ${a.depth}`,``];if(a.nodes.length===0)o.push(`No connections found.`);else{let e=a.nodes.filter(e=>e.relationship===`calls`),t=a.nodes.filter(e=>e.relationship===`called-by`),n=a.nodes.filter(e=>e.relationship===`imports`),r=a.nodes.filter(e=>e.relationship===`imported-by`),i=a.nodes.filter(e=>e.relationship===`references`);if(e.length>0){o.push(`### Calls (${e.length})`);for(let t of e){let e=t.scope?` (from ${t.scope}())`:``;o.push(`- ${t.symbol}() ${t.path}:${t.line}${e}`)}o.push(``)}if(t.length>0){o.push(`### Called by 
(${t.length})`);for(let e of t){let t=e.scope?` in ${e.scope}()`:``;o.push(`- ${e.symbol}()${t} ${e.path}:${e.line}`)}o.push(``)}if(n.length>0){o.push(`### Imports (${n.length})`);for(let e of n)o.push(`- ${e.symbol} — ${e.path}:${e.line}`);o.push(``)}if(r.length>0){o.push(`### Imported by (${r.length})`);for(let e of r)o.push(`- ${e.path}:${e.line}`);o.push(``)}if(i.length>0){o.push(`### References (${i.length})`);for(let e of i)o.push(`- ${e.path}:${e.line}`);o.push(``)}}return o.push(`---`,"_Next: `symbol` for definition details | `compact` to read a referenced file | `blast_radius` for impact analysis_"),{content:[{type:`text`,text:o.join(`
11
+ `)}]}}catch(e){return X.error(`Trace failed`,o(e)),{content:[{type:`text`,text:`Trace failed. Check server logs for details.`}],isError:!0}}})}function Be(e){e.registerTool(`process`,{description:`Start, stop, inspect, list, and tail logs for in-memory managed child processes.`,inputSchema:{action:Y.enum([`start`,`stop`,`status`,`list`,`logs`]).describe(`Process action to perform`),id:Y.string().optional().describe(`Managed process ID`),command:Y.string().optional().describe(`Executable to start`),args:Y.array(Y.string()).optional().describe(`Arguments for start actions`),tail:Y.number().min(1).max(500).optional().describe(`Log lines to return for logs actions`)}},async({action:e,id:t,command:n,args:r,tail:i})=>{try{switch(e){case`start`:if(!t||!n)throw Error(`id and command are required for start`);return{content:[{type:`text`,text:JSON.stringify(M(t,n,r??[]),null,2)}]};case`stop`:if(!t)throw Error(`id is required for stop`);return{content:[{type:`text`,text:JSON.stringify(P(t)??null,null,2)}]};case`status`:if(!t)throw Error(`id is required for status`);return{content:[{type:`text`,text:JSON.stringify(N(t)??null,null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(A(),null,2)}]};case`logs`:if(!t)throw Error(`id is required for logs`);return{content:[{type:`text`,text:JSON.stringify(j(t,i),null,2)}]}}}catch(e){return X.error(`Process action failed`,o(e)),{content:[{type:`text`,text:`Process action failed. 
Check server logs for details.`}],isError:!0}}})}function Ve(e){e.registerTool(`watch`,{description:`Start, stop, and list in-memory filesystem watchers for a directory.`,inputSchema:{action:Y.enum([`start`,`stop`,`list`]).describe(`Watch action to perform`),path:Y.string().optional().describe(`Directory path to watch for start actions`),id:Y.string().optional().describe(`Watcher ID for stop actions`)}},async({action:e,path:t,id:n})=>{try{switch(e){case`start`:if(!t)throw Error(`path is required for start`);return{content:[{type:`text`,text:JSON.stringify(ve({path:t}),null,2)}]};case`stop`:if(!n)throw Error(`id is required for stop`);return{content:[{type:`text`,text:JSON.stringify({stopped:ye(n)},null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(_e(),null,2)}]}}}catch(e){return X.error(`Watch action failed`,o(e)),{content:[{type:`text`,text:`Watch action failed. Check server logs for details.`}],isError:!0}}})}function He(e,t,n){e.registerTool(`dead_symbols`,{description:`Find exported symbols that appear to be unused because they are never imported or re-exported.`,inputSchema:{path:Y.string().optional().describe(`Root path to scope the search (default: cwd)`),limit:Y.number().min(1).max(500).default(100).optional().describe(`Maximum exported symbols to scan`)}},async({path:e,limit:r})=>{try{let i=await oe(t,n,{rootPath:e,limit:r}),a=[`## Dead Symbol Analysis`,``,`**Exports scanned:** ${i.totalExports}`,`**Dead in source:** ${i.totalDeadSource} (actionable)`,`**Dead in docs:** ${i.totalDeadDocs} (informational code samples in .md files)`,``];if(i.deadInSource.length>0){a.push(`### Dead in Source (actionable)`);for(let e of i.deadInSource)a.push(`- \`${e.name}\` (${e.kind}) ${e.path}:${e.line}`);a.push(``)}if(i.deadInDocs.length>0){a.push(`### Dead in Docs (informational)`),a.push(`_${i.totalDeadDocs} symbol(s) found only in documentation code samples — not actionable dead code._`);for(let e of i.deadInDocs.slice(0,5))a.push(`- \`${e.name}\` — 
${e.path}:${e.line}`);i.deadInDocs.length>5&&a.push(`- _... ${i.deadInDocs.length-5} more omitted_`)}return i.totalDeadSource>0?a.push(``,`---`,`_Next: \`codemod\` to remove ${i.totalDeadSource} unused exports | \`symbol\` to verify usage before removing_`):a.push(``,`---`,"_Next: `check` — no dead symbols found, validate types and lint_"),{content:[{type:`text`,text:a.join(`
12
+ `)}]}}catch(e){return X.error(`Dead symbol scan failed`,o(e)),{content:[{type:`text`,text:`Dead symbol scan failed. Check server logs for details.`}],isError:!0}}})}function Ue(e){e.registerTool(`delegate`,{description:`Delegate a subtask to a local Ollama model. Use for summarization, classification, naming, or any task that can offload work from the host agent. Fails fast if Ollama is not running.`,inputSchema:{prompt:Y.string().max(2e5).describe(`The task or question to send to the local model`),model:Y.string().optional().describe(`Ollama model name (default: first available model)`),system:Y.string().optional().describe(`System prompt for the model`),context:Y.string().max(5e5).optional().describe(`Context text to include before the prompt (e.g. file contents)`),temperature:Y.number().min(0).max(2).default(.3).optional().describe(`Sampling temperature (0=deterministic, default 0.3)`),timeout:Y.number().min(1e3).max(6e5).default(12e4).optional().describe(`Timeout in milliseconds (default 120000)`),action:Y.enum([`generate`,`list_models`]).default(`generate`).optional().describe(`Action: generate a response or list available models`)}},async({prompt:e,model:t,system:n,context:r,temperature:i,timeout:a,action:s})=>{try{if(s===`list_models`){let e=await te();return{content:[{type:`text`,text:JSON.stringify({models:e,count:e.length,_Next:`Use delegate with a model name`},null,2)}]}}let o=await ee({prompt:e,model:t,system:n,context:r,temperature:i,timeout:a});return o.error?{content:[{type:`text`,text:JSON.stringify({error:o.error,model:o.model,durationMs:o.durationMs},null,2)}],isError:!0}:{content:[{type:`text`,text:JSON.stringify({model:o.model,response:o.response,durationMs:o.durationMs,tokenCount:o.tokenCount,_Next:`Use the response in your workflow. stash to save it.`},null,2)}]}}catch(e){return X.error(`Delegate failed`,o(e)),{content:[{type:`text`,text:`Delegate failed. 
Check server logs for details.`}],isError:!0}}})}function We(e){e.registerTool(`lane`,{description:`Manage verified lanes — isolated file copies for parallel exploration. Create a lane, make changes, diff, merge back, or discard.`,inputSchema:{action:Y.enum([`create`,`list`,`status`,`diff`,`merge`,`discard`]).describe(`Lane action to perform`),name:Y.string().optional().describe(`Lane name (required for create/status/diff/merge/discard)`),files:Y.array(Y.string()).optional().describe(`File paths to copy into the lane (required for create)`)}},async({action:e,name:t,files:n})=>{try{switch(e){case`create`:{if(!t)throw Error(`name is required for create`);if(!n||n.length===0)throw Error(`files are required for create`);let e=S(t,n);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`list`:return{content:[{type:`text`,text:JSON.stringify(T(),null,2)}]};case`status`:if(!t)throw Error(`name is required for status`);return{content:[{type:`text`,text:JSON.stringify(D(t),null,2)}]};case`diff`:if(!t)throw Error(`name is required for diff`);return{content:[{type:`text`,text:JSON.stringify(C(t),null,2)}]};case`merge`:if(!t)throw Error(`name is required for merge`);return{content:[{type:`text`,text:JSON.stringify(E(t),null,2)}]};case`discard`:if(!t)throw Error(`name is required for discard`);return{content:[{type:`text`,text:JSON.stringify({discarded:w(t)},null,2)}]}}}catch(e){return X.error(`Lane action failed`,o(e)),{content:[{type:`text`,text:`Lane action failed. Check server logs for details.`}],isError:!0}}})}function Ge(e){e.registerTool(`health`,{description:`Run project health checks — verifies package.json, tsconfig, scripts, lockfile, README, LICENSE, .gitignore.`,inputSchema:{path:Y.string().optional().describe(`Root directory to check (defaults to cwd)`)}},async({path:e})=>{try{let t=x(e);return{content:[{type:`text`,text:JSON.stringify(t,null,2)}]}}catch(e){return X.error(`Health check failed`,o(e)),{content:[{type:`text`,text:`Health check failed. 
Check server logs for details.`}],isError:!0}}})}function Ke(e){e.registerTool(`queue`,{description:`Manage task queues for sequential agent operations. Push items, take next, mark done/failed, list queues.`,inputSchema:{action:Y.enum([`create`,`push`,`next`,`done`,`fail`,`get`,`list`,`clear`,`delete`]).describe(`Queue action`),name:Y.string().optional().describe(`Queue name (required for most actions)`),title:Y.string().optional().describe(`Item title (required for push)`),id:Y.string().optional().describe(`Item ID (required for done/fail)`),data:Y.unknown().optional().describe(`Arbitrary data to attach to a queue item`),error:Y.string().optional().describe(`Error message (required for fail)`)}},async({action:e,name:t,title:n,id:r,data:i,error:a})=>{try{switch(e){case`create`:if(!t)throw Error(`name is required for create`);return{content:[{type:`text`,text:JSON.stringify(I(t),null,2)}]};case`push`:if(!t)throw Error(`name is required for push`);if(!n)throw Error(`title is required for push`);return{content:[{type:`text`,text:JSON.stringify(U(t,n,i),null,2)}]};case`next`:{if(!t)throw Error(`name is required for next`);let e=H(t);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`done`:if(!t)throw Error(`name is required for done`);if(!r)throw Error(`id is required for done`);return{content:[{type:`text`,text:JSON.stringify(R(t,r),null,2)}]};case`fail`:if(!t)throw Error(`name is required for fail`);if(!r)throw Error(`id is required for fail`);if(!a)throw Error(`error is required for fail`);return{content:[{type:`text`,text:JSON.stringify(z(t,r,a),null,2)}]};case`get`:if(!t)throw Error(`name is required for get`);return{content:[{type:`text`,text:JSON.stringify(B(t),null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(V(),null,2)}]};case`clear`:if(!t)throw Error(`name is required for clear`);return{content:[{type:`text`,text:JSON.stringify({cleared:F(t)},null,2)}]};case`delete`:if(!t)throw Error(`name is required for 
delete`);return{content:[{type:`text`,text:JSON.stringify({deleted:L(t)},null,2)}]}}}catch(e){return X.error(`Queue action failed`,o(e)),{content:[{type:`text`,text:`Queue action failed. Check server logs for details.`}],isError:!0}}})}const qe=Y.object({query:Y.string(),limit:Y.number().min(1).max(20).default(5).optional(),search_mode:Y.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).optional(),content_type:Y.enum(r).optional(),origin:Y.enum([`indexed`,`curated`,`produced`]).optional(),category:Y.string().optional(),tags:Y.array(Y.string()).optional(),min_score:Y.number().min(0).max(1).default(.25).optional()}),Je=Y.object({query:Y.string().optional(),glob:Y.string().optional(),pattern:Y.string().optional(),limit:Y.number().min(1).max(50).default(10).optional(),content_type:Y.enum(r).optional(),cwd:Y.string().optional()}),Ye=Y.object({files:Y.array(Y.string()).optional(),cwd:Y.string().optional(),skip_types:Y.boolean().optional(),skip_lint:Y.boolean().optional()});async function Xe(e,t,n){switch(e.type){case`search`:return Z(t,n,qe.parse(e.args));case`find`:{let r=Je.parse(e.args);if(!r.query&&!r.glob&&!r.pattern)throw Error(`find operation requires query, glob, or pattern`);return _(t,n,{query:r.query,glob:r.glob,pattern:r.pattern,limit:r.limit,contentType:r.content_type,cwd:r.cwd})}case`check`:{let t=Ye.parse(e.args);return l({files:t.files,cwd:t.cwd,skipTypes:t.skip_types,skipLint:t.skip_lint})}default:throw Error(`Unsupported batch operation type: ${e.type}`)}}async function Z(e,t,n){let r=n.limit??5,i={limit:r,minScore:n.min_score??.25,contentType:n.content_type,origin:n.origin,category:n.category,tags:n.tags},a=e.embedQuery?.bind(e)??e.embed.bind(e);if(n.search_mode===`keyword`)return(await t.ftsSearch(n.query,i)).slice(0,r);let o=await a(n.query);if(n.search_mode===`semantic`)return t.search(o,i);let[s,c]=await Promise.all([t.search(o,{...i,limit:r*2}),t.ftsSearch(n.query,{...i,limit:r*2})]);return Ze(s,c).slice(0,r)}function Ze(e,t,n=60){let r=new 
Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let i=t[e],a=r.get(i.record.id);if(a){a.score+=1/(n+e+1);continue}r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score)}function Qe(e){let t=[`Symbol: ${e.name}`];if(e.definedIn){let n=`Defined in: ${e.definedIn.path}:${e.definedIn.line} (${e.definedIn.kind})`;e.definedIn.signature&&(n+=`\nSignature: ${e.definedIn.signature}`),t.push(n)}else t.push(`Defined in: not found`);if(t.push(``,`Imported by:`),e.importedBy.length===0)t.push(` none`);else for(let n of e.importedBy)t.push(` - ${n.path}:${n.line} ${n.importStatement}`);if(t.push(``,`Referenced in:`),e.referencedIn.length===0)t.push(` none`);else for(let n of e.referencedIn){let e=`scope`in n&&n.scope?` in ${n.scope}()`:``;t.push(` - ${n.path}:${n.line}${e} ${n.context}`)}if(e.graphContext){let n=e.graphContext;t.push(``,`Graph context:`),n.definingModule&&t.push(` Module: ${n.definingModule}`),n.importedByModules.length>0&&t.push(` Imported by modules: ${n.importedByModules.join(`, `)}`),n.siblingSymbols.length>0&&t.push(` Sibling symbols: ${n.siblingSymbols.join(`, `)}`)}return t.join(`
13
+ `)}function $e(e){let t=[`Vitest run: ${e.passed?`passed`:`failed`}`,`Duration: ${e.durationMs}ms`,`Passed: ${e.summary.passed}`,`Failed: ${e.summary.failed}`,`Skipped: ${e.summary.skipped}`];e.summary.suites!==void 0&&t.push(`Suites: ${e.summary.suites}`);let n=e.summary.tests.filter(e=>e.status===`fail`);if(n.length>0){t.push(``,`Failed tests:`);for(let e of n)t.push(`- ${e.name}${e.file?` (${e.file})`:``}`),e.error&&t.push(` ${e.error}`)}return t.join(`
14
+ `)}function et(e){let t=[`Branch: ${e.branch}`,`Staged: ${e.status.staged.length}`,...e.status.staged.map(e=>` - ${e}`),`Modified: ${e.status.modified.length}`,...e.status.modified.map(e=>` - ${e}`),`Untracked: ${e.status.untracked.length}`,...e.status.untracked.map(e=>` - ${e}`),``,`Recent commits:`];if(e.recentCommits.length===0)t.push(` none`);else for(let n of e.recentCommits)t.push(` - ${n.hash} ${n.message}`),t.push(` ${n.author} @ ${n.date}`);return e.diff&&t.push(``,`Diff stat:`,e.diff),t.join(`
15
+ `)}function tt(e){if(e.length===0)return`No diff files found.`;let t=[];for(let n of e){let e=n.oldPath?` (from ${n.oldPath})`:``;t.push(`${n.path}${e} [${n.status}] +${n.additions} -${n.deletions} (${n.hunks.length} hunks)`);for(let e of n.hunks){let n=e.header?` ${e.header}`:``;t.push(` @@ -${e.oldStart},${e.oldLines} +${e.newStart},${e.newLines} @@${n}`)}}return t.join(`
16
+ `)}function nt(e){let t=[e.path,`Language: ${e.language}`,`Lines: ${e.lines}`,`Estimated tokens: ~${e.estimatedTokens}`,``,`Imports (${e.imports.length}):`,...$(e.imports),``,`Exports (${e.exports.length}):`,...$(e.exports),``,`Functions (${e.functions.length}):`,...$(e.functions.map(e=>`${e.name} @ line ${e.line}${e.exported?` [exported]`:``}${`signature`in e&&e.signature?` — ${e.signature}`:``}`)),``,`Classes (${e.classes.length}):`,...$(e.classes.map(e=>`${e.name} @ line ${e.line}${e.exported?` [exported]`:``}${e.signature?` — ${e.signature}`:``}`)),``,`Interfaces (${e.interfaces.length}):`,...$(e.interfaces.map(e=>`${e.name} @ line ${e.line}${`exported`in e&&e.exported?` [exported]`:``}`)),``,`Types (${e.types.length}):`,...$(e.types.map(e=>`${e.name} @ line ${e.line}${`exported`in e&&e.exported?` [exported]`:``}`))];if(`importDetails`in e&&e.importDetails&&e.importDetails.length>0){let n=e.importDetails.filter(e=>e.isExternal).length,r=e.importDetails.length-n;t.push(``,`Import breakdown: ${n} external, ${r} internal`)}if(`callEdges`in e&&e.callEdges&&e.callEdges.length>0){t.push(``,`Call edges (${e.callEdges.length} intra-file):`);for(let n of e.callEdges.slice(0,30))t.push(` - ${n.caller}() → ${n.callee}() @ line ${n.line}`);e.callEdges.length>30&&t.push(` - ... ${e.callEdges.length-30} more`)}return t.join(`
16
17
  `)}function Q(e){let t=[e.id,`Label: ${e.label}`,`Created: ${e.createdAt}`];if(e.notes&&t.push(`Notes: ${e.notes}`),e.files?.length){t.push(`Files: ${e.files.length}`);for(let n of e.files)t.push(` - ${n}`)}return t.push(``,`Data:`,JSON.stringify(e.data,null,2)),t.join(`
17
- `)}function $(e){return e.length===0?[` none`]:e.map(e=>` - ${e}`)}function $e(e){let t=e.trim();if(!t)return``;try{return JSON.parse(t)}catch{return e}}function et(e){let t=e?.trim();if(!t)return{};let n;try{n=JSON.parse(t)}catch{throw Error(`data must be a valid JSON object string`)}if(!n||typeof n!=`object`||Array.isArray(n))throw Error(`data must be a JSON object string`);return n}function tt(e){e.registerTool(`web_fetch`,{description:`PREFERRED web fetcher — fetch any URL and convert to LLM-optimized markdown. Supports CSS selectors, 4 output modes (markdown/raw/links/outline), smart paragraph-boundary truncation. Strips scripts/styles/nav automatically.`,inputSchema:{url:Y.string().url().describe(`URL to fetch (http/https only)`),mode:Y.enum([`markdown`,`raw`,`links`,`outline`]).default(`markdown`).describe(`Output mode: markdown (clean content), raw (HTML), links (extracted URLs), outline (heading hierarchy)`),selector:Y.string().optional().describe(`CSS selector to extract a specific element instead of auto-detecting main content`),max_length:Y.number().min(500).max(1e5).default(15e3).describe(`Max characters in output — truncates at paragraph boundaries`),include_metadata:Y.boolean().default(!0).describe(`Include page title, description, and URL as a header`),include_links:Y.boolean().default(!1).describe(`Append extracted links list at the end`),include_images:Y.boolean().default(!1).describe(`Include image alt texts inline`),timeout:Y.number().min(1e3).max(6e4).default(15e3).describe(`Request timeout in milliseconds`)}},async({url:e,mode:t,selector:r,max_length:i,include_metadata:a,include_links:o,include_images:s,timeout:c})=>{try{let n=await ge({url:e,mode:t,selector:r,maxLength:i,includeMetadata:a,includeLinks:o,includeImages:s,timeout:c}),l=[`## ${n.title||`Web Page`}`,``,n.content];return n.truncated&&l.push(``,`_Original length: ${n.originalLength.toLocaleString()} chars_`),l.push(``,`---`,"_Next: Use `remember` to save key findings, or 
`web_fetch` with a `selector` to extract a specific section._"),{content:[{type:`text`,text:l.join(`
18
- `)}]}}catch(e){return X.error(`Web fetch failed`,n(e)),{content:[{type:`text`,text:`Web fetch failed. Check server logs for details.`}],isError:!0}}})}function nt(e){e.registerTool(`guide`,{description:`Tool discovery — given a goal description, recommends which KB tools to use and in what order. Matches against 10 predefined workflows: onboard, audit, bugfix, implement, refactor, search, context, memory, validate, analyze.`,inputSchema:{goal:Y.string().describe(`What you want to accomplish (e.g., "audit this monorepo", "fix a failing test")`),max_recommendations:Y.number().min(1).max(10).default(5).describe(`Maximum number of tool recommendations`)}},async({goal:e,max_recommendations:t})=>{try{let n=_(e,t),r=[`## Recommended Workflow: **${n.workflow}**`,n.description,``,`### Tools`,...n.tools.map(e=>{let t=e.suggestedArgs?` — \`${JSON.stringify(e.suggestedArgs)}\``:``;return`${e.order}. **${e.tool}** — ${e.reason}${t}`})];return n.alternativeWorkflows.length>0&&r.push(``,`_Alternative workflows: ${n.alternativeWorkflows.join(`, `)}_`),r.push(``,`---`,"_Next: Run the first recommended tool, or use `guide` again with a more specific goal._"),{content:[{type:`text`,text:r.join(`
19
- `)}]}}catch(e){return X.error(`Guide failed`,n(e)),{content:[{type:`text`,text:`Guide failed. Check server logs for details.`}],isError:!0}}})}export{Ce as registerBatchTool,Se as registerCheckTool,Ne as registerCheckpointTool,je as registerCodemodTool,_e as registerCompactTool,Pe as registerDataTransformTool,Re as registerDeadSymbolsTool,ze as registerDelegateTool,ke as registerDiffParseTool,Te as registerEvalTool,Me as registerFileSummaryTool,ye as registerFindTool,Oe as registerGitContextTool,nt as registerGuideTool,Ve as registerHealthTool,Be as registerLaneTool,be as registerParseOutputTool,Ie as registerProcessTool,He as registerQueueTool,Ae as registerRenameTool,ve as registerScopeMapTool,De as registerStashTool,we as registerSymbolTool,Ee as registerTestRunTool,Fe as registerTraceTool,Le as registerWatchTool,tt as registerWebFetchTool,xe as registerWorksetTool};
20
- //# sourceMappingURL=toolkit.tools.js.map
18
+ `)}function $(e){return e.length===0?[` none`]:e.map(e=>` - ${e}`)}function rt(e){let t=e.trim();if(!t)return``;try{return JSON.parse(t)}catch{return e}}function it(e){let t=e?.trim();if(!t)return{};let n;try{n=JSON.parse(t)}catch{throw Error(`data must be a valid JSON object string`)}if(!n||typeof n!=`object`||Array.isArray(n))throw Error(`data must be a JSON object string`);return n}function at(e){e.registerTool(`web_fetch`,{description:`PREFERRED web fetcher — fetch any URL and convert to LLM-optimized markdown. Supports CSS selectors, 4 output modes (markdown/raw/links/outline), smart paragraph-boundary truncation. Strips scripts/styles/nav automatically.`,inputSchema:{url:Y.string().url().describe(`URL to fetch (http/https only)`),mode:Y.enum([`markdown`,`raw`,`links`,`outline`]).default(`markdown`).describe(`Output mode: markdown (clean content), raw (HTML), links (extracted URLs), outline (heading hierarchy)`),selector:Y.string().optional().describe(`CSS selector to extract a specific element instead of auto-detecting main content`),max_length:Y.number().min(500).max(1e5).default(15e3).describe(`Max characters in output — truncates at paragraph boundaries`),include_metadata:Y.boolean().default(!0).describe(`Include page title, description, and URL as a header`),include_links:Y.boolean().default(!1).describe(`Append extracted links list at the end`),include_images:Y.boolean().default(!1).describe(`Include image alt texts inline`),timeout:Y.number().min(1e3).max(6e4).default(15e3).describe(`Request timeout in milliseconds`)}},async({url:e,mode:t,selector:n,max_length:r,include_metadata:i,include_links:a,include_images:s,timeout:c})=>{try{let o=await be({url:e,mode:t,selector:n,maxLength:r,includeMetadata:i,includeLinks:a,includeImages:s,timeout:c}),l=[`## ${o.title||`Web Page`}`,``,o.content];return o.truncated&&l.push(``,`_Original length: ${o.originalLength.toLocaleString()} chars_`),l.push(``,`---`,"_Next: Use `remember` to save key findings, or 
`web_fetch` with a `selector` to extract a specific section._"),{content:[{type:`text`,text:l.join(`
19
+ `)}]}}catch(e){return X.error(`Web fetch failed`,o(e)),{content:[{type:`text`,text:`Web fetch failed. Check server logs for details.`}],isError:!0}}})}function ot(e){e.registerTool(`guide`,{description:`Tool discovery — given a goal description, recommends which KB tools to use and in what order. Matches against 10 predefined workflows: onboard, audit, bugfix, implement, refactor, search, context, memory, validate, analyze.`,inputSchema:{goal:Y.string().describe(`What you want to accomplish (e.g., "audit this monorepo", "fix a failing test")`),max_recommendations:Y.number().min(1).max(10).default(5).describe(`Maximum number of tool recommendations`)}},async({goal:e,max_recommendations:t})=>{try{let n=b(e,t),r=[`## Recommended Workflow: **${n.workflow}**`,n.description,``,`### Tools`,...n.tools.map(e=>{let t=e.suggestedArgs?` — \`${JSON.stringify(e.suggestedArgs)}\``:``;return`${e.order}. **${e.tool}** — ${e.reason}${t}`})];return n.alternativeWorkflows.length>0&&r.push(``,`_Alternative workflows: ${n.alternativeWorkflows.join(`, `)}_`),r.push(``,`---`,"_Next: Run the first recommended tool, or use `guide` again with a more specific goal._"),{content:[{type:`text`,text:r.join(`
20
+ `)}]}}catch(e){return X.error(`Guide failed`,o(e)),{content:[{type:`text`,text:`Guide failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}export{De as registerBatchTool,Ee as registerCheckTool,Le as registerCheckpointTool,Fe as registerCodemodTool,xe as registerCompactTool,Re as registerDataTransformTool,He as registerDeadSymbolsTool,Ue as registerDelegateTool,Ne as registerDiffParseTool,ke as registerEvalTool,Ie as registerFileSummaryTool,Ce as registerFindTool,Me as registerGitContextTool,ot as registerGuideTool,Ge as registerHealthTool,We as registerLaneTool,we as registerParseOutputTool,Be as registerProcessTool,Ke as registerQueueTool,Pe as registerRenameTool,Se as registerScopeMapTool,je as registerStashTool,Oe as registerSymbolTool,Ae as registerTestRunTool,ze as registerTraceTool,Ve as registerWatchTool,at as registerWebFetchTool,Te as registerWorksetTool};
@@ -4,5 +4,4 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
4
4
  //#region packages/server/src/tools/update.tool.d.ts
5
5
  declare function registerUpdateTool(server: McpServer, curated: CuratedKnowledgeManager): void;
6
6
  //#endregion
7
- export { registerUpdateTool };
8
- //# sourceMappingURL=update.tool.d.ts.map
7
+ export { registerUpdateTool };
@@ -1,2 +1 @@
1
- import{createLogger as e,serializeError as t}from"../../../core/dist/index.js";import{z as n}from"zod";const r=e(`tools`);function i(e,i){e.registerTool(`update`,{description:`Update an existing curated knowledge entry. Increments version and records the reason in the changelog.`,inputSchema:{path:n.string().describe(`Relative path within curated/ (e.g., "decisions/use-lancedb.md")`),content:n.string().min(10).max(1e5).describe(`New markdown content to replace existing content`),reason:n.string().min(3).max(1e3).describe(`Why this update is being made (recorded in changelog)`)}},async({path:e,content:n,reason:a})=>{try{let t=await i.update(e,n,a);return{content:[{type:`text`,text:`Updated: \`curated/${t.path}\` → version ${t.version}\n\nReason: ${a}\n\n---\n_Next: Use \`read\` to verify the updated content, or \`search\` to test searchability._`}]}}catch(e){return r.error(`Update failed`,t(e)),{content:[{type:`text`,text:`Update failed. Check server logs for details.`}],isError:!0}}})}export{i as registerUpdateTool};
2
- //# sourceMappingURL=update.tool.js.map
1
+ import{createLogger as e,serializeError as t}from"../../../core/dist/index.js";import{z as n}from"zod";const r=e(`tools`);function i(e,i){e.registerTool(`update`,{description:`Update an existing curated knowledge entry. Increments version and records the reason in the changelog.`,inputSchema:{path:n.string().describe(`Relative path within .ai/curated/ (e.g., "decisions/use-lancedb.md")`),content:n.string().min(10).max(1e5).describe(`New markdown content to replace existing content`),reason:n.string().min(3).max(1e3).describe(`Why this update is being made (recorded in changelog)`)}},async({path:e,content:n,reason:a})=>{try{let t=await i.update(e,n,a);return{content:[{type:`text`,text:`Updated: \`.ai/curated/${t.path}\` → version ${t.version}\n\nReason: ${a}\n\n---\n_Next: Use \`read\` to verify the updated content, or \`search\` to test searchability._`}]}}catch(e){return r.error(`Update failed`,t(e)),{content:[{type:`text`,text:`Update failed. Check server logs for details.`}],isError:!0}}})}export{i as registerUpdateTool};
@@ -12,5 +12,4 @@ declare function registerSnippetTool(server: McpServer): void;
12
12
  declare function registerEnvTool(server: McpServer): void;
13
13
  declare function registerTimeTool(server: McpServer): void;
14
14
  //#endregion
15
- export { registerChangelogTool, registerEncodeTool, registerEnvTool, registerHttpTool, registerMeasureTool, registerRegexTestTool, registerSchemaValidateTool, registerSnippetTool, registerTimeTool, registerWebSearchTool };
16
- //# sourceMappingURL=utility.tools.d.ts.map
15
+ export { registerChangelogTool, registerEncodeTool, registerEnvTool, registerHttpTool, registerMeasureTool, registerRegexTestTool, registerSchemaValidateTool, registerSnippetTool, registerTimeTool, registerWebSearchTool };
@@ -1,12 +1,11 @@
1
1
  import{createLogger as e,serializeError as t}from"../../../core/dist/index.js";import{changelog as n,encode as r,envInfo as i,httpRequest as a,measure as o,regexTest as s,schemaValidate as c,snippet as l,timeUtils as u,webSearch as d}from"../../../tools/dist/index.js";import{z as f}from"zod";const p=e(`tools`);function m(e){e.registerTool(`web_search`,{description:`PREFERRED web search — search the web via DuckDuckGo (no API key). Returns structured results with title, URL, and snippet.`,inputSchema:{query:f.string().max(2e3).describe(`Search query`),limit:f.number().min(1).max(20).default(5).describe(`Max results to return`),site:f.string().optional().describe(`Restrict to domain (e.g., "docs.aws.amazon.com")`)}},async({query:e,limit:n,site:r})=>{try{let t=await d({query:e,limit:n,site:r}),i=[`## Search: ${t.query}`,``];if(t.results.length===0)i.push(`No results found.`);else for(let e of t.results)i.push(`### [${e.title}](${e.url})`,e.snippet,``);return i.push(`---`,"_Next: Use `web_fetch` to read any of these pages in full._"),{content:[{type:`text`,text:i.join(`
2
2
  `)}]}}catch(e){return p.error(`Web search failed`,t(e)),{content:[{type:`text`,text:`Web search failed. Check server logs for details.`}],isError:!0}}})}function h(e){e.registerTool(`http`,{description:`Make HTTP requests (GET/POST/PUT/PATCH/DELETE/HEAD) for API testing. Returns status, headers, and formatted body with timing info.`,inputSchema:{url:f.string().url().describe(`Request URL (http/https only)`),method:f.enum([`GET`,`POST`,`PUT`,`PATCH`,`DELETE`,`HEAD`]).default(`GET`).describe(`HTTP method`),headers:f.record(f.string(),f.string()).optional().describe(`Request headers as key-value pairs`),body:f.string().optional().describe(`Request body (for POST/PUT/PATCH)`),timeout:f.number().min(1e3).max(6e4).default(15e3).describe(`Timeout in milliseconds`)}},async({url:e,method:n,headers:r,body:i,timeout:o})=>{try{let t=await a({url:e,method:n,headers:r,body:i,timeout:o}),s=[`## ${n} ${e}`,``,`**Status:** ${t.status} ${t.statusText}`,`**Time:** ${t.durationMs}ms`,`**Size:** ${t.sizeBytes} bytes`,`**Content-Type:** ${t.contentType}`,``,`### Headers`,"```json",JSON.stringify(t.headers,null,2),"```",``,`### Body`,t.contentType.includes(`json`)?"```json":"```",t.body,"```"];return t.truncated&&s.push(``,`_Response truncated — total size: ${t.sizeBytes} bytes_`),{content:[{type:`text`,text:s.join(`
3
3
  `)}]}}catch(e){return p.error(`HTTP request failed`,t(e)),{content:[{type:`text`,text:`HTTP request failed. Check server logs for details.`}],isError:!0}}})}function g(e){e.registerTool(`regex_test`,{description:`Test a regex pattern against sample strings. Supports match, replace, and split modes.`,inputSchema:{pattern:f.string().max(500).describe(`Regex pattern (without delimiters)`),flags:f.string().max(10).regex(/^[gimsuy]*$/).default(``).describe(`Regex flags (g, i, m, s, etc.)`),test_strings:f.array(f.string().max(1e4)).max(50).describe(`Strings to test the pattern against`),mode:f.enum([`match`,`replace`,`split`]).default(`match`).describe(`Test mode`),replacement:f.string().optional().describe(`Replacement string (for replace mode)`)}},async({pattern:e,flags:t,test_strings:n,mode:r,replacement:i})=>{let a=s({pattern:e,flags:t,testStrings:n,mode:r,replacement:i});if(!a.valid)return{content:[{type:`text`,text:`Invalid regex: ${a.error}`}],isError:!0};let o=[`## Regex: \`/${a.pattern}/${a.flags}\``,``,`Mode: ${r}`,``];for(let e of a.results){if(o.push(`**Input:** \`${e.input}\``),o.push(`**Matched:** ${e.matched}`),e.matches)for(let t of e.matches){let e=t.groups.length>0?` groups: [${t.groups.join(`, `)}]`:``;o.push(` - "${t.full}" at index ${t.index}${e}`)}e.replaced!==void 0&&o.push(`**Result:** \`${e.replaced}\``),e.split&&o.push(`**Split:** ${JSON.stringify(e.split)}`),o.push(``)}return{content:[{type:`text`,text:o.join(`
4
- `)}]}})}function _(e){e.registerTool(`encode`,{description:`Encode, decode, or hash text. Supports base64, URL encoding, SHA-256, MD5, JWT decode, hex.`,inputSchema:{operation:f.enum([`base64_encode`,`base64_decode`,`url_encode`,`url_decode`,`sha256`,`md5`,`jwt_decode`,`hex_encode`,`hex_decode`]).describe(`Operation to perform`),input:f.string().max(1e6).describe(`Input text`)}},async({operation:e,input:n})=>{try{let t=r({operation:e,input:n});return{content:[{type:`text`,text:`## ${e}\n\n**Input:** \`${n.length>100?`${n.slice(0,100)}...`:n}\`\n**Output:**\n\`\`\`\n${t.output}\n\`\`\``}]}}catch(e){return p.error(`Encode failed`,t(e)),{content:[{type:`text`,text:`Encode failed. Check server logs for details.`}],isError:!0}}})}function v(e){e.registerTool(`measure`,{description:`Measure code complexity, line counts, and function counts for a file or directory. Returns per-file metrics sorted by complexity.`,inputSchema:{path:f.string().describe(`File or directory path to measure`),extensions:f.array(f.string()).optional().describe(`File extensions to include (default: .ts,.tsx,.js,.jsx)`)}},async({path:e,extensions:n})=>{try{let t=o({path:e,extensions:n}),r=[`## Code Metrics`,``,`**Files:** ${t.summary.totalFiles}`,`**Total lines:** ${t.summary.totalLines} (${t.summary.totalCodeLines} code)`,`**Functions:** ${t.summary.totalFunctions}`,`**Avg complexity:** ${t.summary.avgComplexity}`,`**Max complexity:** ${t.summary.maxComplexity.value} (${t.summary.maxComplexity.file})`,``,`### Top files by complexity`,``,`| File | Lines | Code | Complexity | Functions | Imports |`,`|------|-------|------|------------|-----------|---------|`];for(let e of t.files.slice(0,20))r.push(`| ${e.path} | ${e.lines.total} | ${e.lines.code} | ${e.complexity} | ${e.functions} | ${e.imports} |`);return t.files.length>20&&r.push(``,`_...and ${t.files.length-20} more files_`),{content:[{type:`text`,text:r.join(`
4
+ `)}]}})}function _(e){e.registerTool(`encode`,{description:`Encode, decode, or hash text. Supports base64, URL encoding, SHA-256, MD5, JWT decode, hex.`,inputSchema:{operation:f.enum([`base64_encode`,`base64_decode`,`url_encode`,`url_decode`,`sha256`,`md5`,`jwt_decode`,`hex_encode`,`hex_decode`]).describe(`Operation to perform`),input:f.string().max(1e6).describe(`Input text`)}},async({operation:e,input:n})=>{try{let t=r({operation:e,input:n});return{content:[{type:`text`,text:`## ${e}\n\n**Input:** \`${n.length>100?`${n.slice(0,100)}...`:n}\`\n**Output:**\n\`\`\`\n${t.output}\n\`\`\``}]}}catch(e){return p.error(`Encode failed`,t(e)),{content:[{type:`text`,text:`Encode failed. Check server logs for details.`}],isError:!0}}})}function v(e){e.registerTool(`measure`,{description:`Measure code complexity, line counts, and function counts for a file or directory. Returns per-file metrics sorted by complexity.`,inputSchema:{path:f.string().describe(`File or directory path to measure`),extensions:f.array(f.string()).optional().describe(`File extensions to include (default: .ts,.tsx,.js,.jsx)`)}},async({path:e,extensions:n})=>{try{let t=await o({path:e,extensions:n}),r=[`## Code Metrics`,``,`**Files:** ${t.summary.totalFiles}`,`**Total lines:** ${t.summary.totalLines} (${t.summary.totalCodeLines} code)`,`**Functions:** ${t.summary.totalFunctions}`,`**Avg complexity:** ${t.summary.avgComplexity}`,`**Max complexity:** ${t.summary.maxComplexity.value} (${t.summary.maxComplexity.file})`,``,`### Top files by complexity`,``,`| File | Lines | Code | Complexity | Cognitive | Functions | Imports |`,`|------|-------|------|------------|-----------|-----------|---------|`];for(let e of t.files.slice(0,20)){let t=e.cognitiveComplexity===void 0?`—`:String(e.cognitiveComplexity);r.push(`| ${e.path} | ${e.lines.total} | ${e.lines.code} | ${e.complexity} | ${t} | ${e.functions} | ${e.imports} |`)}return t.files.length>20&&r.push(``,`_...and ${t.files.length-20} more 
files_`),{content:[{type:`text`,text:r.join(`
5
5
  `)}]}}catch(e){return p.error(`Measure failed`,t(e)),{content:[{type:`text`,text:`Measure failed. Check server logs for details.`}],isError:!0}}})}function y(e){e.registerTool(`changelog`,{description:`Generate a changelog from git history between two refs. Groups by conventional commit type.`,inputSchema:{from:f.string().max(200).describe(`Start ref (tag, SHA, HEAD~N)`),to:f.string().max(200).default(`HEAD`).describe(`End ref (default: HEAD)`),format:f.enum([`grouped`,`chronological`,`per-scope`]).default(`grouped`).describe(`Output format`),include_breaking:f.boolean().default(!0).describe(`Highlight breaking changes`)}},async({from:e,to:r,format:i,include_breaking:a})=>{try{let t=n({from:e,to:r,format:i,includeBreaking:a}),o=`${t.stats.total} commits (${Object.entries(t.stats.types).map(([e,t])=>`${t} ${e}`).join(`, `)})`;return{content:[{type:`text`,text:`${t.markdown}\n---\n_${o}_`}]}}catch(e){return p.error(`Changelog failed`,t(e)),{content:[{type:`text`,text:`Changelog failed. Check server logs for details.`}],isError:!0}}})}function b(e){e.registerTool(`schema_validate`,{description:`Validate JSON data against a JSON Schema. Supports type, required, properties, items, enum, pattern, min/max.`,inputSchema:{data:f.string().max(5e5).describe(`JSON data to validate (as string)`),schema:f.string().max(5e5).describe(`JSON Schema to validate against (as string)`)}},async({data:e,schema:n})=>{try{let t=c({data:JSON.parse(e),schema:JSON.parse(n)});if(t.valid)return{content:[{type:`text`,text:`## Validation: PASSED
6
6
 
7
7
  Data matches the schema.`}]};let r=[`## Validation: FAILED`,``,`**${t.errors.length} error(s):**`,``];for(let e of t.errors){let t=e.expected?` (expected: ${e.expected}, got: ${e.received})`:``;r.push(`- \`${e.path}\`: ${e.message}${t}`)}return{content:[{type:`text`,text:r.join(`
8
8
  `)}]}}catch(e){return p.error(`Schema validation failed`,t(e)),{content:[{type:`text`,text:`Schema validation failed. Check server logs for details.`}],isError:!0}}})}function x(e){e.registerTool(`snippet`,{description:`Save, retrieve, search, and manage persistent code snippets/templates.`,inputSchema:{action:f.enum([`save`,`get`,`list`,`search`,`delete`]).describe(`Operation to perform`),name:f.string().optional().describe(`Snippet name (required for save/get/delete)`),language:f.string().optional().describe(`Language tag (for save)`),code:f.string().max(1e5).optional().describe(`Code content (for save)`),tags:f.array(f.string()).optional().describe(`Tags for categorization (for save)`),query:f.string().optional().describe(`Search query (for search)`)}},async({action:e,name:n,language:r,code:i,tags:a,query:o})=>{try{let t=l({action:e,name:n,language:r,code:i,tags:a,query:o});if(`deleted`in t)return{content:[{type:`text`,text:t.deleted?`Snippet "${n}" deleted.`:`Snippet "${n}" not found.`}]};if(`snippets`in t){if(t.snippets.length===0)return{content:[{type:`text`,text:`No snippets found.`}]};let e=[`## Snippets`,``];for(let n of t.snippets){let t=n.tags.length>0?` [${n.tags.join(`, `)}]`:``;e.push(`- **${n.name}** (${n.language})${t}`)}return{content:[{type:`text`,text:e.join(`
9
9
  `)}]}}let s=t,c=s.tags.length>0?`\nTags: ${s.tags.join(`, `)}`:``;return{content:[{type:`text`,text:`## ${s.name} (${s.language})${c}\n\n\`\`\`${s.language}\n${s.code}\n\`\`\``}]}}catch(e){return p.error(`Snippet failed`,t(e)),{content:[{type:`text`,text:`Snippet failed. Check server logs for details.`}],isError:!0}}})}function S(e){e.registerTool(`env`,{description:`Get system and runtime environment info. Sensitive env vars are redacted by default.`,inputSchema:{include_env:f.boolean().default(!1).describe(`Include environment variables`),filter_env:f.string().optional().describe(`Filter env vars by name substring`),show_sensitive:f.boolean().default(!1).describe(`Show sensitive values (keys, tokens, etc.) — redacted by default`)}},async({include_env:e,filter_env:t,show_sensitive:n})=>{let r=i({includeEnv:e,filterEnv:t,showSensitive:n}),a=[`## Environment`,``,`**Platform:** ${r.system.platform} ${r.system.arch}`,`**OS:** ${r.system.type} ${r.system.release}`,`**Host:** ${r.system.hostname}`,`**CPUs:** ${r.system.cpus}`,`**Memory:** ${r.system.memoryFreeGb}GB free / ${r.system.memoryTotalGb}GB total`,``,`**Node:** ${r.runtime.node}`,`**V8:** ${r.runtime.v8}`,`**CWD:** ${r.cwd}`];if(r.env){a.push(``,`### Environment Variables`,``);for(let[e,t]of Object.entries(r.env))a.push(`- \`${e}\`: ${t}`)}return{content:[{type:`text`,text:a.join(`
10
10
  `)}]}})}function C(e){e.registerTool(`time`,{description:`Parse dates, convert timezones, calculate durations, add time. Supports ISO 8601, unix timestamps, and human-readable formats.`,inputSchema:{operation:f.enum([`now`,`parse`,`convert`,`diff`,`add`]).describe(`now: current time | parse: parse a date string | convert: timezone conversion | diff: duration between two dates | add: add duration to date`),input:f.string().optional().describe(`Date input (ISO, unix timestamp, or parseable string). For diff: two comma-separated dates`),timezone:f.string().optional().describe(`Target timezone (e.g., "America/New_York", "Asia/Tokyo")`),duration:f.string().optional().describe(`Duration to add (e.g., "2h30m", "1d", "30s") — for add operation`)}},async({operation:e,input:n,timezone:r,duration:i})=>{try{let t=u({operation:e,input:n,timezone:r,duration:i}),a=[`**${t.output}**`,``,`ISO: ${t.iso}`,`Unix: ${t.unix}`];return t.details&&a.push(``,"```json",JSON.stringify(t.details,null,2),"```"),{content:[{type:`text`,text:a.join(`
11
- `)}]}}catch(e){return p.error(`Time failed`,t(e)),{content:[{type:`text`,text:`Time failed. Check server logs for details.`}],isError:!0}}})}export{y as registerChangelogTool,_ as registerEncodeTool,S as registerEnvTool,h as registerHttpTool,v as registerMeasureTool,g as registerRegexTestTool,b as registerSchemaValidateTool,x as registerSnippetTool,C as registerTimeTool,m as registerWebSearchTool};
12
- //# sourceMappingURL=utility.tools.js.map
11
+ `)}]}}catch(e){return p.error(`Time failed`,t(e)),{content:[{type:`text`,text:`Time failed. Check server logs for details.`}],isError:!0}}})}export{y as registerChangelogTool,_ as registerEncodeTool,S as registerEnvTool,h as registerHttpTool,v as registerMeasureTool,g as registerRegexTestTool,b as registerSchemaValidateTool,x as registerSnippetTool,C as registerTimeTool,m as registerWebSearchTool};
@@ -10,5 +10,4 @@ declare function getCurrentVersion(): string;
10
10
  */
11
11
  declare function checkForUpdates(): void;
12
12
  //#endregion
13
- export { checkForUpdates, getCurrentVersion };
14
- //# sourceMappingURL=version-check.d.ts.map
13
+ export { checkForUpdates, getCurrentVersion };
@@ -1,2 +1 @@
1
- import{readFileSync as e}from"node:fs";import{dirname as t,resolve as n}from"node:path";import{fileURLToPath as r}from"node:url";import{createLogger as i}from"../../core/dist/index.js";const a=i(`server`);function o(){let i=n(t(r(import.meta.url)),`..`,`..`,`..`,`package.json`);try{return JSON.parse(e(i,`utf-8`)).version??`0.0.0`}catch{return`0.0.0`}}function s(e,t){let n=e.split(`.`).map(Number),r=t.split(`.`).map(Number);for(let e=0;e<3;e++){let t=(n[e]??0)-(r[e]??0);if(t!==0)return t>0?1:-1}return 0}function c(){let e=o();fetch(`https://registry.npmjs.org/@vpxa/kb/latest`,{signal:AbortSignal.timeout(5e3)}).then(e=>{if(e.ok)return e.json()}).then(t=>{if(!t||typeof t!=`object`)return;let n=t.version;n&&s(e,n)<0&&a.warn(`Update available`,{currentVersion:e,latestVersion:n,updateCommand:`npx @vpxa/kb@${n} serve`,configHint:`update your mcp.json`})}).catch(()=>{})}export{c as checkForUpdates,o as getCurrentVersion};
2
- //# sourceMappingURL=version-check.js.map
1
+ import{readFileSync as e}from"node:fs";import{dirname as t,resolve as n}from"node:path";import{fileURLToPath as r}from"node:url";import{createLogger as i}from"../../core/dist/index.js";const a=i(`server`);function o(){let i=n(t(r(import.meta.url)),`..`,`..`,`..`,`package.json`);try{return JSON.parse(e(i,`utf-8`)).version??`0.0.0`}catch{return`0.0.0`}}function s(e,t){let n=e.split(`.`).map(Number),r=t.split(`.`).map(Number);for(let e=0;e<3;e++){let t=(n[e]??0)-(r[e]??0);if(t!==0)return t>0?1:-1}return 0}function c(){let e=o();fetch(`https://registry.npmjs.org/@vpxa/kb/latest`,{signal:AbortSignal.timeout(5e3)}).then(e=>{if(e.ok)return e.json()}).then(t=>{if(!t||typeof t!=`object`)return;let n=t.version;n&&s(e,n)<0&&a.warn(`Update available`,{currentVersion:e,latestVersion:n,updateCommand:`npx @vpxa/kb@${n} serve`,configHint:`update your mcp.json`})}).catch(()=>{})}export{c as checkForUpdates,o as getCurrentVersion};
@@ -103,5 +103,4 @@ interface IGraphStore {
103
103
  close(): Promise<void>;
104
104
  }
105
105
  //#endregion
106
- export { GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, IGraphStore };
107
- //# sourceMappingURL=graph-store.interface.d.ts.map
106
+ export { GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, IGraphStore };
@@ -35,5 +35,4 @@ declare class LanceStore implements IKnowledgeStore {
35
35
  private fromLanceRecord;
36
36
  }
37
37
  //#endregion
38
- export { LanceStore };
39
- //# sourceMappingURL=lance-store.d.ts.map
38
+ export { LanceStore };
@@ -1,2 +1 @@
1
- import{EMBEDDING_DEFAULTS as e,SEARCH_DEFAULTS as t,STORE_DEFAULTS as n,createLogger as r,serializeError as i,sourceTypeContentTypes as a}from"../../core/dist/index.js";import{Index as o,connect as s}from"@lancedb/lancedb";function c(e){if(!e)return[];try{let t=JSON.parse(e);return Array.isArray(t)?t:[]}catch{return[]}}const l=/^[\w.\-/ ]+$/,u=r(`store`);function d(e,t){if(!l.test(e))throw Error(`Invalid ${t} filter value: contains disallowed characters`);return e.replace(/'/g,`''`)}var f=class{db=null;table=null;dbPath;tableName;_writeQueue=Promise.resolve();enqueueWrite(e){let t=this._writeQueue.then(()=>e());return this._writeQueue=t.then(()=>void 0,()=>void 0),t}constructor(e){this.dbPath=e?.path??n.path,this.tableName=e?.tableName??n.tableName}async initialize(){this.db=await s(this.dbPath),(await this.db.tableNames()).includes(this.tableName)&&(this.table=await this.db.openTable(this.tableName),await this.createFtsIndex())}async upsert(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t))}}async _upsertImpl(e,t){let n=e.map((e,n)=>({id:e.id,vector:Array.from(t[n]),content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath??``,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:JSON.stringify(e.tags),category:e.category??``,version:e.version}));if(this.table){let t=[...new Set(e.map(e=>e.sourcePath))];for(let e of t)try{await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`)}catch{}await this.table.add(n)}else try{this.table=await this.db?.createTable(this.tableName,n)??null}catch(e){if(String(e).includes(`already exists`)&&this.db)this.table=await this.db.openTable(this.tableName),await this.table.add(n);else throw e}}async search(e,n){if(!this.table)return[];let 
r=n?.limit??t.maxResults,i=n?.minScore??t.minScore,a=this.table.search(e).limit(r*2),o=this.buildFilterString(n);return o&&(a=a.where(o)),(await a.toArray()).map(e=>({record:this.fromLanceRecord(e),score:1-(e._distance??1)})).filter(e=>e.score>=i).slice(0,r)}async createFtsIndex(){return this.enqueueWrite(()=>this._createFtsIndexImpl())}async _createFtsIndexImpl(){if(this.table)try{await this.table.createIndex(`content`,{config:o.fts()}),u.info(`FTS index created`,{column:`content`})}catch(e){String(e).includes(`already exists`)||u.warn(`FTS index creation failed`,i(e))}}async ftsSearch(e,n){if(!this.table)return[];let r=n?.limit??t.maxResults;try{let t=this.table.search(e).limit(r*2),i=this.buildFilterString(n);return i&&(t=t.where(i)),(await t.toArray()).map(e=>({record:this.fromLanceRecord(e),score:e._score??e._relevance_score??0}))}catch(e){return u.warn(`FTS search failed`,i(e)),[]}}async getById(e){if(!this.table)return null;let t=await this.table.query().where(`id = '${d(e,`id`)}'`).limit(1).toArray();return t.length===0?null:this.fromLanceRecord(t[0])}async deleteBySourcePath(e){return this.enqueueWrite(()=>this._deleteBySourcePathImpl(e))}async _deleteBySourcePathImpl(e){if(!this.table)return 0;let t=await this.getBySourcePath(e);return t.length===0?0:(await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`),t.length)}async deleteById(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e))}async _deleteByIdImpl(e){return!this.table||!await this.getById(e)?!1:(await this.table.delete(`id = '${d(e,`id`)}'`),!0)}async getBySourcePath(e){return this.table?(await this.table.query().where(`sourcePath = '${d(e,`sourcePath`)}'`).limit(1e3).toArray()).map(e=>this.fromLanceRecord(e)):[]}async getStats(){if(!this.table)return{totalRecords:0,totalFiles:0,contentTypeBreakdown:{},lastIndexedAt:null,storeBackend:`lancedb`,embeddingModel:e.model};let t=await this.table.countRows(),n=await 
this.table.query().select([`sourcePath`,`contentType`,`indexedAt`]).limit(1e5).toArray(),r={},i=new Set,a=null;for(let e of n){let t=e;r[t.contentType]=(r[t.contentType]??0)+1,i.add(t.sourcePath),(!a||t.indexedAt>a)&&(a=t.indexedAt)}return{totalRecords:t,totalFiles:i.size,contentTypeBreakdown:r,lastIndexedAt:a,storeBackend:`lancedb`,embeddingModel:e.model}}async listSourcePaths(){if(!this.table)return[];let e=await this.table.query().select([`sourcePath`]).limit(1e5).toArray();return[...new Set(e.map(e=>e.sourcePath))]}async dropTable(){return this.enqueueWrite(()=>this._dropTableImpl())}async _dropTableImpl(){if(this.db&&(await this.db.tableNames()).includes(this.tableName))for(let e=1;e<=3;e++)try{await this.db.dropTable(this.tableName);break}catch(t){if(e===3)throw t;let n=e*500;u.warn(`dropTable attempt failed, retrying`,{attempt:e,delayMs:n}),await new Promise(e=>setTimeout(e,n))}this.table=null}async close(){try{this.db&&typeof this.db.close==`function`&&await this.db.close()}catch{}this.table=null,this.db=null}buildFilterString(e){let t=[];if(e?.contentType&&t.push(`contentType = '${d(e.contentType,`contentType`)}'`),e?.sourceType){let n=a(e.sourceType);if(n.length>0){let e=n.map(e=>`'${d(e,`sourceType`)}'`).join(`, `);t.push(`contentType IN (${e})`)}}if(e?.origin&&t.push(`origin = '${d(e.origin,`origin`)}'`),e?.category&&t.push(`category = '${d(e.category,`category`)}'`),e?.tags&&e.tags.length>0){let n=e.tags.map(e=>`tags LIKE '%${d(e,`tag`)}%'`);t.push(`(${n.join(` OR `)})`)}return t.length>0?t.join(` AND `):null}fromLanceRecord(e){return{id:e.id,content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath||void 0,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:c(e.tags),category:e.category||void 0,version:e.version}}};export{f as LanceStore};
2
- //# sourceMappingURL=lance-store.js.map
1
+ import{EMBEDDING_DEFAULTS as e,SEARCH_DEFAULTS as t,STORE_DEFAULTS as n,createLogger as r,serializeError as i,sourceTypeContentTypes as a}from"../../core/dist/index.js";import{Index as o,connect as s}from"@lancedb/lancedb";function c(e){if(!e)return[];try{let t=JSON.parse(e);return Array.isArray(t)?t:[]}catch{return[]}}const l=/^[\w.\-/ ]+$/,u=r(`store`);function d(e,t){if(!l.test(e))throw Error(`Invalid ${t} filter value: contains disallowed characters`);return e.replace(/'/g,`''`)}var f=class{db=null;table=null;dbPath;tableName;_writeQueue=Promise.resolve();enqueueWrite(e){let t=this._writeQueue.then(()=>e());return this._writeQueue=t.then(()=>void 0,()=>void 0),t}constructor(e){this.dbPath=e?.path??n.path,this.tableName=e?.tableName??n.tableName}async initialize(){this.db=await s(this.dbPath),(await this.db.tableNames()).includes(this.tableName)&&(this.table=await this.db.openTable(this.tableName),await this.createFtsIndex())}async upsert(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t))}}async _upsertImpl(e,t){let n=e.map((e,n)=>({id:e.id,vector:Array.from(t[n]),content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath??``,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:JSON.stringify(e.tags),category:e.category??``,version:e.version}));if(this.table){let t=[...new Set(e.map(e=>e.sourcePath))];for(let e of t)try{await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`)}catch{}await this.table.add(n)}else try{this.table=await this.db?.createTable(this.tableName,n)??null}catch(e){if(String(e).includes(`already exists`)&&this.db)this.table=await this.db.openTable(this.tableName),await this.table.add(n);else throw e}}async search(e,n){if(!this.table)return[];let 
r=n?.limit??t.maxResults,i=n?.minScore??t.minScore,a=this.table.search(e).limit(r*2),o=this.buildFilterString(n);return o&&(a=a.where(o)),(await a.toArray()).map(e=>({record:this.fromLanceRecord(e),score:1-(e._distance??1)})).filter(e=>e.score>=i).slice(0,r)}async createFtsIndex(){return this.enqueueWrite(()=>this._createFtsIndexImpl())}async _createFtsIndexImpl(){if(this.table)try{await this.table.createIndex(`content`,{config:o.fts(),replace:!0}),u.info(`FTS index created/updated`,{column:`content`})}catch(e){u.warn(`FTS index creation failed`,i(e))}}async ftsSearch(e,n){if(!this.table)return[];let r=n?.limit??t.maxResults;try{let t=this.table.search(e).limit(r*2),i=this.buildFilterString(n);return i&&(t=t.where(i)),(await t.toArray()).map(e=>({record:this.fromLanceRecord(e),score:e._score??e._relevance_score??0}))}catch(e){return u.warn(`FTS search failed`,i(e)),[]}}async getById(e){if(!this.table)return null;let t=await this.table.query().where(`id = '${d(e,`id`)}'`).limit(1).toArray();return t.length===0?null:this.fromLanceRecord(t[0])}async deleteBySourcePath(e){return this.enqueueWrite(()=>this._deleteBySourcePathImpl(e))}async _deleteBySourcePathImpl(e){if(!this.table)return 0;let t=await this.getBySourcePath(e);return t.length===0?0:(await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`),t.length)}async deleteById(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e))}async _deleteByIdImpl(e){return!this.table||!await this.getById(e)?!1:(await this.table.delete(`id = '${d(e,`id`)}'`),!0)}async getBySourcePath(e){return this.table?(await this.table.query().where(`sourcePath = '${d(e,`sourcePath`)}'`).limit(1e3).toArray()).map(e=>this.fromLanceRecord(e)):[]}async getStats(){if(!this.table)return{totalRecords:0,totalFiles:0,contentTypeBreakdown:{},lastIndexedAt:null,storeBackend:`lancedb`,embeddingModel:e.model};let t=await this.table.countRows(),n=await 
this.table.query().select([`sourcePath`,`contentType`,`indexedAt`]).limit(1e5).toArray(),r={},i=new Set,a=null;for(let e of n){let t=e;r[t.contentType]=(r[t.contentType]??0)+1,i.add(t.sourcePath),(!a||t.indexedAt>a)&&(a=t.indexedAt)}return{totalRecords:t,totalFiles:i.size,contentTypeBreakdown:r,lastIndexedAt:a,storeBackend:`lancedb`,embeddingModel:e.model}}async listSourcePaths(){if(!this.table)return[];let e=await this.table.query().select([`sourcePath`]).limit(1e5).toArray();return[...new Set(e.map(e=>e.sourcePath))]}async dropTable(){return this.enqueueWrite(()=>this._dropTableImpl())}async _dropTableImpl(){if(this.db&&(await this.db.tableNames()).includes(this.tableName))for(let e=1;e<=3;e++)try{await this.db.dropTable(this.tableName);break}catch(t){if(e===3)throw t;let n=e*500;u.warn(`dropTable attempt failed, retrying`,{attempt:e,delayMs:n}),await new Promise(e=>setTimeout(e,n))}this.table=null}async close(){try{this.db&&typeof this.db.close==`function`&&await this.db.close()}catch{}this.table=null,this.db=null}buildFilterString(e){let t=[];if(e?.contentType&&t.push(`contentType = '${d(e.contentType,`contentType`)}'`),e?.sourceType){let n=a(e.sourceType);if(n.length>0){let e=n.map(e=>`'${d(e,`sourceType`)}'`).join(`, `);t.push(`contentType IN (${e})`)}}if(e?.origin&&t.push(`origin = '${d(e.origin,`origin`)}'`),e?.category&&t.push(`category = '${d(e.category,`category`)}'`),e?.tags&&e.tags.length>0){let n=e.tags.map(e=>`tags LIKE '%${d(e,`tag`)}%'`);t.push(`(${n.join(` OR `)})`)}return t.length>0?t.join(` AND `):null}fromLanceRecord(e){return{id:e.id,content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath||void 0,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:c(e.tags),category:e.category||void 0,version:e.version}}};export{f as LanceStore};
@@ -41,5 +41,4 @@ declare class SqliteGraphStore implements IGraphStore {
41
41
  close(): Promise<void>;
42
42
  }
43
43
  //#endregion
44
- export { SqliteGraphStore };
45
- //# sourceMappingURL=sqlite-graph-store.d.ts.map
44
+ export { SqliteGraphStore };
@@ -1,4 +1,4 @@
1
- import{existsSync as e,mkdirSync as t,readFileSync as n,writeFileSync as r}from"node:fs";import{dirname as i,join as a}from"node:path";var o=class{db=null;dbPath;dirty=!1;constructor(e){this.dbPath=a(e?.path??`.kb-data`,`graph.db`)}async initialize(){let r=i(this.dbPath);e(r)||t(r,{recursive:!0});let a=(await import(`sql.js`)).default,o=await a();if(e(this.dbPath)){let e=n(this.dbPath);this.db=new o.Database(e)}else this.db=new o.Database;this.db.run(`PRAGMA journal_mode = WAL`),this.db.exec(`PRAGMA foreign_keys = ON;`),this.db.run(`
1
+ import{KB_PATHS as e}from"../../core/dist/index.js";import{existsSync as t,mkdirSync as n,readFileSync as r,writeFileSync as i}from"node:fs";import{dirname as a,join as o}from"node:path";var s=class{db=null;dbPath;dirty=!1;constructor(t){this.dbPath=o(t?.path??e.data,`graph.db`)}async initialize(){let e=a(this.dbPath);t(e)||n(e,{recursive:!0});let i=(await import(`sql.js`)).default,o=await i();if(t(this.dbPath)){let e=r(this.dbPath);this.db=new o.Database(e)}else this.db=new o.Database;this.db.run(`PRAGMA journal_mode = WAL`),this.db.exec(`PRAGMA foreign_keys = ON;`),this.db.run(`
2
2
  CREATE TABLE IF NOT EXISTS nodes (
3
3
  id TEXT PRIMARY KEY,
4
4
  type TEXT NOT NULL,
@@ -19,7 +19,7 @@ import{existsSync as e,mkdirSync as t,readFileSync as n,writeFileSync as r}from"
19
19
  FOREIGN KEY (from_id) REFERENCES nodes(id) ON DELETE CASCADE,
20
20
  FOREIGN KEY (to_id) REFERENCES nodes(id) ON DELETE CASCADE
21
21
  )
22
- `),this.db.run(`CREATE INDEX IF NOT EXISTS idx_nodes_type ON nodes(type)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_nodes_name ON nodes(name)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_nodes_source_path ON nodes(source_path)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_from ON edges(from_id)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_to ON edges(to_id)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_type ON edges(type)`),this.persist()}ensureDb(){if(!this.db)throw Error(`Graph store not initialized — call initialize() first`);return this.db}persist(){if(!this.db)return;let e=this.db.export();try{r(this.dbPath,Buffer.from(e))}finally{this.db.exec(`PRAGMA foreign_keys = ON;`)}this.dirty=!1}markDirty(){this.dirty=!0}flushIfDirty(){this.dirty&&this.persist()}query(e,t=[]){let n=this.ensureDb().prepare(e);n.bind(t);let r=[];try{for(;n.step();)r.push(n.getAsObject())}finally{n.free()}return r}run(e,t=[]){this.ensureDb().run(e,t)}async upsertNode(e){this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at)
22
+ `),this.db.run(`CREATE INDEX IF NOT EXISTS idx_nodes_type ON nodes(type)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_nodes_name ON nodes(name)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_nodes_source_path ON nodes(source_path)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_from ON edges(from_id)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_to ON edges(to_id)`),this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_type ON edges(type)`),this.persist()}ensureDb(){if(!this.db)throw Error(`Graph store not initialized — call initialize() first`);return this.db}persist(){if(!this.db)return;let e=this.db.export();try{i(this.dbPath,Buffer.from(e))}finally{this.db.exec(`PRAGMA foreign_keys = ON;`)}this.dirty=!1}markDirty(){this.dirty=!0}flushIfDirty(){this.dirty&&this.persist()}query(e,t=[]){let n=this.ensureDb().prepare(e);n.bind(t);let r=[];try{for(;n.step();)r.push(n.getAsObject())}finally{n.free()}return r}run(e,t=[]){this.ensureDb().run(e,t)}async upsertNode(e){this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at)
23
23
  VALUES (?, ?, ?, ?, ?, ?, ?)
24
24
  ON CONFLICT(id) DO UPDATE SET
25
25
  type = excluded.type, name = excluded.name, properties = excluded.properties,
@@ -31,17 +31,16 @@ import{existsSync as e,mkdirSync as t,readFileSync as n,writeFileSync as r}from"
31
31
  VALUES (?, ?, ?, ?, ?, ?, ?)
32
32
  ON CONFLICT(id) DO UPDATE SET
33
33
  type = excluded.type, name = excluded.name, properties = excluded.properties,
34
- source_record_id = excluded.source_record_id, source_path = excluded.source_path`,[t.id,t.type,t.name,JSON.stringify(t.properties),t.sourceRecordId??null,t.sourcePath??null,t.createdAt??new Date().toISOString()]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async upsertEdges(e){if(e.length===0)return;let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{for(let t of e)this.run(`INSERT INTO edges (id, from_id, to_id, type, weight, properties)
34
+ source_record_id = excluded.source_record_id, source_path = excluded.source_path`,[t.id,t.type,t.name,JSON.stringify(t.properties),t.sourceRecordId??null,t.sourcePath??null,t.createdAt??new Date().toISOString()]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async upsertEdges(e){if(e.length===0)return;let t=this.ensureDb();t.run(`PRAGMA foreign_keys = OFF`),t.run(`BEGIN TRANSACTION`);try{for(let t of e)this.run(`INSERT INTO edges (id, from_id, to_id, type, weight, properties)
35
35
  VALUES (?, ?, ?, ?, ?, ?)
36
36
  ON CONFLICT(id) DO UPDATE SET
37
37
  from_id = excluded.from_id, to_id = excluded.to_id,
38
- type = excluded.type, weight = excluded.weight, properties = excluded.properties`,[t.id,t.fromId,t.toId,t.type,t.weight??1,JSON.stringify(t.properties??{})]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async getNode(e){let t=this.query(`SELECT * FROM nodes WHERE id = ?`,[e]);return t.length>0?c(t[0]):null}async getNeighbors(e,t){let n=t?.direction??`both`,r=t?.edgeType,i=t?.limit??50,a=[],o=[],s=new Set;if(n===`outgoing`||n===`both`){let t=`
38
+ type = excluded.type, weight = excluded.weight, properties = excluded.properties`,[t.id,t.fromId,t.toId,t.type,t.weight??1,JSON.stringify(t.properties??{})]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}finally{t.run(`PRAGMA foreign_keys = ON`)}this.markDirty(),this.flushIfDirty()}async getNode(e){let t=this.query(`SELECT * FROM nodes WHERE id = ?`,[e]);return t.length>0?l(t[0]):null}async getNeighbors(e,t){let n=t?.direction??`both`,r=t?.edgeType,i=t?.limit??50,a=[],o=[],s=new Set;if(n===`outgoing`||n===`both`){let t=`
39
39
  SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight, e.properties AS edge_props,
40
40
  n.id AS node_id, n.type AS node_type, n.name AS node_name, n.properties AS node_props,
41
41
  n.source_record_id AS node_src_rec, n.source_path AS node_src_path, n.created_at AS node_created
42
- FROM edges e JOIN nodes n ON e.to_id = n.id WHERE e.from_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(u(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(d(e)))}if(n===`incoming`||n===`both`){let t=`
42
+ FROM edges e JOIN nodes n ON e.to_id = n.id WHERE e.from_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(d(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(f(e)))}if(n===`incoming`||n===`both`){let t=`
43
43
  SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight, e.properties AS edge_props,
44
44
  n.id AS node_id, n.type AS node_type, n.name AS node_name, n.properties AS node_props,
45
45
  n.source_record_id AS node_src_rec, n.source_path AS node_src_path, n.created_at AS node_created
46
- FROM edges e JOIN nodes n ON e.from_id = n.id WHERE e.to_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(u(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(d(e)))}return{nodes:a,edges:o}}async traverse(e,t){let n=t?.maxDepth??2,r=t?.direction??`both`,i=t?.edgeType,a=t?.limit??50,o=new Map,s=new Map,c=new Set,l=[{nodeId:e,depth:0}];for(;l.length>0&&o.size<a;){let e=l.shift();if(!e||c.has(e.nodeId)||e.depth>n)continue;c.add(e.nodeId);let t=await this.getNeighbors(e.nodeId,{direction:r,edgeType:i,limit:a-o.size});for(let r of t.nodes)o.has(r.id)||(o.set(r.id,r),e.depth+1<n&&l.push({nodeId:r.id,depth:e.depth+1}));for(let e of t.edges)s.set(e.id,e)}return{nodes:[...o.values()],edges:[...s.values()]}}async findNodes(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.namePattern&&(t.push(`name LIKE ?`),n.push(`%${e.namePattern}%`)),e.sourcePath&&(t.push(`source_path = ?`),n.push(e.sourcePath));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM nodes ${r} LIMIT ?`,[...n,i]).map(e=>c(e))}async findEdges(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.fromId&&(t.push(`from_id = ?`),n.push(e.fromId)),e.toId&&(t.push(`to_id = ?`),n.push(e.toId));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM edges ${r} LIMIT ?`,[...n,i]).map(e=>l(e))}async deleteNode(e){let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM edges WHERE from_id = ? OR to_id = ?`,[e,e]),this.run(`DELETE FROM nodes WHERE id = ?`,[e]),t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async deleteBySourcePath(e){let t=this.query(`SELECT id FROM nodes WHERE source_path = ?`,[e]);if(t.length===0)return 0;let n=this.ensureDb();n.run(`BEGIN TRANSACTION`);try{for(let e of t)this.run(`DELETE FROM edges WHERE from_id = ? 
OR to_id = ?`,[e.id,e.id]);this.run(`DELETE FROM nodes WHERE source_path = ?`,[e]),n.run(`COMMIT`)}catch(e){throw n.run(`ROLLBACK`),e}return this.markDirty(),this.flushIfDirty(),t.length}async clear(){this.run(`DELETE FROM edges`),this.run(`DELETE FROM nodes`),this.markDirty(),this.flushIfDirty()}async getStats(){let e=this.query(`SELECT COUNT(*) as count FROM nodes`)[0]?.count??0,t=this.query(`SELECT COUNT(*) as count FROM edges`)[0]?.count??0,n=this.query(`SELECT type, COUNT(*) as count FROM nodes GROUP BY type`),r={};for(let e of n)r[e.type]=e.count;let i=this.query(`SELECT type, COUNT(*) as count FROM edges GROUP BY type`),a={};for(let e of i)a[e.type]=e.count;return{nodeCount:e,edgeCount:t,nodeTypes:r,edgeTypes:a}}async close(){this.db&&=(this.flushIfDirty(),this.db.close(),null)}};function s(e){if(!e)return{};try{return JSON.parse(e)}catch{return{}}}function c(e){return{id:e.id,type:e.type,name:e.name,properties:s(e.properties),sourceRecordId:e.source_record_id??void 0,sourcePath:e.source_path??void 0,createdAt:e.created_at}}function l(e){return{id:e.id,fromId:e.from_id,toId:e.to_id,type:e.type,weight:e.weight??1,properties:s(e.properties)}}function u(e){return{id:e.edge_id,fromId:e.from_id,toId:e.to_id,type:e.edge_type,weight:e.weight??1,properties:s(e.edge_props??`{}`)}}function d(e){return{id:e.node_id,type:e.node_type,name:e.node_name,properties:s(e.node_props??`{}`),sourceRecordId:e.node_src_rec??void 0,sourcePath:e.node_src_path??void 0,createdAt:e.node_created}}export{o as SqliteGraphStore};
47
- //# sourceMappingURL=sqlite-graph-store.js.map
46
+ FROM edges e JOIN nodes n ON e.from_id = n.id WHERE e.to_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(d(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(f(e)))}return{nodes:a,edges:o}}async traverse(e,t){let n=t?.maxDepth??2,r=t?.direction??`both`,i=t?.edgeType,a=t?.limit??50,o=new Map,s=new Map,c=new Set,l=[{nodeId:e,depth:0}];for(;l.length>0&&o.size<a;){let e=l.shift();if(!e||c.has(e.nodeId)||e.depth>n)continue;c.add(e.nodeId);let t=await this.getNeighbors(e.nodeId,{direction:r,edgeType:i,limit:a-o.size});for(let r of t.nodes)o.has(r.id)||(o.set(r.id,r),e.depth+1<n&&l.push({nodeId:r.id,depth:e.depth+1}));for(let e of t.edges)s.set(e.id,e)}return{nodes:[...o.values()],edges:[...s.values()]}}async findNodes(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.namePattern&&(t.push(`name LIKE ?`),n.push(`%${e.namePattern}%`)),e.sourcePath&&(t.push(`source_path = ?`),n.push(e.sourcePath));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM nodes ${r} LIMIT ?`,[...n,i]).map(e=>l(e))}async findEdges(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.fromId&&(t.push(`from_id = ?`),n.push(e.fromId)),e.toId&&(t.push(`to_id = ?`),n.push(e.toId));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM edges ${r} LIMIT ?`,[...n,i]).map(e=>u(e))}async deleteNode(e){let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM edges WHERE from_id = ? OR to_id = ?`,[e,e]),this.run(`DELETE FROM nodes WHERE id = ?`,[e]),t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async deleteBySourcePath(e){let t=this.query(`SELECT id FROM nodes WHERE source_path = ?`,[e]);if(t.length===0)return 0;let n=this.ensureDb();n.run(`BEGIN TRANSACTION`);try{for(let e of t)this.run(`DELETE FROM edges WHERE from_id = ? 
OR to_id = ?`,[e.id,e.id]);this.run(`DELETE FROM nodes WHERE source_path = ?`,[e]),n.run(`COMMIT`)}catch(e){throw n.run(`ROLLBACK`),e}return this.markDirty(),this.flushIfDirty(),t.length}async clear(){this.run(`DELETE FROM edges`),this.run(`DELETE FROM nodes`),this.markDirty(),this.flushIfDirty()}async getStats(){let e=this.query(`SELECT COUNT(*) as count FROM nodes`)[0]?.count??0,t=this.query(`SELECT COUNT(*) as count FROM edges`)[0]?.count??0,n=this.query(`SELECT type, COUNT(*) as count FROM nodes GROUP BY type`),r={};for(let e of n)r[e.type]=e.count;let i=this.query(`SELECT type, COUNT(*) as count FROM edges GROUP BY type`),a={};for(let e of i)a[e.type]=e.count;return{nodeCount:e,edgeCount:t,nodeTypes:r,edgeTypes:a}}async close(){this.db&&=(this.flushIfDirty(),this.db.close(),null)}};function c(e){if(!e)return{};try{return JSON.parse(e)}catch{return{}}}function l(e){return{id:e.id,type:e.type,name:e.name,properties:c(e.properties),sourceRecordId:e.source_record_id??void 0,sourcePath:e.source_path??void 0,createdAt:e.created_at}}function u(e){return{id:e.id,fromId:e.from_id,toId:e.to_id,type:e.type,weight:e.weight??1,properties:c(e.properties)}}function d(e){return{id:e.edge_id,fromId:e.from_id,toId:e.to_id,type:e.edge_type,weight:e.weight??1,properties:c(e.edge_props??`{}`)}}function f(e){return{id:e.node_id,type:e.node_type,name:e.node_name,properties:c(e.node_props??`{}`),sourceRecordId:e.node_src_rec??void 0,sourcePath:e.node_src_path??void 0,createdAt:e.node_created}}export{s as SqliteGraphStore};
@@ -9,5 +9,4 @@ interface StoreConfig {
9
9
  }
10
10
  declare function createStore(config: StoreConfig): Promise<IKnowledgeStore>;
11
11
  //#endregion
12
- export { StoreBackend, StoreConfig, createStore };
13
- //# sourceMappingURL=store-factory.d.ts.map
12
+ export { StoreBackend, StoreConfig, createStore };
@@ -1,2 +1 @@
1
- async function e(e){switch(e.backend){case`lancedb`:{let{LanceStore:t}=await import(`./lance-store.js`);return new t({path:e.path})}default:throw Error(`Unknown store backend: "${e.backend}". Supported: lancedb`)}}export{e as createStore};
2
- //# sourceMappingURL=store-factory.js.map
1
+ async function e(e){switch(e.backend){case`lancedb`:{let{LanceStore:t}=await import(`./lance-store.js`);return new t({path:e.path})}default:throw Error(`Unknown store backend: "${e.backend}". Supported: lancedb`)}}export{e as createStore};
@@ -47,5 +47,4 @@ interface IKnowledgeStore {
47
47
  close(): Promise<void>;
48
48
  }
49
49
  //#endregion
50
- export { IKnowledgeStore, SearchOptions };
51
- //# sourceMappingURL=store.interface.d.ts.map
50
+ export { IKnowledgeStore, SearchOptions };
@@ -62,5 +62,4 @@ interface AuditData {
62
62
  }
63
63
  declare function audit(store: IKnowledgeStore, embedder: IEmbedder, options?: AuditOptions): Promise<KBResponse<AuditData>>;
64
64
  //#endregion
65
- export { AuditCheck, AuditData, AuditOptions, AuditRecommendation, audit };
66
- //# sourceMappingURL=audit.d.ts.map
65
+ export { AuditCheck, AuditData, AuditOptions, AuditRecommendation, audit };
@@ -3,5 +3,4 @@ import{check as e,summarizeCheckResult as t}from"./check.js";import{findDeadSymb
3
3
  `);for(let t of e.recommendations){let e=t.priority===`high`?`🔴`:t.priority===`medium`?`🟡`:`🟢`;n.push(`${e} **${t.area}:** ${t.message}`)}}if(t===`full`&&e.patterns&&e.patterns.length>0){n.push(`
4
4
  ### Patterns Detected
5
5
  `),n.push(`| Pattern | Confidence | Count |`),n.push(`|---------|-----------|-------|`);for(let t of e.patterns)n.push(`| ${t.name} | ${t.confidence} | ${t.count} |`)}return n.join(`
6
- `)}export{f as audit};
7
- //# sourceMappingURL=audit.js.map
6
+ `)}export{f as audit};
@@ -20,5 +20,4 @@ interface BatchOptions {
20
20
  }
21
21
  declare function batch(operations: BatchOperation[], executor: (op: BatchOperation) => Promise<unknown>, options?: BatchOptions): Promise<BatchResult[]>;
22
22
  //#endregion
23
- export { BatchOperation, BatchOptions, BatchResult, batch };
24
- //# sourceMappingURL=batch.d.ts.map
23
+ export { BatchOperation, BatchOptions, BatchResult, batch };
@@ -1,2 +1 @@
1
- async function e(e,t,n={}){let r=Math.max(1,n.concurrency??4),i=[],a=[...e];async function o(e){let n=Date.now();try{let r=await t(e);return{id:e.id,status:`success`,result:r,durationMs:Date.now()-n}}catch(t){return{id:e.id,status:`error`,error:t instanceof Error?t.message:String(t),durationMs:Date.now()-n}}}for(;a.length>0;){let e=a.splice(0,r),t=await Promise.allSettled(e.map(e=>o(e)));for(let e of t){if(e.status===`fulfilled`){i.push(e.value);continue}i.push({id:`unknown`,status:`error`,error:e.reason instanceof Error?e.reason.message:`Promise rejected`,durationMs:0})}}return i}export{e as batch};
2
- //# sourceMappingURL=batch.js.map
1
+ async function e(e,t,n={}){let r=Math.max(1,n.concurrency??4),i=[],a=[...e];async function o(e){let n=Date.now();try{let r=await t(e);return{id:e.id,status:`success`,result:r,durationMs:Date.now()-n}}catch(t){return{id:e.id,status:`error`,error:t instanceof Error?t.message:String(t),durationMs:Date.now()-n}}}for(;a.length>0;){let e=a.splice(0,r),t=await Promise.allSettled(e.map(e=>o(e)));for(let e of t){if(e.status===`fulfilled`){i.push(e.value);continue}i.push({id:`unknown`,status:`error`,error:e.reason instanceof Error?e.reason.message:`Promise rejected`,durationMs:0})}}return i}export{e as batch};
@@ -33,5 +33,4 @@ declare function changelog(options: ChangelogOptions): ChangelogResult;
33
33
  /** Exported for testing */
34
34
  declare function formatChangelog(entries: ChangelogEntry[], format: ChangelogFormat, includeBreaking: boolean): string;
35
35
  //#endregion
36
- export { ChangelogEntry, ChangelogFormat, ChangelogOptions, ChangelogResult, changelog, formatChangelog };
37
- //# sourceMappingURL=changelog.d.ts.map
36
+ export { ChangelogEntry, ChangelogFormat, ChangelogOptions, ChangelogResult, changelog, formatChangelog };
@@ -1,3 +1,2 @@
1
- import{execSync as e}from"node:child_process";const t=/^[a-zA-Z0-9_./\-~^@{}]+$/;function n(n){let{from:i,to:a=`HEAD`,format:o=`grouped`,includeBreaking:s=!0,cwd:c=process.cwd()}=n;if(!t.test(i))throw Error(`Invalid git ref: ${i}`);if(!t.test(a))throw Error(`Invalid git ref: ${a}`);let l;try{l=e(`git log "${i}..${a}" --format="%H%s%b%an%ai"`,{cwd:c,encoding:`utf8`,maxBuffer:10*1024*1024,timeout:3e4})}catch{throw Error(`Git log failed. Ensure "${i}" and "${a}" are valid refs.`)}let u=l.split(``).map(e=>e.trim()).filter(Boolean).map(e=>{let[t=``,n=``,r=``,i=``,a=``]=e.split(``),o=n.match(/^(\w+)(?:\(([^)]*)\))?(!)?:\s*(.+)/);return{hash:t.slice(0,8),type:o?.[1]??`other`,scope:o?.[2]??``,subject:o?.[4]??n,body:r.trim(),author:i.trim(),date:a.trim().split(` `)[0],breaking:!!(o?.[3]||/BREAKING[\s-]CHANGE/i.test(r))}}),d={},f=0;for(let e of u)d[e.type]=(d[e.type]??0)+1,e.breaking&&f++;return{entries:u,markdown:r(u,o,s),stats:{total:u.length,breaking:f,types:d}}}function r(e,t,n){let r=[`# Changelog`,``];if(n){let t=e.filter(e=>e.breaking);if(t.length>0){r.push(`## Breaking Changes`,``);for(let e of t)r.push(`- ${e.subject} (${e.hash})`);r.push(``)}}if(t===`grouped`){let t={};for(let n of e)t[n.type]||(t[n.type]=[]),t[n.type].push(n);let n=[`feat`,`fix`,`refactor`,`perf`,`test`,`docs`,`chore`],i={feat:`Features`,fix:`Bug Fixes`,refactor:`Refactoring`,perf:`Performance`,test:`Tests`,docs:`Documentation`,chore:`Chores`,other:`Other`};for(let e of[...n,...Object.keys(t).filter(e=>!n.includes(e))])if(t[e]?.length){r.push(`## ${i[e]??e}`,``);for(let n of t[e]){let e=n.scope?`**${n.scope}:** `:``;r.push(`- ${e}${n.subject} (${n.hash})`)}r.push(``)}}else if(t===`chronological`)for(let t of e){let e=t.scope?`(${t.scope}) `:``;r.push(`- \`${t.date}\` ${t.type}: ${e}${t.subject} (${t.hash})`)}else{let t={};for(let n of e){let e=n.scope||`general`;t[e]||(t[e]=[]),t[e].push(n)}for(let[e,n]of Object.entries(t)){r.push(`## ${e}`,``);for(let e of n)r.push(`- ${e.type}: ${e.subject} 
(${e.hash})`);r.push(``)}}return r.join(`
2
- `)}export{n as changelog,r as formatChangelog};
3
- //# sourceMappingURL=changelog.js.map
1
+ import{execFileSync as e}from"node:child_process";const t=/^[a-zA-Z0-9_./\-~^@{}]+$/;function n(n){let{from:i,to:a=`HEAD`,format:o=`grouped`,includeBreaking:s=!0,cwd:c=process.cwd()}=n;if(!t.test(i))throw Error(`Invalid git ref: ${i}`);if(!t.test(a))throw Error(`Invalid git ref: ${a}`);let l;try{l=e(`git`,[`log`,`${i}..${a}`,`--format=%H%s%b%an%ai`],{cwd:c,encoding:`utf8`,maxBuffer:10*1024*1024,timeout:3e4})}catch{throw Error(`Git log failed. Ensure "${i}" and "${a}" are valid refs.`)}let u=l.split(``).map(e=>e.trim()).filter(Boolean).map(e=>{let[t=``,n=``,r=``,i=``,a=``]=e.split(``),o=n.match(/^(\w+)(?:\(([^)]*)\))?(!)?:\s*(.+)/);return{hash:t.slice(0,8),type:o?.[1]??`other`,scope:o?.[2]??``,subject:o?.[4]??n,body:r.trim(),author:i.trim(),date:a.trim().split(` `)[0],breaking:!!(o?.[3]||/BREAKING[\s-]CHANGE/i.test(r))}}),d={},f=0;for(let e of u)d[e.type]=(d[e.type]??0)+1,e.breaking&&f++;return{entries:u,markdown:r(u,o,s),stats:{total:u.length,breaking:f,types:d}}}function r(e,t,n){let r=[`# Changelog`,``];if(n){let t=e.filter(e=>e.breaking);if(t.length>0){r.push(`## Breaking Changes`,``);for(let e of t)r.push(`- ${e.subject} (${e.hash})`);r.push(``)}}if(t===`grouped`){let t={};for(let n of e)t[n.type]||(t[n.type]=[]),t[n.type].push(n);let n=[`feat`,`fix`,`refactor`,`perf`,`test`,`docs`,`chore`],i={feat:`Features`,fix:`Bug Fixes`,refactor:`Refactoring`,perf:`Performance`,test:`Tests`,docs:`Documentation`,chore:`Chores`,other:`Other`};for(let e of[...n,...Object.keys(t).filter(e=>!n.includes(e))])if(t[e]?.length){r.push(`## ${i[e]??e}`,``);for(let n of t[e]){let e=n.scope?`**${n.scope}:** `:``;r.push(`- ${e}${n.subject} (${n.hash})`)}r.push(``)}}else if(t===`chronological`)for(let t of e){let e=t.scope?`(${t.scope}) `:``;r.push(`- \`${t.date}\` ${t.type}: ${e}${t.subject} (${t.hash})`)}else{let t={};for(let n of e){let e=n.scope||`general`;t[e]||(t[e]=[]),t[e].push(n)}for(let[e,n]of Object.entries(t)){r.push(`## ${e}`,``);for(let e of n)r.push(`- ${e.type}: 
${e.subject} (${e.hash})`);r.push(``)}}return r.join(`
2
+ `)}export{n as changelog,r as formatChangelog};
@@ -45,5 +45,4 @@ declare function check(options?: CheckOptions): Promise<CheckResult>;
45
45
  /** Produce a minimal summary for LLM consumption (~300 tokens) */
46
46
  declare function summarizeCheckResult(result: CheckResult): CheckSummaryResult;
47
47
  //#endregion
48
- export { CheckOptions, CheckResult, CheckSummaryResult, check, summarizeCheckResult };
49
- //# sourceMappingURL=check.d.ts.map
48
+ export { CheckOptions, CheckResult, CheckSummaryResult, check, summarizeCheckResult };