bluera-knowledge 0.14.0 → 0.14.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (659)
  1. package/.claude-plugin/plugin.json +1 -1
  2. package/CHANGELOG.md +14 -0
  3. package/dist/{chunk-AIS5S77C.js → chunk-X7E4RYJE.js} +51 -9
  4. package/dist/{chunk-AIS5S77C.js.map → chunk-X7E4RYJE.js.map} +1 -1
  5. package/dist/index.js +1 -1
  6. package/dist/workers/background-worker-cli.js +1 -1
  7. package/package.json +12 -1
  8. package/.claude/commands/code-review.md +0 -15
  9. package/.claude/commands/commit.md +0 -34
  10. package/.claude/council-cache/1a43ed5977b8f29afc79a9bf5c4082ee5ad8338c42ab991a4241a48f80c1e46d.json +0 -7
  11. package/.claude/hooks/post-edit-check.sh +0 -40
  12. package/.claude/rules/code-quality.md +0 -12
  13. package/.claude/rules/git.md +0 -5
  14. package/.claude/rules/versioning.md +0 -7
  15. package/.claude/settings.local.json.example +0 -45
  16. package/.claude/skills/atomic-commits/SKILL.md +0 -61
  17. package/.claude/skills/code-review-repo/skill.md +0 -62
  18. package/.editorconfig +0 -15
  19. package/.env.example +0 -21
  20. package/.github/workflows/auto-release.yml +0 -64
  21. package/.github/workflows/ci.yml +0 -168
  22. package/.github/workflows/release.yml +0 -74
  23. package/.github/workflows/update-marketplace.yml +0 -96
  24. package/.husky/pre-commit +0 -48
  25. package/.husky/pre-push +0 -39
  26. package/.mcp.json +0 -11
  27. package/.prettierrc +0 -9
  28. package/.versionrc.json +0 -24
  29. package/CLAUDE.md +0 -110
  30. package/CONTRIBUTING.md +0 -307
  31. package/NOTICE +0 -47
  32. package/SECURITY.md +0 -65
  33. package/bun.lock +0 -2036
  34. package/docs/claude-code-best-practices.md +0 -458
  35. package/docs/cli.md +0 -170
  36. package/docs/commands.md +0 -392
  37. package/docs/crawler-architecture.md +0 -89
  38. package/docs/mcp-integration.md +0 -130
  39. package/docs/token-efficiency.md +0 -91
  40. package/eslint-rules/require-skip-comment.js +0 -81
  41. package/eslint.config.js +0 -103
  42. package/knip.json +0 -43
  43. package/scripts/test-mcp-dev.js +0 -260
  44. package/scripts/validate-npm-release.sh +0 -314
  45. package/src/analysis/adapter-registry.test.ts +0 -211
  46. package/src/analysis/adapter-registry.ts +0 -155
  47. package/src/analysis/ast-parser.test.ts +0 -470
  48. package/src/analysis/ast-parser.ts +0 -198
  49. package/src/analysis/code-graph.test.ts +0 -718
  50. package/src/analysis/code-graph.ts +0 -249
  51. package/src/analysis/dependency-usage-analyzer.test.ts +0 -619
  52. package/src/analysis/dependency-usage-analyzer.ts +0 -433
  53. package/src/analysis/go-ast-parser.test.ts +0 -531
  54. package/src/analysis/go-ast-parser.ts +0 -471
  55. package/src/analysis/language-adapter.ts +0 -127
  56. package/src/analysis/parser-factory.test.ts +0 -210
  57. package/src/analysis/parser-factory.ts +0 -52
  58. package/src/analysis/python-ast-parser.test.ts +0 -210
  59. package/src/analysis/python-ast-parser.ts +0 -34
  60. package/src/analysis/repo-url-resolver.test.ts +0 -533
  61. package/src/analysis/repo-url-resolver.ts +0 -233
  62. package/src/analysis/rust-ast-parser.test.ts +0 -568
  63. package/src/analysis/rust-ast-parser.ts +0 -467
  64. package/src/analysis/tree-sitter-parser.test.ts +0 -297
  65. package/src/analysis/tree-sitter-parser.ts +0 -217
  66. package/src/analysis/zil/index.ts +0 -34
  67. package/src/analysis/zil/zil-adapter.test.ts +0 -187
  68. package/src/analysis/zil/zil-adapter.ts +0 -121
  69. package/src/analysis/zil/zil-lexer.test.ts +0 -222
  70. package/src/analysis/zil/zil-lexer.ts +0 -239
  71. package/src/analysis/zil/zil-parser.test.ts +0 -210
  72. package/src/analysis/zil/zil-parser.ts +0 -360
  73. package/src/analysis/zil/zil-special-forms.ts +0 -193
  74. package/src/cli/commands/crawl.test.ts +0 -1086
  75. package/src/cli/commands/crawl.ts +0 -220
  76. package/src/cli/commands/index-cmd.test.ts +0 -733
  77. package/src/cli/commands/index-cmd.ts +0 -128
  78. package/src/cli/commands/mcp.test.ts +0 -218
  79. package/src/cli/commands/mcp.ts +0 -18
  80. package/src/cli/commands/plugin-api.test.ts +0 -373
  81. package/src/cli/commands/plugin-api.ts +0 -82
  82. package/src/cli/commands/search.test.ts +0 -1047
  83. package/src/cli/commands/search.ts +0 -197
  84. package/src/cli/commands/serve.test.ts +0 -371
  85. package/src/cli/commands/serve.ts +0 -43
  86. package/src/cli/commands/setup.test.ts +0 -895
  87. package/src/cli/commands/setup.ts +0 -176
  88. package/src/cli/commands/store.test.ts +0 -1370
  89. package/src/cli/commands/store.ts +0 -229
  90. package/src/cli/commands/sync.test.ts +0 -54
  91. package/src/cli/commands/sync.ts +0 -313
  92. package/src/cli/index.ts +0 -8
  93. package/src/cli/program.ts +0 -59
  94. package/src/crawl/article-converter.test.ts +0 -576
  95. package/src/crawl/article-converter.ts +0 -142
  96. package/src/crawl/bridge.test.ts +0 -796
  97. package/src/crawl/bridge.ts +0 -349
  98. package/src/crawl/claude-client.test.ts +0 -902
  99. package/src/crawl/claude-client.ts +0 -261
  100. package/src/crawl/intelligent-crawler.test.ts +0 -1028
  101. package/src/crawl/intelligent-crawler.ts +0 -478
  102. package/src/crawl/markdown-utils.test.ts +0 -703
  103. package/src/crawl/markdown-utils.ts +0 -225
  104. package/src/crawl/schemas.ts +0 -114
  105. package/src/db/embeddings.test.ts +0 -79
  106. package/src/db/embeddings.ts +0 -78
  107. package/src/db/index.ts +0 -2
  108. package/src/db/lance.test.ts +0 -479
  109. package/src/db/lance.ts +0 -190
  110. package/src/defaults/repos.ts +0 -67
  111. package/src/index.ts +0 -124
  112. package/src/logging/index.ts +0 -25
  113. package/src/logging/logger.test.ts +0 -75
  114. package/src/logging/logger.ts +0 -145
  115. package/src/logging/payload.test.ts +0 -152
  116. package/src/logging/payload.ts +0 -119
  117. package/src/mcp/cache.test.ts +0 -202
  118. package/src/mcp/cache.ts +0 -103
  119. package/src/mcp/commands/index.ts +0 -24
  120. package/src/mcp/commands/job.commands.ts +0 -48
  121. package/src/mcp/commands/meta.commands.ts +0 -54
  122. package/src/mcp/commands/registry.ts +0 -180
  123. package/src/mcp/commands/store.commands.ts +0 -75
  124. package/src/mcp/commands/sync.commands.test.ts +0 -371
  125. package/src/mcp/commands/sync.commands.ts +0 -263
  126. package/src/mcp/commands/uninstall.commands.test.ts +0 -37
  127. package/src/mcp/commands/uninstall.commands.ts +0 -29
  128. package/src/mcp/handlers/execute.handler.test.ts +0 -179
  129. package/src/mcp/handlers/execute.handler.ts +0 -23
  130. package/src/mcp/handlers/index.ts +0 -39
  131. package/src/mcp/handlers/job.handler.test.ts +0 -189
  132. package/src/mcp/handlers/job.handler.ts +0 -118
  133. package/src/mcp/handlers/search.handler.test.ts +0 -324
  134. package/src/mcp/handlers/search.handler.ts +0 -287
  135. package/src/mcp/handlers/store.handler.test.ts +0 -408
  136. package/src/mcp/handlers/store.handler.ts +0 -318
  137. package/src/mcp/handlers/uninstall.handler.test.ts +0 -194
  138. package/src/mcp/handlers/uninstall.handler.ts +0 -142
  139. package/src/mcp/plugin-mcp-config.test.ts +0 -71
  140. package/src/mcp/schemas/index.test.ts +0 -356
  141. package/src/mcp/schemas/index.ts +0 -155
  142. package/src/mcp/server.test.ts +0 -91
  143. package/src/mcp/server.ts +0 -235
  144. package/src/mcp/types.ts +0 -41
  145. package/src/plugin/commands.test.ts +0 -925
  146. package/src/plugin/commands.ts +0 -311
  147. package/src/plugin/dependency-analyzer.test.ts +0 -380
  148. package/src/plugin/dependency-analyzer.ts +0 -210
  149. package/src/plugin/git-clone.test.ts +0 -387
  150. package/src/plugin/git-clone.ts +0 -57
  151. package/src/scripts/validate-npm-release.test.ts +0 -70
  152. package/src/server/app.test.ts +0 -752
  153. package/src/server/app.ts +0 -128
  154. package/src/server/index.test.ts +0 -475
  155. package/src/server/index.ts +0 -1
  156. package/src/services/chunking.service.test.ts +0 -363
  157. package/src/services/chunking.service.ts +0 -380
  158. package/src/services/code-graph.service.test.ts +0 -298
  159. package/src/services/code-graph.service.ts +0 -326
  160. package/src/services/code-unit.service.test.ts +0 -693
  161. package/src/services/code-unit.service.ts +0 -234
  162. package/src/services/config.service.test.ts +0 -146
  163. package/src/services/config.service.ts +0 -92
  164. package/src/services/gitignore.service.test.ts +0 -157
  165. package/src/services/gitignore.service.ts +0 -132
  166. package/src/services/index.service.test.ts +0 -2301
  167. package/src/services/index.service.ts +0 -442
  168. package/src/services/index.ts +0 -119
  169. package/src/services/job.service.test.ts +0 -531
  170. package/src/services/job.service.ts +0 -298
  171. package/src/services/project-root.service.test.ts +0 -504
  172. package/src/services/project-root.service.ts +0 -112
  173. package/src/services/search.service.test.ts +0 -2263
  174. package/src/services/search.service.ts +0 -1341
  175. package/src/services/services.test.ts +0 -108
  176. package/src/services/snippet.service.test.ts +0 -213
  177. package/src/services/snippet.service.ts +0 -193
  178. package/src/services/store-definition.service.test.ts +0 -440
  179. package/src/services/store-definition.service.ts +0 -198
  180. package/src/services/store.service.test.ts +0 -843
  181. package/src/services/store.service.ts +0 -363
  182. package/src/services/token.service.test.ts +0 -45
  183. package/src/services/token.service.ts +0 -33
  184. package/src/services/watch.service.test.ts +0 -600
  185. package/src/services/watch.service.ts +0 -84
  186. package/src/types/brands.test.ts +0 -47
  187. package/src/types/brands.ts +0 -32
  188. package/src/types/config.ts +0 -79
  189. package/src/types/document.ts +0 -54
  190. package/src/types/index.ts +0 -73
  191. package/src/types/job.ts +0 -61
  192. package/src/types/progress.ts +0 -9
  193. package/src/types/result.test.ts +0 -54
  194. package/src/types/result.ts +0 -41
  195. package/src/types/search.ts +0 -105
  196. package/src/types/store-definition.test.ts +0 -492
  197. package/src/types/store-definition.ts +0 -129
  198. package/src/types/store.test.ts +0 -69
  199. package/src/types/store.ts +0 -47
  200. package/src/utils/type-guards.test.ts +0 -351
  201. package/src/utils/type-guards.ts +0 -61
  202. package/src/workers/background-worker-cli.test.ts +0 -35
  203. package/src/workers/background-worker-cli.ts +0 -149
  204. package/src/workers/background-worker.test.ts +0 -222
  205. package/src/workers/background-worker.ts +0 -322
  206. package/src/workers/pid-file.test.ts +0 -167
  207. package/src/workers/pid-file.ts +0 -82
  208. package/src/workers/spawn-worker.test.ts +0 -194
  209. package/src/workers/spawn-worker.ts +0 -70
  210. package/tests/analysis/ast-parser.test.ts +0 -98
  211. package/tests/analysis/code-graph.test.ts +0 -60
  212. package/tests/fixtures/README.md +0 -114
  213. package/tests/fixtures/code-snippets/api/error-handling.ts +0 -256
  214. package/tests/fixtures/code-snippets/api/rest-controller.ts +0 -297
  215. package/tests/fixtures/code-snippets/auth/jwt-auth.ts +0 -197
  216. package/tests/fixtures/code-snippets/auth/oauth-flow.ts +0 -245
  217. package/tests/fixtures/code-snippets/database/repository-pattern.ts +0 -280
  218. package/tests/fixtures/corpus/VERSION.md +0 -25
  219. package/tests/fixtures/corpus/articles/jwt-authentication.md +0 -97
  220. package/tests/fixtures/corpus/articles/react-hooks-patterns.md +0 -127
  221. package/tests/fixtures/corpus/articles/typescript-generics.md +0 -111
  222. package/tests/fixtures/corpus/documentation/express-middleware.md +0 -71
  223. package/tests/fixtures/corpus/documentation/express-routing.md +0 -83
  224. package/tests/fixtures/corpus/documentation/node-streams.md +0 -78
  225. package/tests/fixtures/corpus/oss-repos/express/History.md +0 -3871
  226. package/tests/fixtures/corpus/oss-repos/express/LICENSE +0 -24
  227. package/tests/fixtures/corpus/oss-repos/express/README.md +0 -276
  228. package/tests/fixtures/corpus/oss-repos/express/SECURITY.md +0 -56
  229. package/tests/fixtures/corpus/oss-repos/express/benchmarks/Makefile +0 -17
  230. package/tests/fixtures/corpus/oss-repos/express/benchmarks/README.md +0 -34
  231. package/tests/fixtures/corpus/oss-repos/express/benchmarks/middleware.js +0 -20
  232. package/tests/fixtures/corpus/oss-repos/express/benchmarks/run +0 -18
  233. package/tests/fixtures/corpus/oss-repos/express/examples/README.md +0 -29
  234. package/tests/fixtures/corpus/oss-repos/express/examples/auth/index.js +0 -134
  235. package/tests/fixtures/corpus/oss-repos/express/examples/auth/views/foot.ejs +0 -2
  236. package/tests/fixtures/corpus/oss-repos/express/examples/auth/views/head.ejs +0 -20
  237. package/tests/fixtures/corpus/oss-repos/express/examples/auth/views/login.ejs +0 -21
  238. package/tests/fixtures/corpus/oss-repos/express/examples/content-negotiation/db.js +0 -9
  239. package/tests/fixtures/corpus/oss-repos/express/examples/content-negotiation/index.js +0 -46
  240. package/tests/fixtures/corpus/oss-repos/express/examples/content-negotiation/users.js +0 -19
  241. package/tests/fixtures/corpus/oss-repos/express/examples/cookie-sessions/index.js +0 -25
  242. package/tests/fixtures/corpus/oss-repos/express/examples/cookies/index.js +0 -53
  243. package/tests/fixtures/corpus/oss-repos/express/examples/downloads/files/CCTV大赛上海分赛区.txt +0 -2
  244. package/tests/fixtures/corpus/oss-repos/express/examples/downloads/files/amazing.txt +0 -1
  245. package/tests/fixtures/corpus/oss-repos/express/examples/downloads/files/notes/groceries.txt +0 -3
  246. package/tests/fixtures/corpus/oss-repos/express/examples/downloads/index.js +0 -40
  247. package/tests/fixtures/corpus/oss-repos/express/examples/ejs/index.js +0 -57
  248. package/tests/fixtures/corpus/oss-repos/express/examples/ejs/public/stylesheets/style.css +0 -4
  249. package/tests/fixtures/corpus/oss-repos/express/examples/ejs/views/footer.html +0 -2
  250. package/tests/fixtures/corpus/oss-repos/express/examples/ejs/views/header.html +0 -9
  251. package/tests/fixtures/corpus/oss-repos/express/examples/ejs/views/users.html +0 -10
  252. package/tests/fixtures/corpus/oss-repos/express/examples/error/index.js +0 -53
  253. package/tests/fixtures/corpus/oss-repos/express/examples/error-pages/index.js +0 -103
  254. package/tests/fixtures/corpus/oss-repos/express/examples/error-pages/views/404.ejs +0 -3
  255. package/tests/fixtures/corpus/oss-repos/express/examples/error-pages/views/500.ejs +0 -8
  256. package/tests/fixtures/corpus/oss-repos/express/examples/error-pages/views/error_header.ejs +0 -10
  257. package/tests/fixtures/corpus/oss-repos/express/examples/error-pages/views/footer.ejs +0 -2
  258. package/tests/fixtures/corpus/oss-repos/express/examples/error-pages/views/index.ejs +0 -20
  259. package/tests/fixtures/corpus/oss-repos/express/examples/hello-world/index.js +0 -15
  260. package/tests/fixtures/corpus/oss-repos/express/examples/markdown/index.js +0 -44
  261. package/tests/fixtures/corpus/oss-repos/express/examples/markdown/views/index.md +0 -4
  262. package/tests/fixtures/corpus/oss-repos/express/examples/multi-router/controllers/api_v1.js +0 -15
  263. package/tests/fixtures/corpus/oss-repos/express/examples/multi-router/controllers/api_v2.js +0 -15
  264. package/tests/fixtures/corpus/oss-repos/express/examples/multi-router/index.js +0 -18
  265. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/main/index.js +0 -5
  266. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/pet/index.js +0 -31
  267. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/pet/views/edit.ejs +0 -17
  268. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/pet/views/show.ejs +0 -15
  269. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/user/index.js +0 -41
  270. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/user/views/edit.hbs +0 -27
  271. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/user/views/list.hbs +0 -18
  272. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/user/views/show.hbs +0 -31
  273. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/controllers/user-pet/index.js +0 -22
  274. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/db.js +0 -16
  275. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/index.js +0 -95
  276. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/lib/boot.js +0 -83
  277. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/public/style.css +0 -14
  278. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/views/404.ejs +0 -13
  279. package/tests/fixtures/corpus/oss-repos/express/examples/mvc/views/5xx.ejs +0 -13
  280. package/tests/fixtures/corpus/oss-repos/express/examples/online/index.js +0 -61
  281. package/tests/fixtures/corpus/oss-repos/express/examples/params/index.js +0 -74
  282. package/tests/fixtures/corpus/oss-repos/express/examples/resource/index.js +0 -95
  283. package/tests/fixtures/corpus/oss-repos/express/examples/route-map/index.js +0 -75
  284. package/tests/fixtures/corpus/oss-repos/express/examples/route-middleware/index.js +0 -90
  285. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/index.js +0 -55
  286. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/post.js +0 -13
  287. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/public/style.css +0 -24
  288. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/site.js +0 -5
  289. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/user.js +0 -47
  290. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/footer.ejs +0 -2
  291. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/header.ejs +0 -9
  292. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/index.ejs +0 -10
  293. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/posts/index.ejs +0 -12
  294. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/users/edit.ejs +0 -23
  295. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/users/index.ejs +0 -14
  296. package/tests/fixtures/corpus/oss-repos/express/examples/route-separation/views/users/view.ejs +0 -9
  297. package/tests/fixtures/corpus/oss-repos/express/examples/search/index.js +0 -61
  298. package/tests/fixtures/corpus/oss-repos/express/examples/search/public/client.js +0 -15
  299. package/tests/fixtures/corpus/oss-repos/express/examples/search/public/index.html +0 -21
  300. package/tests/fixtures/corpus/oss-repos/express/examples/session/index.js +0 -37
  301. package/tests/fixtures/corpus/oss-repos/express/examples/session/redis.js +0 -39
  302. package/tests/fixtures/corpus/oss-repos/express/examples/static-files/index.js +0 -43
  303. package/tests/fixtures/corpus/oss-repos/express/examples/static-files/public/css/style.css +0 -3
  304. package/tests/fixtures/corpus/oss-repos/express/examples/static-files/public/hello.txt +0 -1
  305. package/tests/fixtures/corpus/oss-repos/express/examples/static-files/public/js/app.js +0 -1
  306. package/tests/fixtures/corpus/oss-repos/express/examples/vhost/index.js +0 -53
  307. package/tests/fixtures/corpus/oss-repos/express/examples/view-constructor/github-view.js +0 -53
  308. package/tests/fixtures/corpus/oss-repos/express/examples/view-constructor/index.js +0 -48
  309. package/tests/fixtures/corpus/oss-repos/express/examples/view-locals/index.js +0 -155
  310. package/tests/fixtures/corpus/oss-repos/express/examples/view-locals/user.js +0 -36
  311. package/tests/fixtures/corpus/oss-repos/express/examples/view-locals/views/index.ejs +0 -20
  312. package/tests/fixtures/corpus/oss-repos/express/examples/web-service/index.js +0 -117
  313. package/tests/fixtures/corpus/oss-repos/express/index.js +0 -11
  314. package/tests/fixtures/corpus/oss-repos/express/lib/application.js +0 -631
  315. package/tests/fixtures/corpus/oss-repos/express/lib/express.js +0 -81
  316. package/tests/fixtures/corpus/oss-repos/express/lib/request.js +0 -514
  317. package/tests/fixtures/corpus/oss-repos/express/lib/response.js +0 -1053
  318. package/tests/fixtures/corpus/oss-repos/express/lib/utils.js +0 -271
  319. package/tests/fixtures/corpus/oss-repos/express/lib/view.js +0 -205
  320. package/tests/fixtures/corpus/oss-repos/express/package.json +0 -99
  321. package/tests/fixtures/corpus/oss-repos/express/test/Route.js +0 -274
  322. package/tests/fixtures/corpus/oss-repos/express/test/Router.js +0 -636
  323. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/auth.js +0 -117
  324. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/content-negotiation.js +0 -49
  325. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/cookie-sessions.js +0 -38
  326. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/cookies.js +0 -71
  327. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/downloads.js +0 -47
  328. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/ejs.js +0 -17
  329. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/error-pages.js +0 -99
  330. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/error.js +0 -29
  331. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/hello-world.js +0 -21
  332. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/markdown.js +0 -21
  333. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/multi-router.js +0 -44
  334. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/mvc.js +0 -132
  335. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/params.js +0 -44
  336. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/resource.js +0 -68
  337. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/route-map.js +0 -45
  338. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/route-separation.js +0 -97
  339. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/vhost.js +0 -46
  340. package/tests/fixtures/corpus/oss-repos/express/test/acceptance/web-service.js +0 -105
  341. package/tests/fixtures/corpus/oss-repos/express/test/app.all.js +0 -38
  342. package/tests/fixtures/corpus/oss-repos/express/test/app.engine.js +0 -83
  343. package/tests/fixtures/corpus/oss-repos/express/test/app.head.js +0 -66
  344. package/tests/fixtures/corpus/oss-repos/express/test/app.js +0 -120
  345. package/tests/fixtures/corpus/oss-repos/express/test/app.listen.js +0 -55
  346. package/tests/fixtures/corpus/oss-repos/express/test/app.locals.js +0 -26
  347. package/tests/fixtures/corpus/oss-repos/express/test/app.options.js +0 -116
  348. package/tests/fixtures/corpus/oss-repos/express/test/app.param.js +0 -323
  349. package/tests/fixtures/corpus/oss-repos/express/test/app.render.js +0 -374
  350. package/tests/fixtures/corpus/oss-repos/express/test/app.request.js +0 -143
  351. package/tests/fixtures/corpus/oss-repos/express/test/app.response.js +0 -143
  352. package/tests/fixtures/corpus/oss-repos/express/test/app.route.js +0 -197
  353. package/tests/fixtures/corpus/oss-repos/express/test/app.router.js +0 -1217
  354. package/tests/fixtures/corpus/oss-repos/express/test/app.routes.error.js +0 -62
  355. package/tests/fixtures/corpus/oss-repos/express/test/app.use.js +0 -542
  356. package/tests/fixtures/corpus/oss-repos/express/test/config.js +0 -207
  357. package/tests/fixtures/corpus/oss-repos/express/test/exports.js +0 -82
  358. package/tests/fixtures/corpus/oss-repos/express/test/express.json.js +0 -755
  359. package/tests/fixtures/corpus/oss-repos/express/test/express.raw.js +0 -513
  360. package/tests/fixtures/corpus/oss-repos/express/test/express.static.js +0 -815
  361. package/tests/fixtures/corpus/oss-repos/express/test/express.text.js +0 -566
  362. package/tests/fixtures/corpus/oss-repos/express/test/express.urlencoded.js +0 -828
  363. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/% of dogs.txt +0 -1
  364. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/.name +0 -1
  365. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/blog/index.html +0 -1
  366. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/blog/post/index.tmpl +0 -1
  367. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/broken.send +0 -0
  368. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/default_layout/name.tmpl +0 -1
  369. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/default_layout/user.tmpl +0 -1
  370. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/email.tmpl +0 -1
  371. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/empty.txt +0 -0
  372. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/local_layout/user.tmpl +0 -1
  373. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/name.tmpl +0 -1
  374. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/name.txt +0 -1
  375. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/nums.txt +0 -1
  376. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/pets/names.txt +0 -1
  377. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/snow ☃/.gitkeep +0 -0
  378. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/todo.html +0 -1
  379. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/todo.txt +0 -1
  380. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/user.html +0 -1
  381. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/user.tmpl +0 -1
  382. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/users/index.html +0 -1
  383. package/tests/fixtures/corpus/oss-repos/express/test/fixtures/users/tobi.txt +0 -1
  384. package/tests/fixtures/corpus/oss-repos/express/test/middleware.basic.js +0 -42
  385. package/tests/fixtures/corpus/oss-repos/express/test/regression.js +0 -20
  386. package/tests/fixtures/corpus/oss-repos/express/test/req.accepts.js +0 -125
  387. package/tests/fixtures/corpus/oss-repos/express/test/req.acceptsCharsets.js +0 -50
  388. package/tests/fixtures/corpus/oss-repos/express/test/req.acceptsEncodings.js +0 -39
  389. package/tests/fixtures/corpus/oss-repos/express/test/req.acceptsLanguages.js +0 -57
  390. package/tests/fixtures/corpus/oss-repos/express/test/req.baseUrl.js +0 -88
  391. package/tests/fixtures/corpus/oss-repos/express/test/req.fresh.js +0 -70
  392. package/tests/fixtures/corpus/oss-repos/express/test/req.get.js +0 -60
  393. package/tests/fixtures/corpus/oss-repos/express/test/req.host.js +0 -156
  394. package/tests/fixtures/corpus/oss-repos/express/test/req.hostname.js +0 -188
  395. package/tests/fixtures/corpus/oss-repos/express/test/req.ip.js +0 -113
  396. package/tests/fixtures/corpus/oss-repos/express/test/req.ips.js +0 -71
  397. package/tests/fixtures/corpus/oss-repos/express/test/req.is.js +0 -169
  398. package/tests/fixtures/corpus/oss-repos/express/test/req.path.js +0 -20
  399. package/tests/fixtures/corpus/oss-repos/express/test/req.protocol.js +0 -113
  400. package/tests/fixtures/corpus/oss-repos/express/test/req.query.js +0 -106
  401. package/tests/fixtures/corpus/oss-repos/express/test/req.range.js +0 -104
  402. package/tests/fixtures/corpus/oss-repos/express/test/req.route.js +0 -28
  403. package/tests/fixtures/corpus/oss-repos/express/test/req.secure.js +0 -101
  404. package/tests/fixtures/corpus/oss-repos/express/test/req.signedCookies.js +0 -37
  405. package/tests/fixtures/corpus/oss-repos/express/test/req.stale.js +0 -50
  406. package/tests/fixtures/corpus/oss-repos/express/test/req.subdomains.js +0 -173
  407. package/tests/fixtures/corpus/oss-repos/express/test/req.xhr.js +0 -42
  408. package/tests/fixtures/corpus/oss-repos/express/test/res.append.js +0 -116
  409. package/tests/fixtures/corpus/oss-repos/express/test/res.attachment.js +0 -79
  410. package/tests/fixtures/corpus/oss-repos/express/test/res.clearCookie.js +0 -62
  411. package/tests/fixtures/corpus/oss-repos/express/test/res.cookie.js +0 -295
  412. package/tests/fixtures/corpus/oss-repos/express/test/res.download.js +0 -487
  413. package/tests/fixtures/corpus/oss-repos/express/test/res.format.js +0 -248
  414. package/tests/fixtures/corpus/oss-repos/express/test/res.get.js +0 -21
  415. package/tests/fixtures/corpus/oss-repos/express/test/res.json.js +0 -186
  416. package/tests/fixtures/corpus/oss-repos/express/test/res.jsonp.js +0 -344
  417. package/tests/fixtures/corpus/oss-repos/express/test/res.links.js +0 -65
  418. package/tests/fixtures/corpus/oss-repos/express/test/res.locals.js +0 -40
  419. package/tests/fixtures/corpus/oss-repos/express/test/res.location.js +0 -316
  420. package/tests/fixtures/corpus/oss-repos/express/test/res.redirect.js +0 -214
  421. package/tests/fixtures/corpus/oss-repos/express/test/res.render.js +0 -367
  422. package/tests/fixtures/corpus/oss-repos/express/test/res.send.js +0 -569
  423. package/tests/fixtures/corpus/oss-repos/express/test/res.sendFile.js +0 -913
  424. package/tests/fixtures/corpus/oss-repos/express/test/res.sendStatus.js +0 -44
  425. package/tests/fixtures/corpus/oss-repos/express/test/res.set.js +0 -124
  426. package/tests/fixtures/corpus/oss-repos/express/test/res.status.js +0 -206
  427. package/tests/fixtures/corpus/oss-repos/express/test/res.type.js +0 -46
  428. package/tests/fixtures/corpus/oss-repos/express/test/res.vary.js +0 -90
  429. package/tests/fixtures/corpus/oss-repos/express/test/support/env.js +0 -3
  430. package/tests/fixtures/corpus/oss-repos/express/test/support/tmpl.js +0 -36
  431. package/tests/fixtures/corpus/oss-repos/express/test/support/utils.js +0 -86
  432. package/tests/fixtures/corpus/oss-repos/express/test/utils.js +0 -83
  433. package/tests/fixtures/corpus/oss-repos/hono/.devcontainer/Dockerfile +0 -11
  434. package/tests/fixtures/corpus/oss-repos/hono/.devcontainer/devcontainer.json +0 -21
  435. package/tests/fixtures/corpus/oss-repos/hono/.devcontainer/docker-compose.yml +0 -18
  436. package/tests/fixtures/corpus/oss-repos/hono/.eslintignore +0 -1
  437. package/tests/fixtures/corpus/oss-repos/hono/.eslintrc.cjs +0 -9
  438. package/tests/fixtures/corpus/oss-repos/hono/.gitpod.yml +0 -9
  439. package/tests/fixtures/corpus/oss-repos/hono/.prettierrc +0 -9
  440. package/tests/fixtures/corpus/oss-repos/hono/.vitest.config/jsx-runtime-default.ts +0 -15
  441. package/tests/fixtures/corpus/oss-repos/hono/.vitest.config/jsx-runtime-dom.ts +0 -15
  442. package/tests/fixtures/corpus/oss-repos/hono/.vitest.config/setup-vitest.ts +0 -47
  443. package/tests/fixtures/corpus/oss-repos/hono/LICENSE +0 -21
  444. package/tests/fixtures/corpus/oss-repos/hono/README.md +0 -91
  445. package/tests/fixtures/corpus/oss-repos/hono/build.ts +0 -80
  446. package/tests/fixtures/corpus/oss-repos/hono/bun.lockb +0 -0
  447. package/tests/fixtures/corpus/oss-repos/hono/bunfig.toml +0 -7
  448. package/tests/fixtures/corpus/oss-repos/hono/codecov.yml +0 -13
  449. package/tests/fixtures/corpus/oss-repos/hono/docs/CODE_OF_CONDUCT.md +0 -128
  450. package/tests/fixtures/corpus/oss-repos/hono/docs/CONTRIBUTING.md +0 -62
  451. package/tests/fixtures/corpus/oss-repos/hono/docs/MIGRATION.md +0 -295
  452. package/tests/fixtures/corpus/oss-repos/hono/docs/images/hono-logo.png +0 -0
  453. package/tests/fixtures/corpus/oss-repos/hono/docs/images/hono-logo.pxm +0 -0
  454. package/tests/fixtures/corpus/oss-repos/hono/docs/images/hono-logo.svg +0 -6
  455. package/tests/fixtures/corpus/oss-repos/hono/docs/images/hono-title.png +0 -0
  456. package/tests/fixtures/corpus/oss-repos/hono/docs/images/hono-title.pxm +0 -0
  457. package/tests/fixtures/corpus/oss-repos/hono/jsr.json +0 -119
  458. package/tests/fixtures/corpus/oss-repos/hono/package.cjs.json +0 -3
  459. package/tests/fixtures/corpus/oss-repos/hono/package.json +0 -650
  460. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/aws-lambda/handler.ts +0 -492
  461. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/aws-lambda/index.ts +0 -13
  462. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/aws-lambda/types.ts +0 -144
  463. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/bun/conninfo.ts +0 -28
  464. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/bun/index.ts +0 -9
  465. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/bun/serve-static.ts +0 -35
  466. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/bun/server.ts +0 -30
  467. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/bun/ssg.ts +0 -27
  468. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/bun/websocket.ts +0 -110
  469. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-pages/handler.ts +0 -120
  470. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-pages/index.ts +0 -7
  471. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/conninfo.ts +0 -7
  472. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/index.ts +0 -8
  473. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/serve-static-module.ts +0 -12
  474. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/serve-static.ts +0 -39
  475. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/utils.ts +0 -50
  476. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/cloudflare-workers/websocket.ts +0 -50
  477. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/deno/conninfo.ts +0 -17
  478. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/deno/deno.d.ts +0 -28
  479. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/deno/index.ts +0 -9
  480. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/deno/serve-static.ts +0 -40
  481. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/deno/ssg.ts +0 -27
  482. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/deno/websocket.ts +0 -51
  483. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/lambda-edge/conninfo.ts +0 -15
  484. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/lambda-edge/handler.ts +0 -189
  485. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/lambda-edge/index.ts +0 -14
  486. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/netlify/handler.ts +0 -10
  487. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/netlify/index.ts +0 -6
  488. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/netlify/mod.ts +0 -1
  489. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/service-worker/handler.ts +0 -34
  490. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/service-worker/index.ts +0 -5
  491. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/service-worker/types.ts +0 -14
  492. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/vercel/conninfo.ts +0 -8
  493. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/vercel/handler.ts +0 -9
  494. package/tests/fixtures/corpus/oss-repos/hono/src/adapter/vercel/index.ts +0 -7
  495. package/tests/fixtures/corpus/oss-repos/hono/src/client/client.ts +0 -214
  496. package/tests/fixtures/corpus/oss-repos/hono/src/client/index.ts +0 -14
  497. package/tests/fixtures/corpus/oss-repos/hono/src/client/types.ts +0 -182
  498. package/tests/fixtures/corpus/oss-repos/hono/src/client/utils.ts +0 -54
  499. package/tests/fixtures/corpus/oss-repos/hono/src/compose.ts +0 -94
  500. package/tests/fixtures/corpus/oss-repos/hono/src/context.ts +0 -917
  501. package/tests/fixtures/corpus/oss-repos/hono/src/helper/accepts/accepts.ts +0 -84
  502. package/tests/fixtures/corpus/oss-repos/hono/src/helper/accepts/index.ts +0 -6
  503. package/tests/fixtures/corpus/oss-repos/hono/src/helper/adapter/index.ts +0 -85
  504. package/tests/fixtures/corpus/oss-repos/hono/src/helper/conninfo/index.ts +0 -6
  505. package/tests/fixtures/corpus/oss-repos/hono/src/helper/conninfo/types.ts +0 -45
  506. package/tests/fixtures/corpus/oss-repos/hono/src/helper/cookie/index.ts +0 -130
  507. package/tests/fixtures/corpus/oss-repos/hono/src/helper/css/common.ts +0 -243
  508. package/tests/fixtures/corpus/oss-repos/hono/src/helper/css/index.ts +0 -220
  509. package/tests/fixtures/corpus/oss-repos/hono/src/helper/dev/index.ts +0 -79
  510. package/tests/fixtures/corpus/oss-repos/hono/src/helper/factory/index.ts +0 -246
  511. package/tests/fixtures/corpus/oss-repos/hono/src/helper/html/index.ts +0 -56
  512. package/tests/fixtures/corpus/oss-repos/hono/src/helper/ssg/index.ts +0 -13
  513. package/tests/fixtures/corpus/oss-repos/hono/src/helper/ssg/middleware.ts +0 -79
  514. package/tests/fixtures/corpus/oss-repos/hono/src/helper/ssg/ssg.ts +0 -388
  515. package/tests/fixtures/corpus/oss-repos/hono/src/helper/ssg/utils.ts +0 -71
  516. package/tests/fixtures/corpus/oss-repos/hono/src/helper/streaming/index.ts +0 -9
  517. package/tests/fixtures/corpus/oss-repos/hono/src/helper/streaming/sse.ts +0 -89
  518. package/tests/fixtures/corpus/oss-repos/hono/src/helper/streaming/stream.ts +0 -36
  519. package/tests/fixtures/corpus/oss-repos/hono/src/helper/streaming/text.ts +0 -15
  520. package/tests/fixtures/corpus/oss-repos/hono/src/helper/testing/index.ts +0 -26
  521. package/tests/fixtures/corpus/oss-repos/hono/src/helper/websocket/index.ts +0 -57
  522. package/tests/fixtures/corpus/oss-repos/hono/src/hono-base.ts +0 -523
  523. package/tests/fixtures/corpus/oss-repos/hono/src/hono.ts +0 -34
  524. package/tests/fixtures/corpus/oss-repos/hono/src/http-exception.ts +0 -78
  525. package/tests/fixtures/corpus/oss-repos/hono/src/index.ts +0 -51
  526. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/base.ts +0 -419
  527. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/children.ts +0 -20
  528. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/components.ts +0 -195
  529. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/constants.ts +0 -5
  530. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/context.ts +0 -50
  531. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/client.ts +0 -89
  532. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/components.ts +0 -39
  533. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/context.ts +0 -52
  534. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/css.ts +0 -246
  535. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/hooks/index.ts +0 -91
  536. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/index.ts +0 -159
  537. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/intrinsic-element/components.ts +0 -398
  538. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/jsx-dev-runtime.ts +0 -22
  539. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/jsx-runtime.ts +0 -7
  540. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/render.ts +0 -772
  541. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/server.ts +0 -70
  542. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/dom/utils.ts +0 -7
  543. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/hooks/index.ts +0 -426
  544. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/index.ts +0 -114
  545. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/intrinsic-element/common.ts +0 -11
  546. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/intrinsic-element/components.ts +0 -196
  547. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/intrinsic-elements.ts +0 -924
  548. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/jsx-dev-runtime.ts +0 -26
  549. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/jsx-runtime.ts +0 -18
  550. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/streaming.ts +0 -184
  551. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/types.ts +0 -41
  552. package/tests/fixtures/corpus/oss-repos/hono/src/jsx/utils.ts +0 -36
  553. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/basic-auth/index.ts +0 -128
  554. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/bearer-auth/index.ts +0 -159
  555. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/body-limit/index.ts +0 -115
  556. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/cache/index.ts +0 -127
  557. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/combine/index.ts +0 -153
  558. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/compress/index.ts +0 -79
  559. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/context-storage/index.ts +0 -55
  560. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/cors/index.ts +0 -141
  561. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/csrf/index.ts +0 -90
  562. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/etag/index.ts +0 -88
  563. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/ip-restriction/index.ts +0 -178
  564. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/jsx-renderer/index.ts +0 -158
  565. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/jwt/index.ts +0 -8
  566. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/jwt/jwt.ts +0 -159
  567. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/logger/index.ts +0 -93
  568. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/method-override/index.ts +0 -146
  569. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/powered-by/index.ts +0 -13
  570. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/pretty-json/index.ts +0 -50
  571. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/request-id/index.ts +0 -8
  572. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/request-id/request-id.ts +0 -59
  573. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/secure-headers/index.ts +0 -8
  574. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/secure-headers/permissions-policy.ts +0 -86
  575. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/secure-headers/secure-headers.ts +0 -319
  576. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/serve-static/index.ts +0 -140
  577. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/timeout/index.ts +0 -58
  578. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/timing/index.ts +0 -7
  579. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/timing/timing.ts +0 -225
  580. package/tests/fixtures/corpus/oss-repos/hono/src/middleware/trailing-slash/index.ts +0 -71
  581. package/tests/fixtures/corpus/oss-repos/hono/src/preset/quick.ts +0 -24
  582. package/tests/fixtures/corpus/oss-repos/hono/src/preset/tiny.ts +0 -20
  583. package/tests/fixtures/corpus/oss-repos/hono/src/request.ts +0 -403
  584. package/tests/fixtures/corpus/oss-repos/hono/src/router/linear-router/index.ts +0 -6
  585. package/tests/fixtures/corpus/oss-repos/hono/src/router/linear-router/router.ts +0 -132
  586. package/tests/fixtures/corpus/oss-repos/hono/src/router/pattern-router/index.ts +0 -6
  587. package/tests/fixtures/corpus/oss-repos/hono/src/router/pattern-router/router.ts +0 -54
  588. package/tests/fixtures/corpus/oss-repos/hono/src/router/reg-exp-router/index.ts +0 -6
  589. package/tests/fixtures/corpus/oss-repos/hono/src/router/reg-exp-router/node.ts +0 -159
  590. package/tests/fixtures/corpus/oss-repos/hono/src/router/reg-exp-router/router.ts +0 -274
  591. package/tests/fixtures/corpus/oss-repos/hono/src/router/reg-exp-router/trie.ts +0 -74
  592. package/tests/fixtures/corpus/oss-repos/hono/src/router/smart-router/index.ts +0 -6
  593. package/tests/fixtures/corpus/oss-repos/hono/src/router/smart-router/router.ts +0 -69
  594. package/tests/fixtures/corpus/oss-repos/hono/src/router/trie-router/index.ts +0 -6
  595. package/tests/fixtures/corpus/oss-repos/hono/src/router/trie-router/node.ts +0 -205
  596. package/tests/fixtures/corpus/oss-repos/hono/src/router/trie-router/router.ts +0 -28
  597. package/tests/fixtures/corpus/oss-repos/hono/src/router.ts +0 -103
  598. package/tests/fixtures/corpus/oss-repos/hono/src/types.ts +0 -2006
  599. package/tests/fixtures/corpus/oss-repos/hono/src/utils/basic-auth.ts +0 -26
  600. package/tests/fixtures/corpus/oss-repos/hono/src/utils/body.ts +0 -225
  601. package/tests/fixtures/corpus/oss-repos/hono/src/utils/buffer.ts +0 -65
  602. package/tests/fixtures/corpus/oss-repos/hono/src/utils/color.ts +0 -26
  603. package/tests/fixtures/corpus/oss-repos/hono/src/utils/concurrent.ts +0 -55
  604. package/tests/fixtures/corpus/oss-repos/hono/src/utils/cookie.ts +0 -230
  605. package/tests/fixtures/corpus/oss-repos/hono/src/utils/crypto.ts +0 -65
  606. package/tests/fixtures/corpus/oss-repos/hono/src/utils/encode.ts +0 -34
  607. package/tests/fixtures/corpus/oss-repos/hono/src/utils/filepath.ts +0 -56
  608. package/tests/fixtures/corpus/oss-repos/hono/src/utils/handler.ts +0 -15
  609. package/tests/fixtures/corpus/oss-repos/hono/src/utils/html.ts +0 -182
  610. package/tests/fixtures/corpus/oss-repos/hono/src/utils/http-status.ts +0 -69
  611. package/tests/fixtures/corpus/oss-repos/hono/src/utils/ipaddr.ts +0 -113
  612. package/tests/fixtures/corpus/oss-repos/hono/src/utils/jwt/index.ts +0 -7
  613. package/tests/fixtures/corpus/oss-repos/hono/src/utils/jwt/jwa.ts +0 -23
  614. package/tests/fixtures/corpus/oss-repos/hono/src/utils/jwt/jws.ts +0 -226
  615. package/tests/fixtures/corpus/oss-repos/hono/src/utils/jwt/jwt.ts +0 -114
  616. package/tests/fixtures/corpus/oss-repos/hono/src/utils/jwt/types.ts +0 -83
  617. package/tests/fixtures/corpus/oss-repos/hono/src/utils/jwt/utf8.ts +0 -7
  618. package/tests/fixtures/corpus/oss-repos/hono/src/utils/mime.ts +0 -142
  619. package/tests/fixtures/corpus/oss-repos/hono/src/utils/stream.ts +0 -96
  620. package/tests/fixtures/corpus/oss-repos/hono/src/utils/types.ts +0 -102
  621. package/tests/fixtures/corpus/oss-repos/hono/src/utils/url.ts +0 -310
  622. package/tests/fixtures/corpus/oss-repos/hono/src/validator/index.ts +0 -7
  623. package/tests/fixtures/corpus/oss-repos/hono/src/validator/validator.ts +0 -151
  624. package/tests/fixtures/corpus/oss-repos/hono/tsconfig.build.json +0 -23
  625. package/tests/fixtures/corpus/oss-repos/hono/tsconfig.json +0 -28
  626. package/tests/fixtures/corpus/oss-repos/hono/vitest.config.ts +0 -34
  627. package/tests/fixtures/corpus/oss-repos/hono/yarn.lock +0 -6232
  628. package/tests/fixtures/documentation/api-reference.md +0 -412
  629. package/tests/fixtures/documentation/architecture.md +0 -214
  630. package/tests/fixtures/documentation/deployment-guide.md +0 -420
  631. package/tests/fixtures/github-readmes/express.md +0 -133
  632. package/tests/fixtures/github-readmes/nextjs.md +0 -106
  633. package/tests/fixtures/github-readmes/react.md +0 -74
  634. package/tests/fixtures/github-readmes/typescript.md +0 -93
  635. package/tests/fixtures/github-readmes/vite.md +0 -79
  636. package/tests/fixtures/queries/core.json +0 -125
  637. package/tests/fixtures/queries/extended.json +0 -427
  638. package/tests/fixtures/queries/generated/.gitkeep +0 -0
  639. package/tests/fixtures/test-server.ts +0 -268
  640. package/tests/helpers/performance-metrics.ts +0 -370
  641. package/tests/helpers/search-relevance.ts +0 -326
  642. package/tests/integration/cli-consistency.test.ts +0 -298
  643. package/tests/integration/cli.test.ts +0 -69
  644. package/tests/integration/e2e-workflow.test.ts +0 -614
  645. package/tests/integration/mcp.test.ts +0 -250
  646. package/tests/integration/python-bridge.test.ts +0 -193
  647. package/tests/integration/search-quality.test.ts +0 -720
  648. package/tests/integration/serve.test.ts +0 -260
  649. package/tests/integration/stress.test.ts +0 -326
  650. package/tests/mcp/server.test.ts +0 -15
  651. package/tests/scripts/schemas/evaluation.json +0 -44
  652. package/tests/scripts/schemas/query-generation.json +0 -21
  653. package/tests/services/code-unit.service.test.ts +0 -95
  654. package/tests/services/job.service.test.ts +0 -124
  655. package/tests/services/search.progressive-context.test.ts +0 -35
  656. package/tsconfig.json +0 -34
  657. package/tsup.config.ts +0 -15
  658. package/turndown-plugin-gfm.d.ts +0 -29
  659. package/vitest.config.ts +0 -90
package/.claude-plugin/plugin.json CHANGED
@@ -1,5 +1,5 @@
 {
   "name": "bluera-knowledge",
-  "version": "0.14.0",
+  "version": "0.14.2",
   "description": "Clone repos, crawl docs, search locally. Fast, authoritative answers for AI coding agents."
 }
package/CHANGELOG.md CHANGED
@@ -2,6 +2,20 @@
 
 All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines.
 
+## [0.14.2](https://github.com/blueraai/bluera-knowledge/compare/v0.14.1...v0.14.2) (2026-01-16)
+
+
+### Bug Fixes
+
+* **crawl:** detect Claude CLI at ~/.claude/local/claude ([cf490e2](https://github.com/blueraai/bluera-knowledge/commit/cf490e2af2524352b108aa19c313faecf0abafac))
+
+## [0.14.1](https://github.com/blueraai/bluera-knowledge/compare/v0.14.0...v0.14.1) (2026-01-16)
+
+
+### Bug Fixes
+
+* **npm:** add files field to prevent 389MB package bloat ([fa55313](https://github.com/blueraai/bluera-knowledge/commit/fa55313050381b8f2c51cb89d42f66c30adfe58c))
+
 ## [0.14.0](https://github.com/blueraai/bluera-knowledge/compare/v0.13.3...v0.14.0) (2026-01-16)
 
 
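The 0.14.1 entry above explains why this diff removes so many files: 0.14.0 was published without an npm "files" allowlist, so the test corpus, docs, and CI config were shipped in the tarball, inflating it to 389MB. The actual fix lands in package/package.json (+12 -1), whose diff is not shown in this excerpt. As a rough, hypothetical sketch only, an allowlist for a package with this layout might look like the snippet below; the exact entries added in 0.14.1 may differ, and npm always includes package.json, the README, and the LICENSE regardless of the list.

{
  "files": [
    "dist",
    ".claude-plugin",
    "README.md"
  ]
}

With such an allowlist in place, everything outside the listed paths (for example the package/tests/fixtures/** entries in the file list above) is excluded from the published package.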
package/dist/{chunk-AIS5S77C.js → chunk-X7E4RYJE.js} RENAMED
@@ -217,6 +217,9 @@ ${hashes} ${cleanContent}
 
 // src/crawl/claude-client.ts
 import { spawn, execSync } from "child_process";
+import { existsSync } from "fs";
+import { homedir } from "os";
+import { join } from "path";
 var CRAWL_STRATEGY_SCHEMA = {
   type: "object",
   properties: {
@@ -236,22 +239,56 @@ var ClaudeClient = class _ClaudeClient {
   timeout;
   static availabilityChecked = false;
   static available = false;
+  static claudePath = null;
   /**
-   * Check if Claude CLI is available in PATH
+   * Get the path to the Claude CLI binary
+   * Checks in order:
+   * 1. CLAUDE_BIN environment variable (explicit override)
+   * 2. ~/.claude/local/claude (newer installation location)
+   * 3. ~/.local/bin/claude (standard installation location)
+   * 4. 'claude' in PATH (custom installations)
+   */
+  static getClaudePath() {
+    const envPath = process.env["CLAUDE_BIN"];
+    if (envPath !== void 0 && envPath !== "" && existsSync(envPath)) {
+      return envPath;
+    }
+    const claudeLocalPath = join(homedir(), ".claude", "local", "claude");
+    if (existsSync(claudeLocalPath)) {
+      return claudeLocalPath;
+    }
+    const localBinPath = join(homedir(), ".local", "bin", "claude");
+    if (existsSync(localBinPath)) {
+      return localBinPath;
+    }
+    try {
+      const result = execSync("command -v claude", { stdio: ["pipe", "pipe", "ignore"] });
+      const path = result.toString().trim();
+      if (path) {
+        return path;
+      }
+    } catch {
+    }
+    return null;
+  }
+  /**
+   * Check if Claude CLI is available
    * Result is cached after first check for performance
    */
   static isAvailable() {
     if (!_ClaudeClient.availabilityChecked) {
-      try {
-        execSync("which claude", { stdio: "ignore" });
-        _ClaudeClient.available = true;
-      } catch {
-        _ClaudeClient.available = false;
-      }
+      _ClaudeClient.claudePath = _ClaudeClient.getClaudePath();
+      _ClaudeClient.available = _ClaudeClient.claudePath !== null;
       _ClaudeClient.availabilityChecked = true;
     }
     return _ClaudeClient.available;
   }
+  /**
+   * Get the cached Claude path (call isAvailable first)
+   */
+  static getCachedPath() {
+    return _ClaudeClient.claudePath;
+  }
   /**
    * Reset availability cache (for testing)
    */
@@ -329,12 +366,17 @@ ${this.truncateMarkdown(markdown, 1e5)}`;
   */
  async callClaude(prompt, jsonSchema) {
    return new Promise((resolve, reject) => {
+      const claudePath = _ClaudeClient.getCachedPath();
+      if (claudePath === null) {
+        reject(new Error("Claude CLI not available"));
+        return;
+      }
      const args = ["-p"];
      if (jsonSchema) {
        args.push("--json-schema", JSON.stringify(jsonSchema));
        args.push("--output-format", "json");
      }
-      const proc = spawn("claude", args, {
+      const proc = spawn(claudePath, args, {
        stdio: ["pipe", "pipe", "pipe"],
        cwd: process.cwd(),
        env: { ...process.env }
@@ -753,4 +795,4 @@ var IntelligentCrawler = class extends EventEmitter {
 export {
   IntelligentCrawler
 };
-//# sourceMappingURL=chunk-AIS5S77C.js.map
+//# sourceMappingURL=chunk-X7E4RYJE.js.map
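The lookup order introduced above is: the CLAUDE_BIN environment variable, then ~/.claude/local/claude, then ~/.local/bin/claude, then whatever `command -v claude` finds on PATH. To check which binary the crawler would pick up on a given machine, a small standalone Node script that mirrors this logic can help. This is a troubleshooting sketch only, not part of the package's API; the file name which-claude.mjs is made up, and CLAUDE_BIN is the same override read by the new getClaudePath().

// which-claude.mjs - mirrors the lookup order added in chunk-X7E4RYJE.js
import { execSync } from "child_process";
import { existsSync } from "fs";
import { homedir } from "os";
import { join } from "path";

function resolveClaude() {
  // 1. explicit override via environment variable
  const envPath = process.env.CLAUDE_BIN;
  if (envPath && existsSync(envPath)) return envPath;
  // 2. newer installation location
  const claudeLocal = join(homedir(), ".claude", "local", "claude");
  if (existsSync(claudeLocal)) return claudeLocal;
  // 3. standard installation location
  const localBin = join(homedir(), ".local", "bin", "claude");
  if (existsSync(localBin)) return localBin;
  // 4. fall back to whatever is on PATH
  try {
    const found = execSync("command -v claude", { stdio: ["pipe", "pipe", "ignore"] })
      .toString()
      .trim();
    return found || null;
  } catch {
    return null;
  }
}

console.log(resolveClaude() ?? "Claude CLI not found");

Run it with `node which-claude.mjs`, or with `CLAUDE_BIN=/custom/path node which-claude.mjs` to verify that the override takes precedence.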
package/dist/{chunk-AIS5S77C.js.map → chunk-X7E4RYJE.js.map} RENAMED
@@ -1 +1 @@
- {"version":3,"sources":["../src/crawl/intelligent-crawler.ts","../src/crawl/article-converter.ts","../src/crawl/markdown-utils.ts","../src/crawl/claude-client.ts"],"sourcesContent":["/**\n * Intelligent web crawler with natural language control\n * Two modes: Intelligent (Claude-driven) and Simple (BFS)\n */\n\nimport { EventEmitter } from 'node:events';\nimport axios from 'axios';\nimport { convertHtmlToMarkdown } from './article-converter.js';\nimport { PythonBridge, type CrawledLink } from './bridge.js';\nimport { ClaudeClient, type CrawlStrategy } from './claude-client.js';\nimport { createLogger, summarizePayload } from '../logging/index.js';\n\nconst logger = createLogger('crawler');\n\nexport interface CrawlOptions {\n crawlInstruction?: string; // Natural language: what to crawl\n extractInstruction?: string; // Natural language: what to extract\n maxPages?: number; // Max pages to crawl (default: 50)\n timeout?: number; // Per-page timeout in ms (default: 30000)\n simple?: boolean; // Force simple BFS mode\n useHeadless?: boolean; // Enable headless browser for JavaScript-rendered sites\n}\n\nexport interface CrawlResult {\n url: string;\n title?: string;\n markdown: string;\n extracted?: string;\n depth?: number;\n}\n\nexport interface CrawlProgress {\n type: 'start' | 'strategy' | 'page' | 'extraction' | 'complete' | 'error';\n pagesVisited: number;\n totalPages: number;\n currentUrl?: string;\n message?: string;\n error?: Error;\n}\n\n/**\n * Intelligent crawler that uses Claude CLI for strategy and extraction\n */\nexport class IntelligentCrawler extends EventEmitter {\n private readonly claudeClient: ClaudeClient;\n private readonly pythonBridge: PythonBridge;\n private readonly visited: Set<string>;\n private stopped: boolean;\n\n constructor() {\n super();\n this.claudeClient = new ClaudeClient();\n this.pythonBridge = new PythonBridge();\n this.visited = new Set();\n this.stopped = false;\n }\n\n /**\n * Crawl a website with intelligent or simple mode\n */\n async *crawl(seedUrl: string, options: CrawlOptions = {}): AsyncIterable<CrawlResult> {\n const { crawlInstruction, extractInstruction, maxPages = 50, simple = false } = options;\n\n this.visited.clear();\n this.stopped = false;\n\n logger.info(\n {\n seedUrl,\n maxPages,\n mode: simple\n ? 'simple'\n : crawlInstruction !== undefined && crawlInstruction !== ''\n ? 'intelligent'\n : 'simple',\n hasExtractInstruction: extractInstruction !== undefined,\n },\n 'Starting crawl'\n );\n\n const startProgress: CrawlProgress = {\n type: 'start',\n pagesVisited: 0,\n totalPages: maxPages,\n };\n this.emit('progress', startProgress);\n\n // Determine mode: intelligent (with crawl instruction) or simple (BFS)\n const useIntelligentMode = !simple && crawlInstruction !== undefined && crawlInstruction !== '';\n\n if (useIntelligentMode) {\n // TypeScript knows crawlInstruction is defined here due to useIntelligentMode check\n yield* this.crawlIntelligent(\n seedUrl,\n crawlInstruction,\n extractInstruction,\n maxPages,\n options.useHeadless ?? false\n );\n } else {\n yield* this.crawlSimple(seedUrl, extractInstruction, maxPages, options.useHeadless ?? 
false);\n }\n\n logger.info(\n {\n seedUrl,\n pagesVisited: this.visited.size,\n },\n 'Crawl complete'\n );\n\n // Warn if crawl discovered far fewer pages than requested\n if (this.visited.size === 1 && maxPages > 1) {\n const warningProgress: CrawlProgress = {\n type: 'error',\n pagesVisited: this.visited.size,\n totalPages: maxPages,\n message: `Warning: Only crawled 1 page despite maxPages=${String(maxPages)}. Link discovery may have failed. If using --fast mode, try without it for JavaScript-heavy sites.`,\n error: new Error('Low page discovery'),\n };\n this.emit('progress', warningProgress);\n }\n\n const completeProgress: CrawlProgress = {\n type: 'complete',\n pagesVisited: this.visited.size,\n totalPages: this.visited.size,\n };\n this.emit('progress', completeProgress);\n }\n\n /**\n * Intelligent mode: Use Claude to determine which URLs to crawl\n */\n private async *crawlIntelligent(\n seedUrl: string,\n crawlInstruction: string,\n extractInstruction: string | undefined,\n maxPages: number,\n useHeadless: boolean = false\n ): AsyncIterable<CrawlResult> {\n // Check if Claude CLI is available before attempting intelligent mode\n if (!ClaudeClient.isAvailable()) {\n throw new Error('Claude CLI not available: install Claude Code for intelligent crawling');\n }\n\n let strategy: CrawlStrategy;\n\n try {\n // Step 1: Fetch seed page HTML\n const strategyStartProgress: CrawlProgress = {\n type: 'strategy',\n pagesVisited: 0,\n totalPages: maxPages,\n currentUrl: seedUrl,\n message: 'Analyzing page structure with Claude...',\n };\n this.emit('progress', strategyStartProgress);\n\n const seedHtml = await this.fetchHtml(seedUrl, useHeadless);\n\n // Step 2: Ask Claude which URLs to crawl (pass seedUrl for relative URL resolution)\n strategy = await this.claudeClient.determineCrawlUrls(seedUrl, seedHtml, crawlInstruction);\n\n const strategyCompleteProgress: CrawlProgress = {\n type: 'strategy',\n pagesVisited: 0,\n totalPages: maxPages,\n message: `Claude identified ${String(strategy.urls.length)} URLs to crawl: ${strategy.reasoning}`,\n };\n this.emit('progress', strategyCompleteProgress);\n } catch (error) {\n // Re-throw strategy errors - do not fall back silently\n throw error instanceof Error ? error : new Error(String(error));\n }\n\n // Step 3: Crawl each URL from Claude's strategy\n let pagesVisited = 0;\n\n for (const url of strategy.urls) {\n if (this.stopped || pagesVisited >= maxPages) break;\n if (this.visited.has(url)) continue;\n\n try {\n const result = await this.crawlSinglePage(\n url,\n extractInstruction,\n pagesVisited,\n useHeadless\n );\n pagesVisited++;\n yield result;\n } catch (error) {\n const pageErrorProgress: CrawlProgress = {\n type: 'error',\n pagesVisited,\n totalPages: maxPages,\n currentUrl: url,\n error: error instanceof Error ? 
error : new Error(String(error)),\n };\n this.emit('progress', pageErrorProgress);\n }\n }\n }\n\n /**\n * Simple mode: BFS crawling with depth limit\n */\n private async *crawlSimple(\n seedUrl: string,\n extractInstruction: string | undefined,\n maxPages: number,\n useHeadless: boolean = false\n ): AsyncIterable<CrawlResult> {\n const queue: Array<{ url: string; depth: number }> = [{ url: seedUrl, depth: 0 }];\n const maxDepth = 2; // Default depth limit for simple mode\n let pagesVisited = 0;\n\n while (queue.length > 0 && pagesVisited < maxPages && !this.stopped) {\n const current = queue.shift();\n\n if (!current || this.visited.has(current.url) || current.depth > maxDepth) {\n continue;\n }\n\n try {\n const result = await this.crawlSinglePage(\n current.url,\n extractInstruction,\n pagesVisited,\n useHeadless\n );\n result.depth = current.depth;\n pagesVisited++;\n\n yield result;\n\n // Add links to queue if we haven't reached max depth\n if (current.depth < maxDepth) {\n try {\n const links = await this.extractLinks(current.url, useHeadless);\n\n if (links.length === 0) {\n logger.debug({ url: current.url }, 'No links found - page may be a leaf node');\n } else {\n logger.debug(\n { url: current.url, linkCount: links.length },\n 'Links extracted from page'\n );\n }\n\n for (const link of links) {\n if (!this.visited.has(link) && this.isSameDomain(seedUrl, link)) {\n queue.push({ url: link, depth: current.depth + 1 });\n }\n }\n } catch (error) {\n // Log link extraction failure but continue crawling other pages\n const errorProgress: CrawlProgress = {\n type: 'error',\n pagesVisited,\n totalPages: maxPages,\n currentUrl: current.url,\n message: `Failed to extract links from ${current.url}`,\n error: error instanceof Error ? error : new Error(String(error)),\n };\n this.emit('progress', errorProgress);\n }\n }\n } catch (error) {\n const errorObj = error instanceof Error ? 
error : new Error(String(error));\n\n // Re-throw non-recoverable errors (extraction failures, Claude CLI not available, headless failures)\n // These represent failures in user-requested functionality that should not be silently skipped\n if (\n errorObj.message.includes('Extraction failed') ||\n errorObj.message.includes('Claude CLI not available') ||\n errorObj.message.includes('Headless fetch failed')\n ) {\n throw errorObj;\n }\n\n // For recoverable errors (page fetch failures), emit progress and continue\n const simpleErrorProgress: CrawlProgress = {\n type: 'error',\n pagesVisited,\n totalPages: maxPages,\n currentUrl: current.url,\n error: errorObj,\n };\n this.emit('progress', simpleErrorProgress);\n }\n }\n }\n\n /**\n * Crawl a single page: fetch, convert to markdown, optionally extract\n */\n private async crawlSinglePage(\n url: string,\n extractInstruction: string | undefined,\n pagesVisited: number,\n useHeadless: boolean = false\n ): Promise<CrawlResult> {\n const pageProgress: CrawlProgress = {\n type: 'page',\n pagesVisited,\n totalPages: 0,\n currentUrl: url,\n };\n this.emit('progress', pageProgress);\n\n // Mark as visited\n this.visited.add(url);\n\n // Fetch HTML\n const html = await this.fetchHtml(url, useHeadless);\n\n // Convert to clean markdown using slurp-ai techniques\n // Note: convertHtmlToMarkdown throws on errors, no need to check success\n const conversion = await convertHtmlToMarkdown(html, url);\n\n logger.debug(\n {\n url,\n title: conversion.title,\n markdownLength: conversion.markdown.length,\n },\n 'Article converted to markdown'\n );\n\n let extracted: string | undefined;\n\n // Optional: Extract specific information using Claude\n if (extractInstruction !== undefined && extractInstruction !== '') {\n // Throw if extraction requested but Claude CLI isn't available\n if (!ClaudeClient.isAvailable()) {\n throw new Error('Claude CLI not available: install Claude Code for extraction');\n }\n\n const extractionProgress: CrawlProgress = {\n type: 'extraction',\n pagesVisited,\n totalPages: 0,\n currentUrl: url,\n };\n this.emit('progress', extractionProgress);\n\n extracted = await this.claudeClient.extractContent(conversion.markdown, extractInstruction);\n }\n\n return {\n url,\n ...(conversion.title !== undefined && { title: conversion.title }),\n markdown: conversion.markdown,\n ...(extracted !== undefined && { extracted }),\n };\n }\n\n /**\n * Fetch HTML content from a URL\n */\n private async fetchHtml(url: string, useHeadless: boolean = false): Promise<string> {\n const startTime = Date.now();\n logger.debug({ url, useHeadless }, 'Fetching HTML');\n\n if (useHeadless) {\n try {\n const result = await this.pythonBridge.fetchHeadless(url);\n const durationMs = Date.now() - startTime;\n logger.info(\n {\n url,\n useHeadless: true,\n durationMs,\n ...summarizePayload(result.html, 'raw-html', url),\n },\n 'Raw HTML fetched'\n );\n return result.html;\n } catch (error) {\n // Wrap with distinctive message so crawlSimple knows not to recover\n throw new Error(\n `Headless fetch failed: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n\n // Original axios implementation for static sites\n try {\n const response = await axios.get<string>(url, {\n timeout: 30000,\n headers: {\n 'User-Agent': 'Mozilla/5.0 (compatible; bluera-knowledge-crawler/1.0)',\n },\n });\n\n const durationMs = Date.now() - startTime;\n logger.info(\n {\n url,\n useHeadless: false,\n durationMs,\n ...summarizePayload(response.data, 'raw-html', url),\n },\n 'Raw HTML fetched'\n );\n\n return response.data;\n } catch (error) {\n logger.error(\n { url, error: error instanceof Error ? error.message : String(error) },\n 'Failed to fetch HTML'\n );\n throw new Error(\n `Failed to fetch ${url}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Extract links from a page using Python bridge\n */\n private async extractLinks(url: string, useHeadless: boolean = false): Promise<string[]> {\n try {\n // Use headless mode for link extraction if enabled\n if (useHeadless) {\n const result = await this.pythonBridge.fetchHeadless(url);\n // Extract href strings from link objects (crawl4ai returns objects, not strings)\n return result.links.map((link: CrawledLink | string) => {\n if (typeof link === 'string') return link;\n return link.href;\n });\n }\n\n const result = await this.pythonBridge.crawl(url);\n\n // Validate response structure (handle potential runtime type mismatches)\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition -- TypeScript types claim pages exists but Python bridge may return invalid structure at runtime\n const firstPage = result.pages?.[0];\n if (!firstPage) {\n throw new Error(`Invalid crawl response structure for ${url}: missing pages array`);\n }\n\n return firstPage.links;\n } catch (error: unknown) {\n // Log the error for debugging\n const errorMessage = error instanceof Error ? error.message : String(error);\n logger.error({ url, error: errorMessage }, 'Failed to extract links');\n\n // Re-throw the error instead of silently swallowing it\n throw new Error(`Link extraction failed for ${url}: ${errorMessage}`);\n }\n }\n\n /**\n * Check if two URLs are from the same domain\n */\n private isSameDomain(url1: string, url2: string): boolean {\n try {\n const domain1 = new URL(url1).hostname.toLowerCase();\n const domain2 = new URL(url2).hostname.toLowerCase();\n return (\n domain1 === domain2 || domain1.endsWith(`.${domain2}`) || domain2.endsWith(`.${domain1}`)\n );\n } catch {\n return false;\n }\n }\n\n /**\n * Stop the crawler\n */\n async stop(): Promise<void> {\n this.stopped = true;\n await this.pythonBridge.stop();\n }\n}\n","/**\n * Article converter using @extractus/article-extractor and Turndown\n * Produces clean markdown from HTML using slurp-ai techniques\n */\n\nimport { extractFromHtml } from '@extractus/article-extractor';\nimport TurndownService from 'turndown';\nimport { gfm } from 'turndown-plugin-gfm';\nimport { preprocessHtmlForCodeBlocks, cleanupMarkdown } from './markdown-utils.js';\nimport { createLogger, truncateForLog } from '../logging/index.js';\n\nconst logger = createLogger('article-converter');\n\nexport interface ConversionResult {\n markdown: string;\n title?: string;\n}\n\n/**\n * Convert HTML to clean markdown using best practices from slurp-ai\n *\n * Pipeline:\n * 1. Extract main article content (strips navigation, ads, boilerplate)\n * 2. Preprocess HTML (handle MkDocs code blocks)\n * 3. Convert to markdown with Turndown + GFM\n * 4. 
Cleanup markdown (regex patterns)\n */\nexport async function convertHtmlToMarkdown(html: string, url: string): Promise<ConversionResult> {\n logger.debug({ url, htmlLength: html.length }, 'Starting HTML conversion');\n\n try {\n // Step 1: Extract main article content\n let articleHtml: string;\n let title: string | undefined;\n\n try {\n const article = await extractFromHtml(html, url);\n if (article?.content !== undefined && article.content !== '') {\n articleHtml = article.content;\n title = article.title !== undefined && article.title !== '' ? article.title : undefined;\n logger.debug(\n {\n url,\n title,\n extractedLength: articleHtml.length,\n usedFullHtml: false,\n },\n 'Article content extracted'\n );\n } else {\n // Fallback to full HTML if extraction fails\n articleHtml = html;\n logger.debug(\n { url, usedFullHtml: true },\n 'Article extraction returned empty, using full HTML'\n );\n }\n } catch (extractError) {\n // Fallback to full HTML if extraction fails\n articleHtml = html;\n logger.debug(\n {\n url,\n usedFullHtml: true,\n error: extractError instanceof Error ? extractError.message : String(extractError),\n },\n 'Article extraction failed, using full HTML'\n );\n }\n\n // Step 2: Preprocess HTML for code blocks\n const preprocessed = preprocessHtmlForCodeBlocks(articleHtml);\n\n // Step 3: Configure Turndown with custom rules\n const turndownService = new TurndownService({\n headingStyle: 'atx', // Use # style headings\n codeBlockStyle: 'fenced', // Use ``` style code blocks\n fence: '```',\n emDelimiter: '*',\n strongDelimiter: '**',\n linkStyle: 'inlined',\n });\n\n // Add GitHub Flavored Markdown support (tables, strikethrough, task lists)\n turndownService.use(gfm);\n\n // Custom rule for headings with anchors (from slurp-ai)\n turndownService.addRule('headingsWithAnchors', {\n filter: ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'],\n replacement(content: string, node: HTMLElement): string {\n const level = Number(node.nodeName.charAt(1));\n const hashes = '#'.repeat(level);\n const cleanContent = content\n .replace(/\\[\\]\\([^)]*\\)/g, '') // Remove empty links\n .replace(/\\s+/g, ' ') // Normalize whitespace\n .trim();\n return cleanContent !== '' ? `\\n\\n${hashes} ${cleanContent}\\n\\n` : '';\n },\n });\n\n // Convert to markdown\n const rawMarkdown = turndownService.turndown(preprocessed);\n\n // Step 4: Cleanup markdown with comprehensive regex patterns\n const markdown = cleanupMarkdown(rawMarkdown);\n\n logger.debug(\n {\n url,\n title,\n rawMarkdownLength: rawMarkdown.length,\n finalMarkdownLength: markdown.length,\n },\n 'HTML to markdown conversion complete'\n );\n\n // Log markdown preview at trace level\n logger.trace(\n {\n url,\n markdownPreview: truncateForLog(markdown, 1000),\n },\n 'Markdown content preview'\n );\n\n return {\n markdown,\n ...(title !== undefined && { title }),\n };\n } catch (error) {\n logger.error(\n {\n url,\n error: error instanceof Error ? error.message : String(error),\n },\n 'HTML to markdown conversion failed'\n );\n\n // Re-throw errors - do not return graceful degradation\n throw error instanceof Error ? 
error : new Error(String(error));\n }\n}\n","/**\n * Markdown conversion utilities ported from slurp-ai\n * Source: https://github.com/ratacat/slurp-ai\n *\n * These utilities handle complex documentation site patterns (MkDocs, Sphinx, etc.)\n * and produce clean, well-formatted markdown.\n */\n\nimport * as cheerio from 'cheerio';\n\n/**\n * Detect language from code element class names.\n * Handles various class naming patterns from different highlighters.\n */\nfunction detectLanguageFromClass(className: string | undefined): string {\n if (className === undefined || className === '') return '';\n\n // Common patterns: \"language-python\", \"lang-js\", \"highlight-python\", \"python\", \"hljs language-python\"\n const patterns = [\n /language-(\\w+)/i,\n /lang-(\\w+)/i,\n /highlight-(\\w+)/i,\n /hljs\\s+(\\w+)/i,\n /^(\\w+)$/i,\n ];\n\n for (const pattern of patterns) {\n const match = className.match(pattern);\n if (match?.[1] !== undefined) {\n const lang = match[1].toLowerCase();\n // Filter out common non-language classes\n if (!['hljs', 'highlight', 'code', 'pre', 'block', 'inline'].includes(lang)) {\n return lang;\n }\n }\n }\n\n return '';\n}\n\n/**\n * Escape HTML special characters for safe embedding in HTML.\n */\nfunction escapeHtml(text: string): string {\n return text\n .replace(/&/g, '&amp;')\n .replace(/</g, '&lt;')\n .replace(/>/g, '&gt;')\n .replace(/\"/g, '&quot;')\n .replace(/'/g, '&#039;');\n}\n\n/**\n * Preprocess HTML to handle MkDocs/Material theme code blocks.\n *\n * MkDocs wraps code in tables for line numbers:\n * <table><tbody><tr><td>line numbers</td><td><pre><code>code</code></pre></td></tr></tbody></table>\n *\n * This function converts them to standard <pre><code> blocks that Turndown handles correctly.\n * Also strips syntax highlighting spans and empty anchors from code.\n */\nexport function preprocessHtmlForCodeBlocks(html: string): string {\n if (!html || typeof html !== 'string') return html;\n\n const $ = cheerio.load(html);\n\n // Handle MkDocs/Material table-wrapped code blocks\n $('table').each((_i, table) => {\n const $table = $(table);\n\n // Check if this table contains a code block\n const $codeCell = $table.find('td pre code, td div pre code');\n\n if ($codeCell.length > 0) {\n // This is a code block table - extract the code\n const $pre = $codeCell.closest('pre');\n const $code = $codeCell.first();\n\n // Get language from class\n let language = detectLanguageFromClass($code.attr('class'));\n if (!language) {\n language = detectLanguageFromClass($pre.attr('class'));\n }\n\n // Get the text content, stripping all inner HTML tags\n const codeText = $code.text();\n\n // Create a clean pre > code block\n const cleanPre = `<pre><code class=\"language-${language}\">${escapeHtml(codeText)}</code></pre>`;\n\n // Replace the entire table with the clean code block\n $table.replaceWith(cleanPre);\n }\n });\n\n // Strip empty anchor tags used for line numbers\n $('pre a, code a').each((_i, anchor) => {\n const $anchor = $(anchor);\n if (!$anchor.text().trim()) {\n $anchor.remove();\n }\n });\n\n // Strip syntax highlighting spans inside code blocks, keeping only text\n $('pre span, code span').each((_i, span) => {\n const $span = $(span);\n $span.replaceWith($span.text());\n });\n\n // Handle standalone pre blocks that might have spans/anchors\n $('pre').each((_i, pre) => {\n const $pre = $(pre);\n // If this pre has a code child, it was already processed\n if ($pre.find('code').length === 0) {\n // Direct pre without code - get text content\n const 
text = $pre.text();\n const lang = detectLanguageFromClass($pre.attr('class'));\n $pre.html(`<code class=\"language-${lang}\">${escapeHtml(text)}</code>`);\n }\n });\n\n return $.html();\n}\n\n/**\n * Apply comprehensive cleanup rules to markdown content.\n *\n * Formatting rules:\n * - Double newlines between paragraphs and headings\n * - Double newlines before lists when preceded by normal text\n * - Single newlines between list items\n * - No blank lines inside code blocks\n */\nexport function cleanupMarkdown(markdown: string): string {\n if (!markdown) return '';\n\n const trimmed = markdown.trim();\n if (trimmed === '') return '';\n\n let result = trimmed;\n\n // 0. Fix broken headings where ## is on its own line followed by the text\n // Pattern: \"## \\n\\nSome text\" → \"## Some text\"\n result = result.replace(/^(#{1,6})\\s*\\n\\n+(\\S[^\\n]*)/gm, '$1 $2');\n\n // 0.5. Normalize multiple spaces after heading markers to single space\n // Pattern: \"## Subtitle\" → \"## Subtitle\"\n result = result.replace(/(#{1,6})\\s{2,}/g, '$1 ');\n\n // 1. Fix navigation links with excessive whitespace\n result = result.replace(/\\*\\s+\\[\\s*([^\\n]+?)\\s*\\]\\(([^)]+)\\)/g, '* [$1]($2)');\n\n // 2. Handle headings with specific newline requirements\n\n // Text followed by heading should have a single newline between them (no blank line)\n result = result.replace(/([^\\n])\\n\\n+(#\\s)/g, '$1\\n$2');\n\n // Add double newlines between text and next heading\n result = result.replace(/(Some text\\.)\\n(##\\s)/g, '$1\\n\\n$2');\n\n // Double newlines after a heading when followed by text\n result = result.replace(/(#{1,6}\\s[^\\n]+)\\n([^#\\n])/g, '$1\\n\\n$2');\n\n // Double newlines between headings\n result = result.replace(/(#{1,6}\\s[^\\n]+)\\n(#{1,6}\\s)/g, '$1\\n\\n$2');\n\n // 3. Lists - ensure all list items have single newlines only\n result = result.replace(/(\\* Item 1)\\n\\n+(\\* Item 2)\\n\\n+(\\* Item 3)/g, '$1\\n$2\\n$3');\n\n // 3.5. General list item spacing - ensure single newlines between list items\n result = result.replace(/(^\\*\\s[^\\n]+)\\n{2,}(^\\*\\s)/gm, '$1\\n$2');\n\n // 4. Clean up excessive blank lines (3+ newlines → 2 newlines)\n result = result.replace(/\\n{3,}/g, '\\n\\n');\n\n // 5. Code blocks - no blank lines after opening or before closing backticks\n result = result.replace(/(```[^\\n]*)\\n\\n+/g, '$1\\n');\n result = result.replace(/\\n\\n+```/g, '\\n```');\n\n // 6. Remove empty list items\n result = result.replace(/\\*\\s*\\n\\s*\\*/g, '*');\n\n // 7. Strip any remaining HTML tags that leaked through (common in MkDocs/Material)\n // Remove table structure tags\n result = result.replace(/<\\/?table[^>]*>/gi, '');\n result = result.replace(/<\\/?tbody[^>]*>/gi, '');\n result = result.replace(/<\\/?thead[^>]*>/gi, '');\n result = result.replace(/<\\/?tr[^>]*>/gi, '');\n result = result.replace(/<\\/?td[^>]*>/gi, '');\n result = result.replace(/<\\/?th[^>]*>/gi, '');\n\n // Remove empty anchor tags: <a></a> or <a id=\"...\"></a>\n result = result.replace(/<a[^>]*><\\/a>/gi, '');\n\n // Remove span tags (syntax highlighting remnants)\n result = result.replace(/<\\/?span[^>]*>/gi, '');\n\n // Remove div tags\n result = result.replace(/<\\/?div[^>]*>/gi, '');\n\n // Remove pre/code tags that leaked\n result = result.replace(/<\\/?pre[^>]*>/gi, '');\n result = result.replace(/<\\/?code[^>]*>/gi, '');\n\n // 8. Remove empty markdown links: [](url) and []()\n result = result.replace(/\\[\\]\\([^)]*\\)/g, '');\n\n // 9. 
Remove codelineno references that leaked into content\n // Pattern: [](_file.md#__codelineno-N-M)\n result = result.replace(/\\[\\]\\([^)]*#__codelineno-[^)]+\\)/g, '');\n\n // Also clean inline codelineno patterns\n result = result.replace(/\\[?\\]?\\([^)]*#__codelineno-[^)]*\\)/g, '');\n\n // 10. Clean up any double-escaped HTML entities that might result\n result = result.replace(/&amp;lt;/g, '&lt;');\n result = result.replace(/&amp;gt;/g, '&gt;');\n result = result.replace(/&amp;amp;/g, '&amp;');\n\n // 11. Final cleanup - normalize excessive whitespace from removed tags\n result = result.replace(/\\n{3,}/g, '\\n\\n');\n result = result.replace(/[ \\t]+\\n/g, '\\n');\n\n return result;\n}\n","/**\n * Claude CLI client for intelligent crawling and extraction\n * Uses `claude -p` programmatically to analyze page structure and extract content\n */\n\nimport { spawn, execSync } from 'node:child_process';\n\n/**\n * Schema for crawl strategy response from Claude\n */\nexport interface CrawlStrategy {\n urls: string[];\n reasoning: string;\n}\n\nconst CRAWL_STRATEGY_SCHEMA = {\n type: 'object',\n properties: {\n urls: {\n type: 'array',\n items: { type: 'string' },\n description: 'List of URLs to crawl based on the instruction',\n },\n reasoning: {\n type: 'string',\n description: 'Brief explanation of why these URLs were selected',\n },\n },\n required: ['urls', 'reasoning'],\n};\n\n/**\n * Client for interacting with Claude Code CLI\n */\nexport class ClaudeClient {\n private readonly timeout: number;\n private static availabilityChecked = false;\n private static available = false;\n\n /**\n * Check if Claude CLI is available in PATH\n * Result is cached after first check for performance\n */\n static isAvailable(): boolean {\n if (!ClaudeClient.availabilityChecked) {\n try {\n execSync('which claude', { stdio: 'ignore' });\n ClaudeClient.available = true;\n } catch {\n ClaudeClient.available = false;\n }\n ClaudeClient.availabilityChecked = true;\n }\n return ClaudeClient.available;\n }\n\n /**\n * Reset availability cache (for testing)\n */\n static resetAvailabilityCache(): void {\n ClaudeClient.availabilityChecked = false;\n ClaudeClient.available = false;\n }\n\n constructor(options: { timeout?: number } = {}) {\n this.timeout = options.timeout ?? 30000; // 30s default\n }\n\n /**\n * Determine which URLs to crawl based on natural language instruction\n *\n * @param seedUrl - The URL of the seed page (for resolving relative URLs)\n * @param seedHtml - HTML content of the seed page\n * @param instruction - Natural language crawl instruction (e.g., \"scrape all Getting Started pages\")\n * @returns List of URLs to crawl with reasoning\n */\n async determineCrawlUrls(\n seedUrl: string,\n seedHtml: string,\n instruction: string\n ): Promise<CrawlStrategy> {\n const prompt = `You are analyzing a webpage to determine which pages to crawl based on the user's instruction.\n\nBase URL: ${seedUrl}\n\nInstruction: ${instruction}\n\nWebpage HTML (analyze the navigation structure, links, and content):\n${this.truncateHtml(seedHtml, 50000)}\n\nBased on the instruction, extract and return a list of absolute URLs that should be crawled. When you encounter relative URLs (starting with \"/\" or without a protocol), resolve them against the Base URL. 
For example, if Base URL is \"https://example.com/docs\" and you see href=\"/docs/hooks\", return \"https://example.com/docs/hooks\".\n\nLook for navigation menus, sidebars, headers, and link structures that match the instruction.\n\nReturn only URLs that are relevant to the instruction. If the instruction mentions specific sections (e.g., \"Getting Started\"), find links in those sections.`;\n\n try {\n const result = await this.callClaude(prompt, CRAWL_STRATEGY_SCHEMA);\n const rawParsed: unknown = JSON.parse(result);\n\n // Claude CLI with --json-schema returns wrapper: {type, result, structured_output: {...}}\n // Extract structured_output if present, otherwise use raw response\n const parsed = this.extractStructuredOutput(rawParsed);\n\n // Validate and narrow type\n if (\n typeof parsed !== 'object' ||\n parsed === null ||\n !('urls' in parsed) ||\n !('reasoning' in parsed) ||\n !Array.isArray(parsed.urls) ||\n parsed.urls.length === 0 ||\n typeof parsed.reasoning !== 'string' ||\n !parsed.urls.every((url) => typeof url === 'string')\n ) {\n throw new Error('Claude returned invalid crawl strategy');\n }\n\n // Type is now properly narrowed - urls is string[] after validation\n return { urls: parsed.urls, reasoning: parsed.reasoning };\n } catch (error) {\n throw new Error(\n `Failed to determine crawl strategy: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Extract specific information from markdown content using natural language\n *\n * @param markdown - Page content in markdown format\n * @param instruction - Natural language extraction instruction (e.g., \"extract pricing info\")\n * @returns Extracted information as text\n */\n async extractContent(markdown: string, instruction: string): Promise<string> {\n const prompt = `${instruction}\n\nContent to analyze:\n${this.truncateMarkdown(markdown, 100000)}`;\n\n try {\n const result = await this.callClaude(prompt);\n return result.trim();\n } catch (error) {\n throw new Error(\n `Failed to extract content: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Call Claude CLI with a prompt\n *\n * @param prompt - The prompt to send to Claude\n * @param jsonSchema - Optional JSON schema for structured output\n * @returns Claude's response as a string\n */\n private async callClaude(prompt: string, jsonSchema?: Record<string, unknown>): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n const args = ['-p'];\n\n // Add JSON schema if provided\n if (jsonSchema) {\n args.push('--json-schema', JSON.stringify(jsonSchema));\n args.push('--output-format', 'json');\n }\n\n const proc = spawn('claude', args, {\n stdio: ['pipe', 'pipe', 'pipe'],\n cwd: process.cwd(),\n env: { ...process.env },\n });\n\n let stdout = '';\n let stderr = '';\n let timeoutId: NodeJS.Timeout | undefined;\n\n // Set timeout\n if (this.timeout > 0) {\n timeoutId = setTimeout(() => {\n proc.kill('SIGTERM');\n reject(new Error(`Claude CLI timed out after ${String(this.timeout)}ms`));\n }, this.timeout);\n }\n\n proc.stdout.on('data', (chunk: Buffer) => {\n stdout += chunk.toString();\n });\n\n proc.stderr.on('data', (chunk: Buffer) => {\n stderr += chunk.toString();\n });\n\n proc.on('close', (code: number | null) => {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n\n if (code === 0) {\n resolve(stdout.trim());\n } else {\n reject(\n new Error(`Claude CLI exited with code ${String(code)}${stderr ? 
`: ${stderr}` : ''}`)\n );\n }\n });\n\n proc.on('error', (err) => {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n reject(new Error(`Failed to spawn Claude CLI: ${err.message}`));\n });\n\n // Write prompt to stdin\n proc.stdin.write(prompt);\n proc.stdin.end();\n });\n }\n\n /**\n * Truncate HTML to a maximum length (keep important parts)\n */\n private truncateHtml(html: string, maxLength: number): string {\n if (html.length <= maxLength) return html;\n\n // Try to keep the beginning (usually has navigation)\n return `${html.substring(0, maxLength)}\\n\\n[... HTML truncated ...]`;\n }\n\n /**\n * Truncate markdown to a maximum length\n */\n private truncateMarkdown(markdown: string, maxLength: number): string {\n if (markdown.length <= maxLength) return markdown;\n\n return `${markdown.substring(0, maxLength)}\\n\\n[... content truncated ...]`;\n }\n\n /**\n * Type guard to check if value is a record (plain object)\n */\n private isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === 'object' && value !== null && !Array.isArray(value);\n }\n\n /**\n * Extract structured_output from Claude CLI wrapper format if present.\n * Claude CLI with --json-schema returns: {type, result, structured_output: {...}}\n * This method extracts the inner structured_output, or returns the raw value if not wrapped.\n */\n private extractStructuredOutput(rawParsed: unknown): unknown {\n if (this.isRecord(rawParsed) && 'structured_output' in rawParsed) {\n const structuredOutput = rawParsed['structured_output'];\n if (typeof structuredOutput === 'object') {\n return structuredOutput;\n }\n }\n return rawParsed;\n }\n}\n"],"mappings":";;;;;;;;AAKA,SAAS,oBAAoB;AAC7B,OAAO,WAAW;;;ACDlB,SAAS,uBAAuB;AAChC,OAAO,qBAAqB;AAC5B,SAAS,WAAW;;;ACCpB,YAAY,aAAa;AAMzB,SAAS,wBAAwB,WAAuC;AACtE,MAAI,cAAc,UAAa,cAAc,GAAI,QAAO;AAGxD,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQ,UAAU,MAAM,OAAO;AACrC,QAAI,QAAQ,CAAC,MAAM,QAAW;AAC5B,YAAM,OAAO,MAAM,CAAC,EAAE,YAAY;AAElC,UAAI,CAAC,CAAC,QAAQ,aAAa,QAAQ,OAAO,SAAS,QAAQ,EAAE,SAAS,IAAI,GAAG;AAC3E,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,WAAW,MAAsB;AACxC,SAAO,KACJ,QAAQ,MAAM,OAAO,EACrB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,QAAQ,EACtB,QAAQ,MAAM,QAAQ;AAC3B;AAWO,SAAS,4BAA4B,MAAsB;AAChE,MAAI,CAAC,QAAQ,OAAO,SAAS,SAAU,QAAO;AAE9C,QAAM,IAAY,aAAK,IAAI;AAG3B,IAAE,OAAO,EAAE,KAAK,CAAC,IAAI,UAAU;AAC7B,UAAM,SAAS,EAAE,KAAK;AAGtB,UAAM,YAAY,OAAO,KAAK,8BAA8B;AAE5D,QAAI,UAAU,SAAS,GAAG;AAExB,YAAM,OAAO,UAAU,QAAQ,KAAK;AACpC,YAAM,QAAQ,UAAU,MAAM;AAG9B,UAAI,WAAW,wBAAwB,MAAM,KAAK,OAAO,CAAC;AAC1D,UAAI,CAAC,UAAU;AACb,mBAAW,wBAAwB,KAAK,KAAK,OAAO,CAAC;AAAA,MACvD;AAGA,YAAM,WAAW,MAAM,KAAK;AAG5B,YAAM,WAAW,8BAA8B,QAAQ,KAAK,WAAW,QAAQ,CAAC;AAGhF,aAAO,YAAY,QAAQ;AAAA,IAC7B;AAAA,EACF,CAAC;AAGD,IAAE,eAAe,EAAE,KAAK,CAAC,IAAI,WAAW;AACtC,UAAM,UAAU,EAAE,MAAM;AACxB,QAAI,CAAC,QAAQ,KAAK,EAAE,KAAK,GAAG;AAC1B,cAAQ,OAAO;AAAA,IACjB;AAAA,EACF,CAAC;AAGD,IAAE,qBAAqB,EAAE,KAAK,CAAC,IAAI,SAAS;AAC1C,UAAM,QAAQ,EAAE,IAAI;AACpB,UAAM,YAAY,MAAM,KAAK,CAAC;AAAA,EAChC,CAAC;AAGD,IAAE,KAAK,EAAE,KAAK,CAAC,IAAI,QAAQ;AACzB,UAAM,OAAO,EAAE,GAAG;AAElB,QAAI,KAAK,KAAK,MAAM,EAAE,WAAW,GAAG;AAElC,YAAM,OAAO,KAAK,KAAK;AACvB,YAAM,OAAO,wBAAwB,KAAK,KAAK,OAAO,CAAC;AACvD,WAAK,KAAK,yBAAyB,IAAI,KAAK,WAAW,IAAI,CAAC,SAAS;AAAA,IACvE;AAAA,EACF,CAAC;AAED,SAAO,EAAE,KAAK;AAChB;AAWO,SAAS,gBAAgB,UAA0B;AACxD,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,UAAU,SAAS,KAAK;AAC9B,MAAI,YAAY,GAAI,QAAO;AAE3B,MAAI,SAAS;AAIb,WAAS,OAAO,QAAQ,iCAAiC,OAAO;AAIhE,WAAS,OAAO,QAAQ,mBAAm
B,KAAK;AAGhD,WAAS,OAAO,QAAQ,wCAAwC,YAAY;AAK5E,WAAS,OAAO,QAAQ,sBAAsB,QAAQ;AAGtD,WAAS,OAAO,QAAQ,0BAA0B,UAAU;AAG5D,WAAS,OAAO,QAAQ,+BAA+B,UAAU;AAGjE,WAAS,OAAO,QAAQ,iCAAiC,UAAU;AAGnE,WAAS,OAAO,QAAQ,gDAAgD,YAAY;AAGpF,WAAS,OAAO,QAAQ,gCAAgC,QAAQ;AAGhE,WAAS,OAAO,QAAQ,WAAW,MAAM;AAGzC,WAAS,OAAO,QAAQ,qBAAqB,MAAM;AACnD,WAAS,OAAO,QAAQ,aAAa,OAAO;AAG5C,WAAS,OAAO,QAAQ,iBAAiB,GAAG;AAI5C,WAAS,OAAO,QAAQ,qBAAqB,EAAE;AAC/C,WAAS,OAAO,QAAQ,qBAAqB,EAAE;AAC/C,WAAS,OAAO,QAAQ,qBAAqB,EAAE;AAC/C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAC5C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAC5C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAG5C,WAAS,OAAO,QAAQ,mBAAmB,EAAE;AAG7C,WAAS,OAAO,QAAQ,oBAAoB,EAAE;AAG9C,WAAS,OAAO,QAAQ,mBAAmB,EAAE;AAG7C,WAAS,OAAO,QAAQ,mBAAmB,EAAE;AAC7C,WAAS,OAAO,QAAQ,oBAAoB,EAAE;AAG9C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAI5C,WAAS,OAAO,QAAQ,qCAAqC,EAAE;AAG/D,WAAS,OAAO,QAAQ,uCAAuC,EAAE;AAGjE,WAAS,OAAO,QAAQ,aAAa,MAAM;AAC3C,WAAS,OAAO,QAAQ,aAAa,MAAM;AAC3C,WAAS,OAAO,QAAQ,cAAc,OAAO;AAG7C,WAAS,OAAO,QAAQ,WAAW,MAAM;AACzC,WAAS,OAAO,QAAQ,aAAa,IAAI;AAEzC,SAAO;AACT;;;ADrNA,IAAM,SAAS,aAAa,mBAAmB;AAgB/C,eAAsB,sBAAsB,MAAc,KAAwC;AAChG,SAAO,MAAM,EAAE,KAAK,YAAY,KAAK,OAAO,GAAG,0BAA0B;AAEzE,MAAI;AAEF,QAAI;AACJ,QAAI;AAEJ,QAAI;AACF,YAAM,UAAU,MAAM,gBAAgB,MAAM,GAAG;AAC/C,UAAI,SAAS,YAAY,UAAa,QAAQ,YAAY,IAAI;AAC5D,sBAAc,QAAQ;AACtB,gBAAQ,QAAQ,UAAU,UAAa,QAAQ,UAAU,KAAK,QAAQ,QAAQ;AAC9E,eAAO;AAAA,UACL;AAAA,YACE;AAAA,YACA;AAAA,YACA,iBAAiB,YAAY;AAAA,YAC7B,cAAc;AAAA,UAChB;AAAA,UACA;AAAA,QACF;AAAA,MACF,OAAO;AAEL,sBAAc;AACd,eAAO;AAAA,UACL,EAAE,KAAK,cAAc,KAAK;AAAA,UAC1B;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,cAAc;AAErB,oBAAc;AACd,aAAO;AAAA,QACL;AAAA,UACE;AAAA,UACA,cAAc;AAAA,UACd,OAAO,wBAAwB,QAAQ,aAAa,UAAU,OAAO,YAAY;AAAA,QACnF;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,eAAe,4BAA4B,WAAW;AAG5D,UAAM,kBAAkB,IAAI,gBAAgB;AAAA,MAC1C,cAAc;AAAA;AAAA,MACd,gBAAgB;AAAA;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,MACb,iBAAiB;AAAA,MACjB,WAAW;AAAA,IACb,CAAC;AAGD,oBAAgB,IAAI,GAAG;AAGvB,oBAAgB,QAAQ,uBAAuB;AAAA,MAC7C,QAAQ,CAAC,MAAM,MAAM,MAAM,MAAM,MAAM,IAAI;AAAA,MAC3C,YAAY,SAAiB,MAA2B;AACtD,cAAM,QAAQ,OAAO,KAAK,SAAS,OAAO,CAAC,CAAC;AAC5C,cAAM,SAAS,IAAI,OAAO,KAAK;AAC/B,cAAM,eAAe,QAClB,QAAQ,kBAAkB,EAAE,EAC5B,QAAQ,QAAQ,GAAG,EACnB,KAAK;AACR,eAAO,iBAAiB,KAAK;AAAA;AAAA,EAAO,MAAM,IAAI,YAAY;AAAA;AAAA,IAAS;AAAA,MACrE;AAAA,IACF,CAAC;AAGD,UAAM,cAAc,gBAAgB,SAAS,YAAY;AAGzD,UAAM,WAAW,gBAAgB,WAAW;AAE5C,WAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA;AAAA,QACA,mBAAmB,YAAY;AAAA,QAC/B,qBAAqB,SAAS;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAGA,WAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,iBAAiB,eAAe,UAAU,GAAI;AAAA,MAChD;AAAA,MACA;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,GAAI,UAAU,UAAa,EAAE,MAAM;AAAA,IACrC;AAAA,EACF,SAAS,OAAO;AACd,WAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D;AAAA,MACA;AAAA,IACF;AAGA,UAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,EAChE;AACF;;;AExIA,SAAS,OAAO,gBAAgB;AAUhC,IAAM,wBAAwB;AAAA,EAC5B,MAAM;AAAA,EACN,YAAY;AAAA,IACV,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,aAAa;AAAA,IACf;AAAA,IACA,WAAW;AAAA,MACT,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,QAAQ,WAAW;AAChC;AAKO,IAAM,eAAN,MAAM,cAAa;AAAA,EACP;AAAA,EACjB,OAAe,sBAAsB;AAAA,EACrC,OAAe,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA,EAM3B,OAAO,cAAuB;AAC5B,QAAI,CAAC,cAAa,qBAAqB;AACrC,UAAI;AACF,iBAAS,gBAAgB,EAAE,OAAO,SAAS,CAAC;AAC5C,sBAAa,YAAY;AAAA,MAC3B,QAAQ;AACN,sBAAa,YAAY;AAAA,MAC3B;AACA,oBAAa,sBAAsB;AAAA,IACrC;AACA,WAAO,cAAa;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,yBAA+B;AACpC,kBAAa,sBAAsB;AACnC,kBAAa,YAAY;AAAA,EAC3B;AAAA,EAEA,YAAY,UAAgC,CAAC,GAAG;AAC9C,SAAK,UAAU,QAAQ,WAAW;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,mBACJ,SACA,UACA,aACwB;A
ACxB,UAAM,SAAS;AAAA;AAAA,YAEP,OAAO;AAAA;AAAA,eAEJ,WAAW;AAAA;AAAA;AAAA,EAGxB,KAAK,aAAa,UAAU,GAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQhC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,WAAW,QAAQ,qBAAqB;AAClE,YAAM,YAAqB,KAAK,MAAM,MAAM;AAI5C,YAAM,SAAS,KAAK,wBAAwB,SAAS;AAGrD,UACE,OAAO,WAAW,YAClB,WAAW,QACX,EAAE,UAAU,WACZ,EAAE,eAAe,WACjB,CAAC,MAAM,QAAQ,OAAO,IAAI,KAC1B,OAAO,KAAK,WAAW,KACvB,OAAO,OAAO,cAAc,YAC5B,CAAC,OAAO,KAAK,MAAM,CAAC,QAAQ,OAAO,QAAQ,QAAQ,GACnD;AACA,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AAGA,aAAO,EAAE,MAAM,OAAO,MAAM,WAAW,OAAO,UAAU;AAAA,IAC1D,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/F;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,eAAe,UAAkB,aAAsC;AAC3E,UAAM,SAAS,GAAG,WAAW;AAAA;AAAA;AAAA,EAG/B,KAAK,iBAAiB,UAAU,GAAM,CAAC;AAErC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,WAAW,MAAM;AAC3C,aAAO,OAAO,KAAK;AAAA,IACrB,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACtF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,WAAW,QAAgB,YAAuD;AAC9F,WAAO,IAAI,QAAgB,CAAC,SAAS,WAAW;AAC9C,YAAM,OAAO,CAAC,IAAI;AAGlB,UAAI,YAAY;AACd,aAAK,KAAK,iBAAiB,KAAK,UAAU,UAAU,CAAC;AACrD,aAAK,KAAK,mBAAmB,MAAM;AAAA,MACrC;AAEA,YAAM,OAAO,MAAM,UAAU,MAAM;AAAA,QACjC,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,QAC9B,KAAK,QAAQ,IAAI;AAAA,QACjB,KAAK,EAAE,GAAG,QAAQ,IAAI;AAAA,MACxB,CAAC;AAED,UAAI,SAAS;AACb,UAAI,SAAS;AACb,UAAI;AAGJ,UAAI,KAAK,UAAU,GAAG;AACpB,oBAAY,WAAW,MAAM;AAC3B,eAAK,KAAK,SAAS;AACnB,iBAAO,IAAI,MAAM,8BAA8B,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC;AAAA,QAC1E,GAAG,KAAK,OAAO;AAAA,MACjB;AAEA,WAAK,OAAO,GAAG,QAAQ,CAAC,UAAkB;AACxC,kBAAU,MAAM,SAAS;AAAA,MAC3B,CAAC;AAED,WAAK,OAAO,GAAG,QAAQ,CAAC,UAAkB;AACxC,kBAAU,MAAM,SAAS;AAAA,MAC3B,CAAC;AAED,WAAK,GAAG,SAAS,CAAC,SAAwB;AACxC,YAAI,cAAc,QAAW;AAC3B,uBAAa,SAAS;AAAA,QACxB;AAEA,YAAI,SAAS,GAAG;AACd,kBAAQ,OAAO,KAAK,CAAC;AAAA,QACvB,OAAO;AACL;AAAA,YACE,IAAI,MAAM,+BAA+B,OAAO,IAAI,CAAC,GAAG,SAAS,KAAK,MAAM,KAAK,EAAE,EAAE;AAAA,UACvF;AAAA,QACF;AAAA,MACF,CAAC;AAED,WAAK,GAAG,SAAS,CAAC,QAAQ;AACxB,YAAI,cAAc,QAAW;AAC3B,uBAAa,SAAS;AAAA,QACxB;AACA,eAAO,IAAI,MAAM,+BAA+B,IAAI,OAAO,EAAE,CAAC;AAAA,MAChE,CAAC;AAGD,WAAK,MAAM,MAAM,MAAM;AACvB,WAAK,MAAM,IAAI;AAAA,IACjB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAc,WAA2B;AAC5D,QAAI,KAAK,UAAU,UAAW,QAAO;AAGrC,WAAO,GAAG,KAAK,UAAU,GAAG,SAAS,CAAC;AAAA;AAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAAkB,WAA2B;AACpE,QAAI,SAAS,UAAU,UAAW,QAAO;AAEzC,WAAO,GAAG,SAAS,UAAU,GAAG,SAAS,CAAC;AAAA;AAAA;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,OAAkD;AACjE,WAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,wBAAwB,WAA6B;AAC3D,QAAI,KAAK,SAAS,SAAS,KAAK,uBAAuB,WAAW;AAChE,YAAM,mBAAmB,UAAU,mBAAmB;AACtD,UAAI,OAAO,qBAAqB,UAAU;AACxC,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;;;AHxPA,IAAMA,UAAS,aAAa,SAAS;AA+B9B,IAAM,qBAAN,cAAiC,aAAa;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EAER,cAAc;AACZ,UAAM;AACN,SAAK,eAAe,IAAI,aAAa;AACrC,SAAK,eAAe,IAAI,aAAa;AACrC,SAAK,UAAU,oBAAI,IAAI;AACvB,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,MAAM,SAAiB,UAAwB,CAAC,GAA+B;AACpF,UAAM,EAAE,kBAAkB,oBAAoB,WAAW,IAAI,SAAS,MAAM,IAAI;AAEhF,SAAK,QAAQ,MAAM;AACnB,SAAK,UAAU;AAEf,IAAAA,QAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA;AAAA,QACA,MAAM,SACF,WACA,qBAAqB,UAAa,qBAAqB,KACrD,gBACA;AAAA,QACN,uBAAuB,uBAAuB;AAAA,MAChD;AAAA,MACA;AAAA,IACF;AAEA,UAAM,gBAA+B;AAAA,MACnC,MAAM;AAAA,MACN,cAAc;AAAA,MACd,YAAY;AAAA,IACd;AACA,SAAK,KAAK,YAAY,aAAa;AAGnC,UAAM,qBAAqB,CAAC,UAAU,qBAAqB,UAAa,qBAAqB;AAE7F,QAAI,oBAAoB;AAEtB,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ,eAAe;AAAA,MACzB;AAAA,IACF,OAAO;AACL
,aAAO,KAAK,YAAY,SAAS,oBAAoB,UAAU,QAAQ,eAAe,KAAK;AAAA,IAC7F;AAEA,IAAAA,QAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,cAAc,KAAK,QAAQ;AAAA,MAC7B;AAAA,MACA;AAAA,IACF;AAGA,QAAI,KAAK,QAAQ,SAAS,KAAK,WAAW,GAAG;AAC3C,YAAM,kBAAiC;AAAA,QACrC,MAAM;AAAA,QACN,cAAc,KAAK,QAAQ;AAAA,QAC3B,YAAY;AAAA,QACZ,SAAS,iDAAiD,OAAO,QAAQ,CAAC;AAAA,QAC1E,OAAO,IAAI,MAAM,oBAAoB;AAAA,MACvC;AACA,WAAK,KAAK,YAAY,eAAe;AAAA,IACvC;AAEA,UAAM,mBAAkC;AAAA,MACtC,MAAM;AAAA,MACN,cAAc,KAAK,QAAQ;AAAA,MAC3B,YAAY,KAAK,QAAQ;AAAA,IAC3B;AACA,SAAK,KAAK,YAAY,gBAAgB;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,iBACb,SACA,kBACA,oBACA,UACA,cAAuB,OACK;AAE5B,QAAI,CAAC,aAAa,YAAY,GAAG;AAC/B,YAAM,IAAI,MAAM,wEAAwE;AAAA,IAC1F;AAEA,QAAI;AAEJ,QAAI;AAEF,YAAM,wBAAuC;AAAA,QAC3C,MAAM;AAAA,QACN,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,YAAY;AAAA,QACZ,SAAS;AAAA,MACX;AACA,WAAK,KAAK,YAAY,qBAAqB;AAE3C,YAAM,WAAW,MAAM,KAAK,UAAU,SAAS,WAAW;AAG1D,iBAAW,MAAM,KAAK,aAAa,mBAAmB,SAAS,UAAU,gBAAgB;AAEzF,YAAM,2BAA0C;AAAA,QAC9C,MAAM;AAAA,QACN,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,SAAS,qBAAqB,OAAO,SAAS,KAAK,MAAM,CAAC,mBAAmB,SAAS,SAAS;AAAA,MACjG;AACA,WAAK,KAAK,YAAY,wBAAwB;AAAA,IAChD,SAAS,OAAO;AAEd,YAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,IAChE;AAGA,QAAI,eAAe;AAEnB,eAAW,OAAO,SAAS,MAAM;AAC/B,UAAI,KAAK,WAAW,gBAAgB,SAAU;AAC9C,UAAI,KAAK,QAAQ,IAAI,GAAG,EAAG;AAE3B,UAAI;AACF,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA;AACA,cAAM;AAAA,MACR,SAAS,OAAO;AACd,cAAM,oBAAmC;AAAA,UACvC,MAAM;AAAA,UACN;AAAA,UACA,YAAY;AAAA,UACZ,YAAY;AAAA,UACZ,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,QACjE;AACA,aAAK,KAAK,YAAY,iBAAiB;AAAA,MACzC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,YACb,SACA,oBACA,UACA,cAAuB,OACK;AAC5B,UAAM,QAA+C,CAAC,EAAE,KAAK,SAAS,OAAO,EAAE,CAAC;AAChF,UAAM,WAAW;AACjB,QAAI,eAAe;AAEnB,WAAO,MAAM,SAAS,KAAK,eAAe,YAAY,CAAC,KAAK,SAAS;AACnE,YAAM,UAAU,MAAM,MAAM;AAE5B,UAAI,CAAC,WAAW,KAAK,QAAQ,IAAI,QAAQ,GAAG,KAAK,QAAQ,QAAQ,UAAU;AACzE;AAAA,MACF;AAEA,UAAI;AACF,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,eAAO,QAAQ,QAAQ;AACvB;AAEA,cAAM;AAGN,YAAI,QAAQ,QAAQ,UAAU;AAC5B,cAAI;AACF,kBAAM,QAAQ,MAAM,KAAK,aAAa,QAAQ,KAAK,WAAW;AAE9D,gBAAI,MAAM,WAAW,GAAG;AACtB,cAAAA,QAAO,MAAM,EAAE,KAAK,QAAQ,IAAI,GAAG,0CAA0C;AAAA,YAC/E,OAAO;AACL,cAAAA,QAAO;AAAA,gBACL,EAAE,KAAK,QAAQ,KAAK,WAAW,MAAM,OAAO;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAEA,uBAAW,QAAQ,OAAO;AACxB,kBAAI,CAAC,KAAK,QAAQ,IAAI,IAAI,KAAK,KAAK,aAAa,SAAS,IAAI,GAAG;AAC/D,sBAAM,KAAK,EAAE,KAAK,MAAM,OAAO,QAAQ,QAAQ,EAAE,CAAC;AAAA,cACpD;AAAA,YACF;AAAA,UACF,SAAS,OAAO;AAEd,kBAAM,gBAA+B;AAAA,cACnC,MAAM;AAAA,cACN;AAAA,cACA,YAAY;AAAA,cACZ,YAAY,QAAQ;AAAA,cACpB,SAAS,gCAAgC,QAAQ,GAAG;AAAA,cACpD,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,YACjE;AACA,iBAAK,KAAK,YAAY,aAAa;AAAA,UACrC;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,cAAM,WAAW,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAIzE,YACE,SAAS,QAAQ,SAAS,mBAAmB,KAC7C,SAAS,QAAQ,SAAS,0BAA0B,KACpD,SAAS,QAAQ,SAAS,uBAAuB,GACjD;AACA,gBAAM;AAAA,QACR;AAGA,cAAM,sBAAqC;AAAA,UACzC,MAAM;AAAA,UACN;AAAA,UACA,YAAY;AAAA,UACZ,YAAY,QAAQ;AAAA,UACpB,OAAO;AAAA,QACT;AACA,aAAK,KAAK,YAAY,mBAAmB;AAAA,MAC3C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,KACA,oBACA,cACA,cAAuB,OACD;AACtB,UAAM,eAA8B;AAAA,MAClC,MAAM;AAAA,MACN;AAAA,MACA,YAAY;AAAA,MACZ,YAAY;AAAA,IACd;AACA,SAAK,KAAK,YAAY,YAAY;AAGlC,SAAK,QAAQ,IAAI,GAAG;AAGpB,UAAM,OAAO,MAAM,KAAK,UAAU,KAAK,WAAW;AAIlD,UAAM,aAAa,MAAM,sBAAsB,MAAM,GAAG;AAExD,IAAAA,QAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,OAAO,WAAW;AAAA,QAClB,gBAAgB,WAAW,SAAS;AAAA,MACtC;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AAGJ,QAAI,uBAAuB,UAAa,uBAAuB,IAAI;AAEjE,UAAI,CAAC,aAAa,YAAY,GAAG;AAC/B,cAAM,IAAI,MAAM,8DAA8D;AAAA,MAC
hF;AAEA,YAAM,qBAAoC;AAAA,QACxC,MAAM;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,YAAY;AAAA,MACd;AACA,WAAK,KAAK,YAAY,kBAAkB;AAExC,kBAAY,MAAM,KAAK,aAAa,eAAe,WAAW,UAAU,kBAAkB;AAAA,IAC5F;AAEA,WAAO;AAAA,MACL;AAAA,MACA,GAAI,WAAW,UAAU,UAAa,EAAE,OAAO,WAAW,MAAM;AAAA,MAChE,UAAU,WAAW;AAAA,MACrB,GAAI,cAAc,UAAa,EAAE,UAAU;AAAA,IAC7C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,UAAU,KAAa,cAAuB,OAAwB;AAClF,UAAM,YAAY,KAAK,IAAI;AAC3B,IAAAA,QAAO,MAAM,EAAE,KAAK,YAAY,GAAG,eAAe;AAElD,QAAI,aAAa;AACf,UAAI;AACF,cAAM,SAAS,MAAM,KAAK,aAAa,cAAc,GAAG;AACxD,cAAM,aAAa,KAAK,IAAI,IAAI;AAChC,QAAAA,QAAO;AAAA,UACL;AAAA,YACE;AAAA,YACA,aAAa;AAAA,YACb;AAAA,YACA,GAAG,iBAAiB,OAAO,MAAM,YAAY,GAAG;AAAA,UAClD;AAAA,UACA;AAAA,QACF;AACA,eAAO,OAAO;AAAA,MAChB,SAAS,OAAO;AAEd,cAAM,IAAI;AAAA,UACR,0BAA0B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAClF;AAAA,MACF;AAAA,IACF;AAGA,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,IAAY,KAAK;AAAA,QAC5C,SAAS;AAAA,QACT,SAAS;AAAA,UACP,cAAc;AAAA,QAChB;AAAA,MACF,CAAC;AAED,YAAM,aAAa,KAAK,IAAI,IAAI;AAChC,MAAAA,QAAO;AAAA,QACL;AAAA,UACE;AAAA,UACA,aAAa;AAAA,UACb;AAAA,UACA,GAAG,iBAAiB,SAAS,MAAM,YAAY,GAAG;AAAA,QACpD;AAAA,QACA;AAAA,MACF;AAEA,aAAO,SAAS;AAAA,IAClB,SAAS,OAAO;AACd,MAAAA,QAAO;AAAA,QACL,EAAE,KAAK,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,QACrE;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,mBAAmB,GAAG,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACnF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,KAAa,cAAuB,OAA0B;AACvF,QAAI;AAEF,UAAI,aAAa;AACf,cAAMC,UAAS,MAAM,KAAK,aAAa,cAAc,GAAG;AAExD,eAAOA,QAAO,MAAM,IAAI,CAAC,SAA+B;AACtD,cAAI,OAAO,SAAS,SAAU,QAAO;AACrC,iBAAO,KAAK;AAAA,QACd,CAAC;AAAA,MACH;AAEA,YAAM,SAAS,MAAM,KAAK,aAAa,MAAM,GAAG;AAIhD,YAAM,YAAY,OAAO,QAAQ,CAAC;AAClC,UAAI,CAAC,WAAW;AACd,cAAM,IAAI,MAAM,wCAAwC,GAAG,uBAAuB;AAAA,MACpF;AAEA,aAAO,UAAU;AAAA,IACnB,SAAS,OAAgB;AAEvB,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,MAAAD,QAAO,MAAM,EAAE,KAAK,OAAO,aAAa,GAAG,yBAAyB;AAGpE,YAAM,IAAI,MAAM,8BAA8B,GAAG,KAAK,YAAY,EAAE;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAc,MAAuB;AACxD,QAAI;AACF,YAAM,UAAU,IAAI,IAAI,IAAI,EAAE,SAAS,YAAY;AACnD,YAAM,UAAU,IAAI,IAAI,IAAI,EAAE,SAAS,YAAY;AACnD,aACE,YAAY,WAAW,QAAQ,SAAS,IAAI,OAAO,EAAE,KAAK,QAAQ,SAAS,IAAI,OAAO,EAAE;AAAA,IAE5F,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAsB;AAC1B,SAAK,UAAU;AACf,UAAM,KAAK,aAAa,KAAK;AAAA,EAC/B;AACF;","names":["logger","result"]}
+ {"version":3,"sources":["../src/crawl/intelligent-crawler.ts","../src/crawl/article-converter.ts","../src/crawl/markdown-utils.ts","../src/crawl/claude-client.ts"],"sourcesContent":["/**\n * Intelligent web crawler with natural language control\n * Two modes: Intelligent (Claude-driven) and Simple (BFS)\n */\n\nimport { EventEmitter } from 'node:events';\nimport axios from 'axios';\nimport { convertHtmlToMarkdown } from './article-converter.js';\nimport { PythonBridge, type CrawledLink } from './bridge.js';\nimport { ClaudeClient, type CrawlStrategy } from './claude-client.js';\nimport { createLogger, summarizePayload } from '../logging/index.js';\n\nconst logger = createLogger('crawler');\n\nexport interface CrawlOptions {\n crawlInstruction?: string; // Natural language: what to crawl\n extractInstruction?: string; // Natural language: what to extract\n maxPages?: number; // Max pages to crawl (default: 50)\n timeout?: number; // Per-page timeout in ms (default: 30000)\n simple?: boolean; // Force simple BFS mode\n useHeadless?: boolean; // Enable headless browser for JavaScript-rendered sites\n}\n\nexport interface CrawlResult {\n url: string;\n title?: string;\n markdown: string;\n extracted?: string;\n depth?: number;\n}\n\nexport interface CrawlProgress {\n type: 'start' | 'strategy' | 'page' | 'extraction' | 'complete' | 'error';\n pagesVisited: number;\n totalPages: number;\n currentUrl?: string;\n message?: string;\n error?: Error;\n}\n\n/**\n * Intelligent crawler that uses Claude CLI for strategy and extraction\n */\nexport class IntelligentCrawler extends EventEmitter {\n private readonly claudeClient: ClaudeClient;\n private readonly pythonBridge: PythonBridge;\n private readonly visited: Set<string>;\n private stopped: boolean;\n\n constructor() {\n super();\n this.claudeClient = new ClaudeClient();\n this.pythonBridge = new PythonBridge();\n this.visited = new Set();\n this.stopped = false;\n }\n\n /**\n * Crawl a website with intelligent or simple mode\n */\n async *crawl(seedUrl: string, options: CrawlOptions = {}): AsyncIterable<CrawlResult> {\n const { crawlInstruction, extractInstruction, maxPages = 50, simple = false } = options;\n\n this.visited.clear();\n this.stopped = false;\n\n logger.info(\n {\n seedUrl,\n maxPages,\n mode: simple\n ? 'simple'\n : crawlInstruction !== undefined && crawlInstruction !== ''\n ? 'intelligent'\n : 'simple',\n hasExtractInstruction: extractInstruction !== undefined,\n },\n 'Starting crawl'\n );\n\n const startProgress: CrawlProgress = {\n type: 'start',\n pagesVisited: 0,\n totalPages: maxPages,\n };\n this.emit('progress', startProgress);\n\n // Determine mode: intelligent (with crawl instruction) or simple (BFS)\n const useIntelligentMode = !simple && crawlInstruction !== undefined && crawlInstruction !== '';\n\n if (useIntelligentMode) {\n // TypeScript knows crawlInstruction is defined here due to useIntelligentMode check\n yield* this.crawlIntelligent(\n seedUrl,\n crawlInstruction,\n extractInstruction,\n maxPages,\n options.useHeadless ?? false\n );\n } else {\n yield* this.crawlSimple(seedUrl, extractInstruction, maxPages, options.useHeadless ?? 
false);\n }\n\n logger.info(\n {\n seedUrl,\n pagesVisited: this.visited.size,\n },\n 'Crawl complete'\n );\n\n // Warn if crawl discovered far fewer pages than requested\n if (this.visited.size === 1 && maxPages > 1) {\n const warningProgress: CrawlProgress = {\n type: 'error',\n pagesVisited: this.visited.size,\n totalPages: maxPages,\n message: `Warning: Only crawled 1 page despite maxPages=${String(maxPages)}. Link discovery may have failed. If using --fast mode, try without it for JavaScript-heavy sites.`,\n error: new Error('Low page discovery'),\n };\n this.emit('progress', warningProgress);\n }\n\n const completeProgress: CrawlProgress = {\n type: 'complete',\n pagesVisited: this.visited.size,\n totalPages: this.visited.size,\n };\n this.emit('progress', completeProgress);\n }\n\n /**\n * Intelligent mode: Use Claude to determine which URLs to crawl\n */\n private async *crawlIntelligent(\n seedUrl: string,\n crawlInstruction: string,\n extractInstruction: string | undefined,\n maxPages: number,\n useHeadless: boolean = false\n ): AsyncIterable<CrawlResult> {\n // Check if Claude CLI is available before attempting intelligent mode\n if (!ClaudeClient.isAvailable()) {\n throw new Error('Claude CLI not available: install Claude Code for intelligent crawling');\n }\n\n let strategy: CrawlStrategy;\n\n try {\n // Step 1: Fetch seed page HTML\n const strategyStartProgress: CrawlProgress = {\n type: 'strategy',\n pagesVisited: 0,\n totalPages: maxPages,\n currentUrl: seedUrl,\n message: 'Analyzing page structure with Claude...',\n };\n this.emit('progress', strategyStartProgress);\n\n const seedHtml = await this.fetchHtml(seedUrl, useHeadless);\n\n // Step 2: Ask Claude which URLs to crawl (pass seedUrl for relative URL resolution)\n strategy = await this.claudeClient.determineCrawlUrls(seedUrl, seedHtml, crawlInstruction);\n\n const strategyCompleteProgress: CrawlProgress = {\n type: 'strategy',\n pagesVisited: 0,\n totalPages: maxPages,\n message: `Claude identified ${String(strategy.urls.length)} URLs to crawl: ${strategy.reasoning}`,\n };\n this.emit('progress', strategyCompleteProgress);\n } catch (error) {\n // Re-throw strategy errors - do not fall back silently\n throw error instanceof Error ? error : new Error(String(error));\n }\n\n // Step 3: Crawl each URL from Claude's strategy\n let pagesVisited = 0;\n\n for (const url of strategy.urls) {\n if (this.stopped || pagesVisited >= maxPages) break;\n if (this.visited.has(url)) continue;\n\n try {\n const result = await this.crawlSinglePage(\n url,\n extractInstruction,\n pagesVisited,\n useHeadless\n );\n pagesVisited++;\n yield result;\n } catch (error) {\n const pageErrorProgress: CrawlProgress = {\n type: 'error',\n pagesVisited,\n totalPages: maxPages,\n currentUrl: url,\n error: error instanceof Error ? 
error : new Error(String(error)),\n };\n this.emit('progress', pageErrorProgress);\n }\n }\n }\n\n /**\n * Simple mode: BFS crawling with depth limit\n */\n private async *crawlSimple(\n seedUrl: string,\n extractInstruction: string | undefined,\n maxPages: number,\n useHeadless: boolean = false\n ): AsyncIterable<CrawlResult> {\n const queue: Array<{ url: string; depth: number }> = [{ url: seedUrl, depth: 0 }];\n const maxDepth = 2; // Default depth limit for simple mode\n let pagesVisited = 0;\n\n while (queue.length > 0 && pagesVisited < maxPages && !this.stopped) {\n const current = queue.shift();\n\n if (!current || this.visited.has(current.url) || current.depth > maxDepth) {\n continue;\n }\n\n try {\n const result = await this.crawlSinglePage(\n current.url,\n extractInstruction,\n pagesVisited,\n useHeadless\n );\n result.depth = current.depth;\n pagesVisited++;\n\n yield result;\n\n // Add links to queue if we haven't reached max depth\n if (current.depth < maxDepth) {\n try {\n const links = await this.extractLinks(current.url, useHeadless);\n\n if (links.length === 0) {\n logger.debug({ url: current.url }, 'No links found - page may be a leaf node');\n } else {\n logger.debug(\n { url: current.url, linkCount: links.length },\n 'Links extracted from page'\n );\n }\n\n for (const link of links) {\n if (!this.visited.has(link) && this.isSameDomain(seedUrl, link)) {\n queue.push({ url: link, depth: current.depth + 1 });\n }\n }\n } catch (error) {\n // Log link extraction failure but continue crawling other pages\n const errorProgress: CrawlProgress = {\n type: 'error',\n pagesVisited,\n totalPages: maxPages,\n currentUrl: current.url,\n message: `Failed to extract links from ${current.url}`,\n error: error instanceof Error ? error : new Error(String(error)),\n };\n this.emit('progress', errorProgress);\n }\n }\n } catch (error) {\n const errorObj = error instanceof Error ? 
error : new Error(String(error));\n\n // Re-throw non-recoverable errors (extraction failures, Claude CLI not available, headless failures)\n // These represent failures in user-requested functionality that should not be silently skipped\n if (\n errorObj.message.includes('Extraction failed') ||\n errorObj.message.includes('Claude CLI not available') ||\n errorObj.message.includes('Headless fetch failed')\n ) {\n throw errorObj;\n }\n\n // For recoverable errors (page fetch failures), emit progress and continue\n const simpleErrorProgress: CrawlProgress = {\n type: 'error',\n pagesVisited,\n totalPages: maxPages,\n currentUrl: current.url,\n error: errorObj,\n };\n this.emit('progress', simpleErrorProgress);\n }\n }\n }\n\n /**\n * Crawl a single page: fetch, convert to markdown, optionally extract\n */\n private async crawlSinglePage(\n url: string,\n extractInstruction: string | undefined,\n pagesVisited: number,\n useHeadless: boolean = false\n ): Promise<CrawlResult> {\n const pageProgress: CrawlProgress = {\n type: 'page',\n pagesVisited,\n totalPages: 0,\n currentUrl: url,\n };\n this.emit('progress', pageProgress);\n\n // Mark as visited\n this.visited.add(url);\n\n // Fetch HTML\n const html = await this.fetchHtml(url, useHeadless);\n\n // Convert to clean markdown using slurp-ai techniques\n // Note: convertHtmlToMarkdown throws on errors, no need to check success\n const conversion = await convertHtmlToMarkdown(html, url);\n\n logger.debug(\n {\n url,\n title: conversion.title,\n markdownLength: conversion.markdown.length,\n },\n 'Article converted to markdown'\n );\n\n let extracted: string | undefined;\n\n // Optional: Extract specific information using Claude\n if (extractInstruction !== undefined && extractInstruction !== '') {\n // Throw if extraction requested but Claude CLI isn't available\n if (!ClaudeClient.isAvailable()) {\n throw new Error('Claude CLI not available: install Claude Code for extraction');\n }\n\n const extractionProgress: CrawlProgress = {\n type: 'extraction',\n pagesVisited,\n totalPages: 0,\n currentUrl: url,\n };\n this.emit('progress', extractionProgress);\n\n extracted = await this.claudeClient.extractContent(conversion.markdown, extractInstruction);\n }\n\n return {\n url,\n ...(conversion.title !== undefined && { title: conversion.title }),\n markdown: conversion.markdown,\n ...(extracted !== undefined && { extracted }),\n };\n }\n\n /**\n * Fetch HTML content from a URL\n */\n private async fetchHtml(url: string, useHeadless: boolean = false): Promise<string> {\n const startTime = Date.now();\n logger.debug({ url, useHeadless }, 'Fetching HTML');\n\n if (useHeadless) {\n try {\n const result = await this.pythonBridge.fetchHeadless(url);\n const durationMs = Date.now() - startTime;\n logger.info(\n {\n url,\n useHeadless: true,\n durationMs,\n ...summarizePayload(result.html, 'raw-html', url),\n },\n 'Raw HTML fetched'\n );\n return result.html;\n } catch (error) {\n // Wrap with distinctive message so crawlSimple knows not to recover\n throw new Error(\n `Headless fetch failed: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n\n // Original axios implementation for static sites\n try {\n const response = await axios.get<string>(url, {\n timeout: 30000,\n headers: {\n 'User-Agent': 'Mozilla/5.0 (compatible; bluera-knowledge-crawler/1.0)',\n },\n });\n\n const durationMs = Date.now() - startTime;\n logger.info(\n {\n url,\n useHeadless: false,\n durationMs,\n ...summarizePayload(response.data, 'raw-html', url),\n },\n 'Raw HTML fetched'\n );\n\n return response.data;\n } catch (error) {\n logger.error(\n { url, error: error instanceof Error ? error.message : String(error) },\n 'Failed to fetch HTML'\n );\n throw new Error(\n `Failed to fetch ${url}: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Extract links from a page using Python bridge\n */\n private async extractLinks(url: string, useHeadless: boolean = false): Promise<string[]> {\n try {\n // Use headless mode for link extraction if enabled\n if (useHeadless) {\n const result = await this.pythonBridge.fetchHeadless(url);\n // Extract href strings from link objects (crawl4ai returns objects, not strings)\n return result.links.map((link: CrawledLink | string) => {\n if (typeof link === 'string') return link;\n return link.href;\n });\n }\n\n const result = await this.pythonBridge.crawl(url);\n\n // Validate response structure (handle potential runtime type mismatches)\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition -- TypeScript types claim pages exists but Python bridge may return invalid structure at runtime\n const firstPage = result.pages?.[0];\n if (!firstPage) {\n throw new Error(`Invalid crawl response structure for ${url}: missing pages array`);\n }\n\n return firstPage.links;\n } catch (error: unknown) {\n // Log the error for debugging\n const errorMessage = error instanceof Error ? error.message : String(error);\n logger.error({ url, error: errorMessage }, 'Failed to extract links');\n\n // Re-throw the error instead of silently swallowing it\n throw new Error(`Link extraction failed for ${url}: ${errorMessage}`);\n }\n }\n\n /**\n * Check if two URLs are from the same domain\n */\n private isSameDomain(url1: string, url2: string): boolean {\n try {\n const domain1 = new URL(url1).hostname.toLowerCase();\n const domain2 = new URL(url2).hostname.toLowerCase();\n return (\n domain1 === domain2 || domain1.endsWith(`.${domain2}`) || domain2.endsWith(`.${domain1}`)\n );\n } catch {\n return false;\n }\n }\n\n /**\n * Stop the crawler\n */\n async stop(): Promise<void> {\n this.stopped = true;\n await this.pythonBridge.stop();\n }\n}\n","/**\n * Article converter using @extractus/article-extractor and Turndown\n * Produces clean markdown from HTML using slurp-ai techniques\n */\n\nimport { extractFromHtml } from '@extractus/article-extractor';\nimport TurndownService from 'turndown';\nimport { gfm } from 'turndown-plugin-gfm';\nimport { preprocessHtmlForCodeBlocks, cleanupMarkdown } from './markdown-utils.js';\nimport { createLogger, truncateForLog } from '../logging/index.js';\n\nconst logger = createLogger('article-converter');\n\nexport interface ConversionResult {\n markdown: string;\n title?: string;\n}\n\n/**\n * Convert HTML to clean markdown using best practices from slurp-ai\n *\n * Pipeline:\n * 1. Extract main article content (strips navigation, ads, boilerplate)\n * 2. Preprocess HTML (handle MkDocs code blocks)\n * 3. Convert to markdown with Turndown + GFM\n * 4. 
Cleanup markdown (regex patterns)\n */\nexport async function convertHtmlToMarkdown(html: string, url: string): Promise<ConversionResult> {\n logger.debug({ url, htmlLength: html.length }, 'Starting HTML conversion');\n\n try {\n // Step 1: Extract main article content\n let articleHtml: string;\n let title: string | undefined;\n\n try {\n const article = await extractFromHtml(html, url);\n if (article?.content !== undefined && article.content !== '') {\n articleHtml = article.content;\n title = article.title !== undefined && article.title !== '' ? article.title : undefined;\n logger.debug(\n {\n url,\n title,\n extractedLength: articleHtml.length,\n usedFullHtml: false,\n },\n 'Article content extracted'\n );\n } else {\n // Fallback to full HTML if extraction fails\n articleHtml = html;\n logger.debug(\n { url, usedFullHtml: true },\n 'Article extraction returned empty, using full HTML'\n );\n }\n } catch (extractError) {\n // Fallback to full HTML if extraction fails\n articleHtml = html;\n logger.debug(\n {\n url,\n usedFullHtml: true,\n error: extractError instanceof Error ? extractError.message : String(extractError),\n },\n 'Article extraction failed, using full HTML'\n );\n }\n\n // Step 2: Preprocess HTML for code blocks\n const preprocessed = preprocessHtmlForCodeBlocks(articleHtml);\n\n // Step 3: Configure Turndown with custom rules\n const turndownService = new TurndownService({\n headingStyle: 'atx', // Use # style headings\n codeBlockStyle: 'fenced', // Use ``` style code blocks\n fence: '```',\n emDelimiter: '*',\n strongDelimiter: '**',\n linkStyle: 'inlined',\n });\n\n // Add GitHub Flavored Markdown support (tables, strikethrough, task lists)\n turndownService.use(gfm);\n\n // Custom rule for headings with anchors (from slurp-ai)\n turndownService.addRule('headingsWithAnchors', {\n filter: ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'],\n replacement(content: string, node: HTMLElement): string {\n const level = Number(node.nodeName.charAt(1));\n const hashes = '#'.repeat(level);\n const cleanContent = content\n .replace(/\\[\\]\\([^)]*\\)/g, '') // Remove empty links\n .replace(/\\s+/g, ' ') // Normalize whitespace\n .trim();\n return cleanContent !== '' ? `\\n\\n${hashes} ${cleanContent}\\n\\n` : '';\n },\n });\n\n // Convert to markdown\n const rawMarkdown = turndownService.turndown(preprocessed);\n\n // Step 4: Cleanup markdown with comprehensive regex patterns\n const markdown = cleanupMarkdown(rawMarkdown);\n\n logger.debug(\n {\n url,\n title,\n rawMarkdownLength: rawMarkdown.length,\n finalMarkdownLength: markdown.length,\n },\n 'HTML to markdown conversion complete'\n );\n\n // Log markdown preview at trace level\n logger.trace(\n {\n url,\n markdownPreview: truncateForLog(markdown, 1000),\n },\n 'Markdown content preview'\n );\n\n return {\n markdown,\n ...(title !== undefined && { title }),\n };\n } catch (error) {\n logger.error(\n {\n url,\n error: error instanceof Error ? error.message : String(error),\n },\n 'HTML to markdown conversion failed'\n );\n\n // Re-throw errors - do not return graceful degradation\n throw error instanceof Error ? 
error : new Error(String(error));\n }\n}\n","/**\n * Markdown conversion utilities ported from slurp-ai\n * Source: https://github.com/ratacat/slurp-ai\n *\n * These utilities handle complex documentation site patterns (MkDocs, Sphinx, etc.)\n * and produce clean, well-formatted markdown.\n */\n\nimport * as cheerio from 'cheerio';\n\n/**\n * Detect language from code element class names.\n * Handles various class naming patterns from different highlighters.\n */\nfunction detectLanguageFromClass(className: string | undefined): string {\n if (className === undefined || className === '') return '';\n\n // Common patterns: \"language-python\", \"lang-js\", \"highlight-python\", \"python\", \"hljs language-python\"\n const patterns = [\n /language-(\\w+)/i,\n /lang-(\\w+)/i,\n /highlight-(\\w+)/i,\n /hljs\\s+(\\w+)/i,\n /^(\\w+)$/i,\n ];\n\n for (const pattern of patterns) {\n const match = className.match(pattern);\n if (match?.[1] !== undefined) {\n const lang = match[1].toLowerCase();\n // Filter out common non-language classes\n if (!['hljs', 'highlight', 'code', 'pre', 'block', 'inline'].includes(lang)) {\n return lang;\n }\n }\n }\n\n return '';\n}\n\n/**\n * Escape HTML special characters for safe embedding in HTML.\n */\nfunction escapeHtml(text: string): string {\n return text\n .replace(/&/g, '&amp;')\n .replace(/</g, '&lt;')\n .replace(/>/g, '&gt;')\n .replace(/\"/g, '&quot;')\n .replace(/'/g, '&#039;');\n}\n\n/**\n * Preprocess HTML to handle MkDocs/Material theme code blocks.\n *\n * MkDocs wraps code in tables for line numbers:\n * <table><tbody><tr><td>line numbers</td><td><pre><code>code</code></pre></td></tr></tbody></table>\n *\n * This function converts them to standard <pre><code> blocks that Turndown handles correctly.\n * Also strips syntax highlighting spans and empty anchors from code.\n */\nexport function preprocessHtmlForCodeBlocks(html: string): string {\n if (!html || typeof html !== 'string') return html;\n\n const $ = cheerio.load(html);\n\n // Handle MkDocs/Material table-wrapped code blocks\n $('table').each((_i, table) => {\n const $table = $(table);\n\n // Check if this table contains a code block\n const $codeCell = $table.find('td pre code, td div pre code');\n\n if ($codeCell.length > 0) {\n // This is a code block table - extract the code\n const $pre = $codeCell.closest('pre');\n const $code = $codeCell.first();\n\n // Get language from class\n let language = detectLanguageFromClass($code.attr('class'));\n if (!language) {\n language = detectLanguageFromClass($pre.attr('class'));\n }\n\n // Get the text content, stripping all inner HTML tags\n const codeText = $code.text();\n\n // Create a clean pre > code block\n const cleanPre = `<pre><code class=\"language-${language}\">${escapeHtml(codeText)}</code></pre>`;\n\n // Replace the entire table with the clean code block\n $table.replaceWith(cleanPre);\n }\n });\n\n // Strip empty anchor tags used for line numbers\n $('pre a, code a').each((_i, anchor) => {\n const $anchor = $(anchor);\n if (!$anchor.text().trim()) {\n $anchor.remove();\n }\n });\n\n // Strip syntax highlighting spans inside code blocks, keeping only text\n $('pre span, code span').each((_i, span) => {\n const $span = $(span);\n $span.replaceWith($span.text());\n });\n\n // Handle standalone pre blocks that might have spans/anchors\n $('pre').each((_i, pre) => {\n const $pre = $(pre);\n // If this pre has a code child, it was already processed\n if ($pre.find('code').length === 0) {\n // Direct pre without code - get text content\n const 
text = $pre.text();\n const lang = detectLanguageFromClass($pre.attr('class'));\n $pre.html(`<code class=\"language-${lang}\">${escapeHtml(text)}</code>`);\n }\n });\n\n return $.html();\n}\n\n/**\n * Apply comprehensive cleanup rules to markdown content.\n *\n * Formatting rules:\n * - Double newlines between paragraphs and headings\n * - Double newlines before lists when preceded by normal text\n * - Single newlines between list items\n * - No blank lines inside code blocks\n */\nexport function cleanupMarkdown(markdown: string): string {\n if (!markdown) return '';\n\n const trimmed = markdown.trim();\n if (trimmed === '') return '';\n\n let result = trimmed;\n\n // 0. Fix broken headings where ## is on its own line followed by the text\n // Pattern: \"## \\n\\nSome text\" → \"## Some text\"\n result = result.replace(/^(#{1,6})\\s*\\n\\n+(\\S[^\\n]*)/gm, '$1 $2');\n\n // 0.5. Normalize multiple spaces after heading markers to single space\n // Pattern: \"## Subtitle\" → \"## Subtitle\"\n result = result.replace(/(#{1,6})\\s{2,}/g, '$1 ');\n\n // 1. Fix navigation links with excessive whitespace\n result = result.replace(/\\*\\s+\\[\\s*([^\\n]+?)\\s*\\]\\(([^)]+)\\)/g, '* [$1]($2)');\n\n // 2. Handle headings with specific newline requirements\n\n // Text followed by heading should have a single newline between them (no blank line)\n result = result.replace(/([^\\n])\\n\\n+(#\\s)/g, '$1\\n$2');\n\n // Add double newlines between text and next heading\n result = result.replace(/(Some text\\.)\\n(##\\s)/g, '$1\\n\\n$2');\n\n // Double newlines after a heading when followed by text\n result = result.replace(/(#{1,6}\\s[^\\n]+)\\n([^#\\n])/g, '$1\\n\\n$2');\n\n // Double newlines between headings\n result = result.replace(/(#{1,6}\\s[^\\n]+)\\n(#{1,6}\\s)/g, '$1\\n\\n$2');\n\n // 3. Lists - ensure all list items have single newlines only\n result = result.replace(/(\\* Item 1)\\n\\n+(\\* Item 2)\\n\\n+(\\* Item 3)/g, '$1\\n$2\\n$3');\n\n // 3.5. General list item spacing - ensure single newlines between list items\n result = result.replace(/(^\\*\\s[^\\n]+)\\n{2,}(^\\*\\s)/gm, '$1\\n$2');\n\n // 4. Clean up excessive blank lines (3+ newlines → 2 newlines)\n result = result.replace(/\\n{3,}/g, '\\n\\n');\n\n // 5. Code blocks - no blank lines after opening or before closing backticks\n result = result.replace(/(```[^\\n]*)\\n\\n+/g, '$1\\n');\n result = result.replace(/\\n\\n+```/g, '\\n```');\n\n // 6. Remove empty list items\n result = result.replace(/\\*\\s*\\n\\s*\\*/g, '*');\n\n // 7. Strip any remaining HTML tags that leaked through (common in MkDocs/Material)\n // Remove table structure tags\n result = result.replace(/<\\/?table[^>]*>/gi, '');\n result = result.replace(/<\\/?tbody[^>]*>/gi, '');\n result = result.replace(/<\\/?thead[^>]*>/gi, '');\n result = result.replace(/<\\/?tr[^>]*>/gi, '');\n result = result.replace(/<\\/?td[^>]*>/gi, '');\n result = result.replace(/<\\/?th[^>]*>/gi, '');\n\n // Remove empty anchor tags: <a></a> or <a id=\"...\"></a>\n result = result.replace(/<a[^>]*><\\/a>/gi, '');\n\n // Remove span tags (syntax highlighting remnants)\n result = result.replace(/<\\/?span[^>]*>/gi, '');\n\n // Remove div tags\n result = result.replace(/<\\/?div[^>]*>/gi, '');\n\n // Remove pre/code tags that leaked\n result = result.replace(/<\\/?pre[^>]*>/gi, '');\n result = result.replace(/<\\/?code[^>]*>/gi, '');\n\n // 8. Remove empty markdown links: [](url) and []()\n result = result.replace(/\\[\\]\\([^)]*\\)/g, '');\n\n // 9. 
Remove codelineno references that leaked into content\n // Pattern: [](_file.md#__codelineno-N-M)\n result = result.replace(/\\[\\]\\([^)]*#__codelineno-[^)]+\\)/g, '');\n\n // Also clean inline codelineno patterns\n result = result.replace(/\\[?\\]?\\([^)]*#__codelineno-[^)]*\\)/g, '');\n\n // 10. Clean up any double-escaped HTML entities that might result\n result = result.replace(/&amp;lt;/g, '&lt;');\n result = result.replace(/&amp;gt;/g, '&gt;');\n result = result.replace(/&amp;amp;/g, '&amp;');\n\n // 11. Final cleanup - normalize excessive whitespace from removed tags\n result = result.replace(/\\n{3,}/g, '\\n\\n');\n result = result.replace(/[ \\t]+\\n/g, '\\n');\n\n return result;\n}\n","/**\n * Claude CLI client for intelligent crawling and extraction\n * Uses `claude -p` programmatically to analyze page structure and extract content\n */\n\nimport { spawn, execSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { homedir } from 'node:os';\nimport { join } from 'node:path';\n\n/**\n * Schema for crawl strategy response from Claude\n */\nexport interface CrawlStrategy {\n urls: string[];\n reasoning: string;\n}\n\nconst CRAWL_STRATEGY_SCHEMA = {\n type: 'object',\n properties: {\n urls: {\n type: 'array',\n items: { type: 'string' },\n description: 'List of URLs to crawl based on the instruction',\n },\n reasoning: {\n type: 'string',\n description: 'Brief explanation of why these URLs were selected',\n },\n },\n required: ['urls', 'reasoning'],\n};\n\n/**\n * Client for interacting with Claude Code CLI\n */\nexport class ClaudeClient {\n private readonly timeout: number;\n private static availabilityChecked = false;\n private static available = false;\n private static claudePath: string | null = null;\n\n /**\n * Get the path to the Claude CLI binary\n * Checks in order:\n * 1. CLAUDE_BIN environment variable (explicit override)\n * 2. ~/.claude/local/claude (newer installation location)\n * 3. ~/.local/bin/claude (standard installation location)\n * 4. 
'claude' in PATH (custom installations)\n */\n static getClaudePath(): string | null {\n // Check environment variable override\n const envPath = process.env['CLAUDE_BIN'];\n if (envPath !== undefined && envPath !== '' && existsSync(envPath)) {\n return envPath;\n }\n\n // Check ~/.claude/local/claude (newer location)\n const claudeLocalPath = join(homedir(), '.claude', 'local', 'claude');\n if (existsSync(claudeLocalPath)) {\n return claudeLocalPath;\n }\n\n // Check ~/.local/bin/claude (standard location)\n const localBinPath = join(homedir(), '.local', 'bin', 'claude');\n if (existsSync(localBinPath)) {\n return localBinPath;\n }\n\n // Check if 'claude' is in PATH (custom installations, uses 'command -v' which handles aliases)\n try {\n const result = execSync('command -v claude', { stdio: ['pipe', 'pipe', 'ignore'] });\n const path = result.toString().trim();\n if (path) {\n return path;\n }\n } catch {\n // Not in PATH\n }\n\n return null;\n }\n\n /**\n * Check if Claude CLI is available\n * Result is cached after first check for performance\n */\n static isAvailable(): boolean {\n if (!ClaudeClient.availabilityChecked) {\n ClaudeClient.claudePath = ClaudeClient.getClaudePath();\n ClaudeClient.available = ClaudeClient.claudePath !== null;\n ClaudeClient.availabilityChecked = true;\n }\n return ClaudeClient.available;\n }\n\n /**\n * Get the cached Claude path (call isAvailable first)\n */\n static getCachedPath(): string | null {\n return ClaudeClient.claudePath;\n }\n\n /**\n * Reset availability cache (for testing)\n */\n static resetAvailabilityCache(): void {\n ClaudeClient.availabilityChecked = false;\n ClaudeClient.available = false;\n }\n\n constructor(options: { timeout?: number } = {}) {\n this.timeout = options.timeout ?? 30000; // 30s default\n }\n\n /**\n * Determine which URLs to crawl based on natural language instruction\n *\n * @param seedUrl - The URL of the seed page (for resolving relative URLs)\n * @param seedHtml - HTML content of the seed page\n * @param instruction - Natural language crawl instruction (e.g., \"scrape all Getting Started pages\")\n * @returns List of URLs to crawl with reasoning\n */\n async determineCrawlUrls(\n seedUrl: string,\n seedHtml: string,\n instruction: string\n ): Promise<CrawlStrategy> {\n const prompt = `You are analyzing a webpage to determine which pages to crawl based on the user's instruction.\n\nBase URL: ${seedUrl}\n\nInstruction: ${instruction}\n\nWebpage HTML (analyze the navigation structure, links, and content):\n${this.truncateHtml(seedHtml, 50000)}\n\nBased on the instruction, extract and return a list of absolute URLs that should be crawled. When you encounter relative URLs (starting with \"/\" or without a protocol), resolve them against the Base URL. For example, if Base URL is \"https://example.com/docs\" and you see href=\"/docs/hooks\", return \"https://example.com/docs/hooks\".\n\nLook for navigation menus, sidebars, headers, and link structures that match the instruction.\n\nReturn only URLs that are relevant to the instruction. 
If the instruction mentions specific sections (e.g., \"Getting Started\"), find links in those sections.`;\n\n try {\n const result = await this.callClaude(prompt, CRAWL_STRATEGY_SCHEMA);\n const rawParsed: unknown = JSON.parse(result);\n\n // Claude CLI with --json-schema returns wrapper: {type, result, structured_output: {...}}\n // Extract structured_output if present, otherwise use raw response\n const parsed = this.extractStructuredOutput(rawParsed);\n\n // Validate and narrow type\n if (\n typeof parsed !== 'object' ||\n parsed === null ||\n !('urls' in parsed) ||\n !('reasoning' in parsed) ||\n !Array.isArray(parsed.urls) ||\n parsed.urls.length === 0 ||\n typeof parsed.reasoning !== 'string' ||\n !parsed.urls.every((url) => typeof url === 'string')\n ) {\n throw new Error('Claude returned invalid crawl strategy');\n }\n\n // Type is now properly narrowed - urls is string[] after validation\n return { urls: parsed.urls, reasoning: parsed.reasoning };\n } catch (error) {\n throw new Error(\n `Failed to determine crawl strategy: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Extract specific information from markdown content using natural language\n *\n * @param markdown - Page content in markdown format\n * @param instruction - Natural language extraction instruction (e.g., \"extract pricing info\")\n * @returns Extracted information as text\n */\n async extractContent(markdown: string, instruction: string): Promise<string> {\n const prompt = `${instruction}\n\nContent to analyze:\n${this.truncateMarkdown(markdown, 100000)}`;\n\n try {\n const result = await this.callClaude(prompt);\n return result.trim();\n } catch (error) {\n throw new Error(\n `Failed to extract content: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n }\n\n /**\n * Call Claude CLI with a prompt\n *\n * @param prompt - The prompt to send to Claude\n * @param jsonSchema - Optional JSON schema for structured output\n * @returns Claude's response as a string\n */\n private async callClaude(prompt: string, jsonSchema?: Record<string, unknown>): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n // Ensure we have Claude path\n const claudePath = ClaudeClient.getCachedPath();\n if (claudePath === null) {\n reject(new Error('Claude CLI not available'));\n return;\n }\n\n const args = ['-p'];\n\n // Add JSON schema if provided\n if (jsonSchema) {\n args.push('--json-schema', JSON.stringify(jsonSchema));\n args.push('--output-format', 'json');\n }\n\n const proc = spawn(claudePath, args, {\n stdio: ['pipe', 'pipe', 'pipe'],\n cwd: process.cwd(),\n env: { ...process.env },\n });\n\n let stdout = '';\n let stderr = '';\n let timeoutId: NodeJS.Timeout | undefined;\n\n // Set timeout\n if (this.timeout > 0) {\n timeoutId = setTimeout(() => {\n proc.kill('SIGTERM');\n reject(new Error(`Claude CLI timed out after ${String(this.timeout)}ms`));\n }, this.timeout);\n }\n\n proc.stdout.on('data', (chunk: Buffer) => {\n stdout += chunk.toString();\n });\n\n proc.stderr.on('data', (chunk: Buffer) => {\n stderr += chunk.toString();\n });\n\n proc.on('close', (code: number | null) => {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n\n if (code === 0) {\n resolve(stdout.trim());\n } else {\n reject(\n new Error(`Claude CLI exited with code ${String(code)}${stderr ? 
`: ${stderr}` : ''}`)\n );\n }\n });\n\n proc.on('error', (err) => {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n reject(new Error(`Failed to spawn Claude CLI: ${err.message}`));\n });\n\n // Write prompt to stdin\n proc.stdin.write(prompt);\n proc.stdin.end();\n });\n }\n\n /**\n * Truncate HTML to a maximum length (keep important parts)\n */\n private truncateHtml(html: string, maxLength: number): string {\n if (html.length <= maxLength) return html;\n\n // Try to keep the beginning (usually has navigation)\n return `${html.substring(0, maxLength)}\\n\\n[... HTML truncated ...]`;\n }\n\n /**\n * Truncate markdown to a maximum length\n */\n private truncateMarkdown(markdown: string, maxLength: number): string {\n if (markdown.length <= maxLength) return markdown;\n\n return `${markdown.substring(0, maxLength)}\\n\\n[... content truncated ...]`;\n }\n\n /**\n * Type guard to check if value is a record (plain object)\n */\n private isRecord(value: unknown): value is Record<string, unknown> {\n return typeof value === 'object' && value !== null && !Array.isArray(value);\n }\n\n /**\n * Extract structured_output from Claude CLI wrapper format if present.\n * Claude CLI with --json-schema returns: {type, result, structured_output: {...}}\n * This method extracts the inner structured_output, or returns the raw value if not wrapped.\n */\n private extractStructuredOutput(rawParsed: unknown): unknown {\n if (this.isRecord(rawParsed) && 'structured_output' in rawParsed) {\n const structuredOutput = rawParsed['structured_output'];\n if (typeof structuredOutput === 'object') {\n return structuredOutput;\n }\n }\n return rawParsed;\n }\n}\n"],"mappings":";;;;;;;;AAKA,SAAS,oBAAoB;AAC7B,OAAO,WAAW;;;ACDlB,SAAS,uBAAuB;AAChC,OAAO,qBAAqB;AAC5B,SAAS,WAAW;;;ACCpB,YAAY,aAAa;AAMzB,SAAS,wBAAwB,WAAuC;AACtE,MAAI,cAAc,UAAa,cAAc,GAAI,QAAO;AAGxD,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,aAAW,WAAW,UAAU;AAC9B,UAAM,QAAQ,UAAU,MAAM,OAAO;AACrC,QAAI,QAAQ,CAAC,MAAM,QAAW;AAC5B,YAAM,OAAO,MAAM,CAAC,EAAE,YAAY;AAElC,UAAI,CAAC,CAAC,QAAQ,aAAa,QAAQ,OAAO,SAAS,QAAQ,EAAE,SAAS,IAAI,GAAG;AAC3E,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,WAAW,MAAsB;AACxC,SAAO,KACJ,QAAQ,MAAM,OAAO,EACrB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,MAAM,EACpB,QAAQ,MAAM,QAAQ,EACtB,QAAQ,MAAM,QAAQ;AAC3B;AAWO,SAAS,4BAA4B,MAAsB;AAChE,MAAI,CAAC,QAAQ,OAAO,SAAS,SAAU,QAAO;AAE9C,QAAM,IAAY,aAAK,IAAI;AAG3B,IAAE,OAAO,EAAE,KAAK,CAAC,IAAI,UAAU;AAC7B,UAAM,SAAS,EAAE,KAAK;AAGtB,UAAM,YAAY,OAAO,KAAK,8BAA8B;AAE5D,QAAI,UAAU,SAAS,GAAG;AAExB,YAAM,OAAO,UAAU,QAAQ,KAAK;AACpC,YAAM,QAAQ,UAAU,MAAM;AAG9B,UAAI,WAAW,wBAAwB,MAAM,KAAK,OAAO,CAAC;AAC1D,UAAI,CAAC,UAAU;AACb,mBAAW,wBAAwB,KAAK,KAAK,OAAO,CAAC;AAAA,MACvD;AAGA,YAAM,WAAW,MAAM,KAAK;AAG5B,YAAM,WAAW,8BAA8B,QAAQ,KAAK,WAAW,QAAQ,CAAC;AAGhF,aAAO,YAAY,QAAQ;AAAA,IAC7B;AAAA,EACF,CAAC;AAGD,IAAE,eAAe,EAAE,KAAK,CAAC,IAAI,WAAW;AACtC,UAAM,UAAU,EAAE,MAAM;AACxB,QAAI,CAAC,QAAQ,KAAK,EAAE,KAAK,GAAG;AAC1B,cAAQ,OAAO;AAAA,IACjB;AAAA,EACF,CAAC;AAGD,IAAE,qBAAqB,EAAE,KAAK,CAAC,IAAI,SAAS;AAC1C,UAAM,QAAQ,EAAE,IAAI;AACpB,UAAM,YAAY,MAAM,KAAK,CAAC;AAAA,EAChC,CAAC;AAGD,IAAE,KAAK,EAAE,KAAK,CAAC,IAAI,QAAQ;AACzB,UAAM,OAAO,EAAE,GAAG;AAElB,QAAI,KAAK,KAAK,MAAM,EAAE,WAAW,GAAG;AAElC,YAAM,OAAO,KAAK,KAAK;AACvB,YAAM,OAAO,wBAAwB,KAAK,KAAK,OAAO,CAAC;AACvD,WAAK,KAAK,yBAAyB,IAAI,KAAK,WAAW,IAAI,CAAC,SAAS;AAAA,IACvE;AAAA,EACF,CAAC;AAED,SAAO,EAAE,KAAK;AAChB;AAWO,SAAS,gBAAgB,UAA0B;AACxD,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,UAAU,SAAS,KAAK;AAC9B,MAAI,YAAY,GAAI,QAAO;AAE3B,MAAI,SAAS;AAIb,WAAS,OAAO,QAAQ,iCAAiC,OAAO;AAIhE,WAAS,OAAO,QAAQ,mBAAm
B,KAAK;AAGhD,WAAS,OAAO,QAAQ,wCAAwC,YAAY;AAK5E,WAAS,OAAO,QAAQ,sBAAsB,QAAQ;AAGtD,WAAS,OAAO,QAAQ,0BAA0B,UAAU;AAG5D,WAAS,OAAO,QAAQ,+BAA+B,UAAU;AAGjE,WAAS,OAAO,QAAQ,iCAAiC,UAAU;AAGnE,WAAS,OAAO,QAAQ,gDAAgD,YAAY;AAGpF,WAAS,OAAO,QAAQ,gCAAgC,QAAQ;AAGhE,WAAS,OAAO,QAAQ,WAAW,MAAM;AAGzC,WAAS,OAAO,QAAQ,qBAAqB,MAAM;AACnD,WAAS,OAAO,QAAQ,aAAa,OAAO;AAG5C,WAAS,OAAO,QAAQ,iBAAiB,GAAG;AAI5C,WAAS,OAAO,QAAQ,qBAAqB,EAAE;AAC/C,WAAS,OAAO,QAAQ,qBAAqB,EAAE;AAC/C,WAAS,OAAO,QAAQ,qBAAqB,EAAE;AAC/C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAC5C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAC5C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAG5C,WAAS,OAAO,QAAQ,mBAAmB,EAAE;AAG7C,WAAS,OAAO,QAAQ,oBAAoB,EAAE;AAG9C,WAAS,OAAO,QAAQ,mBAAmB,EAAE;AAG7C,WAAS,OAAO,QAAQ,mBAAmB,EAAE;AAC7C,WAAS,OAAO,QAAQ,oBAAoB,EAAE;AAG9C,WAAS,OAAO,QAAQ,kBAAkB,EAAE;AAI5C,WAAS,OAAO,QAAQ,qCAAqC,EAAE;AAG/D,WAAS,OAAO,QAAQ,uCAAuC,EAAE;AAGjE,WAAS,OAAO,QAAQ,aAAa,MAAM;AAC3C,WAAS,OAAO,QAAQ,aAAa,MAAM;AAC3C,WAAS,OAAO,QAAQ,cAAc,OAAO;AAG7C,WAAS,OAAO,QAAQ,WAAW,MAAM;AACzC,WAAS,OAAO,QAAQ,aAAa,IAAI;AAEzC,SAAO;AACT;;;ADrNA,IAAM,SAAS,aAAa,mBAAmB;AAgB/C,eAAsB,sBAAsB,MAAc,KAAwC;AAChG,SAAO,MAAM,EAAE,KAAK,YAAY,KAAK,OAAO,GAAG,0BAA0B;AAEzE,MAAI;AAEF,QAAI;AACJ,QAAI;AAEJ,QAAI;AACF,YAAM,UAAU,MAAM,gBAAgB,MAAM,GAAG;AAC/C,UAAI,SAAS,YAAY,UAAa,QAAQ,YAAY,IAAI;AAC5D,sBAAc,QAAQ;AACtB,gBAAQ,QAAQ,UAAU,UAAa,QAAQ,UAAU,KAAK,QAAQ,QAAQ;AAC9E,eAAO;AAAA,UACL;AAAA,YACE;AAAA,YACA;AAAA,YACA,iBAAiB,YAAY;AAAA,YAC7B,cAAc;AAAA,UAChB;AAAA,UACA;AAAA,QACF;AAAA,MACF,OAAO;AAEL,sBAAc;AACd,eAAO;AAAA,UACL,EAAE,KAAK,cAAc,KAAK;AAAA,UAC1B;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,cAAc;AAErB,oBAAc;AACd,aAAO;AAAA,QACL;AAAA,UACE;AAAA,UACA,cAAc;AAAA,UACd,OAAO,wBAAwB,QAAQ,aAAa,UAAU,OAAO,YAAY;AAAA,QACnF;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,UAAM,eAAe,4BAA4B,WAAW;AAG5D,UAAM,kBAAkB,IAAI,gBAAgB;AAAA,MAC1C,cAAc;AAAA;AAAA,MACd,gBAAgB;AAAA;AAAA,MAChB,OAAO;AAAA,MACP,aAAa;AAAA,MACb,iBAAiB;AAAA,MACjB,WAAW;AAAA,IACb,CAAC;AAGD,oBAAgB,IAAI,GAAG;AAGvB,oBAAgB,QAAQ,uBAAuB;AAAA,MAC7C,QAAQ,CAAC,MAAM,MAAM,MAAM,MAAM,MAAM,IAAI;AAAA,MAC3C,YAAY,SAAiB,MAA2B;AACtD,cAAM,QAAQ,OAAO,KAAK,SAAS,OAAO,CAAC,CAAC;AAC5C,cAAM,SAAS,IAAI,OAAO,KAAK;AAC/B,cAAM,eAAe,QAClB,QAAQ,kBAAkB,EAAE,EAC5B,QAAQ,QAAQ,GAAG,EACnB,KAAK;AACR,eAAO,iBAAiB,KAAK;AAAA;AAAA,EAAO,MAAM,IAAI,YAAY;AAAA;AAAA,IAAS;AAAA,MACrE;AAAA,IACF,CAAC;AAGD,UAAM,cAAc,gBAAgB,SAAS,YAAY;AAGzD,UAAM,WAAW,gBAAgB,WAAW;AAE5C,WAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA;AAAA,QACA,mBAAmB,YAAY;AAAA,QAC/B,qBAAqB,SAAS;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAGA,WAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,iBAAiB,eAAe,UAAU,GAAI;AAAA,MAChD;AAAA,MACA;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,GAAI,UAAU,UAAa,EAAE,MAAM;AAAA,IACrC;AAAA,EACF,SAAS,OAAO;AACd,WAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D;AAAA,MACA;AAAA,IACF;AAGA,UAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,EAChE;AACF;;;AExIA,SAAS,OAAO,gBAAgB;AAChC,SAAS,kBAAkB;AAC3B,SAAS,eAAe;AACxB,SAAS,YAAY;AAUrB,IAAM,wBAAwB;AAAA,EAC5B,MAAM;AAAA,EACN,YAAY;AAAA,IACV,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,OAAO,EAAE,MAAM,SAAS;AAAA,MACxB,aAAa;AAAA,IACf;AAAA,IACA,WAAW;AAAA,MACT,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,QAAQ,WAAW;AAChC;AAKO,IAAM,eAAN,MAAM,cAAa;AAAA,EACP;AAAA,EACjB,OAAe,sBAAsB;AAAA,EACrC,OAAe,YAAY;AAAA,EAC3B,OAAe,aAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU3C,OAAO,gBAA+B;AAEpC,UAAM,UAAU,QAAQ,IAAI,YAAY;AACxC,QAAI,YAAY,UAAa,YAAY,MAAM,WAAW,OAAO,GAAG;AAClE,aAAO;AAAA,IACT;AAGA,UAAM,kBAAkB,KAAK,QAAQ,GAAG,WAAW,SAAS,QAAQ;AACpE,QAAI,WAAW,eAAe,GAAG;AAC/B,aAAO;AAAA,IACT;AAGA,UAAM,eAAe,KAAK,QAAQ,GAAG,UAAU,OAAO,QAAQ;AAC9D,QAAI,WAAW,YAAY,GAAG;AAC5B,aAAO;AAAA,IACT;AAGA,QAAI;AACF,YAAM,SAAS,SAAS,qBAAqB,EAAE,OAA
O,CAAC,QAAQ,QAAQ,QAAQ,EAAE,CAAC;AAClF,YAAM,OAAO,OAAO,SAAS,EAAE,KAAK;AACpC,UAAI,MAAM;AACR,eAAO;AAAA,MACT;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cAAuB;AAC5B,QAAI,CAAC,cAAa,qBAAqB;AACrC,oBAAa,aAAa,cAAa,cAAc;AACrD,oBAAa,YAAY,cAAa,eAAe;AACrD,oBAAa,sBAAsB;AAAA,IACrC;AACA,WAAO,cAAa;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,gBAA+B;AACpC,WAAO,cAAa;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,yBAA+B;AACpC,kBAAa,sBAAsB;AACnC,kBAAa,YAAY;AAAA,EAC3B;AAAA,EAEA,YAAY,UAAgC,CAAC,GAAG;AAC9C,SAAK,UAAU,QAAQ,WAAW;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,mBACJ,SACA,UACA,aACwB;AACxB,UAAM,SAAS;AAAA;AAAA,YAEP,OAAO;AAAA;AAAA,eAEJ,WAAW;AAAA;AAAA;AAAA,EAGxB,KAAK,aAAa,UAAU,GAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQhC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,WAAW,QAAQ,qBAAqB;AAClE,YAAM,YAAqB,KAAK,MAAM,MAAM;AAI5C,YAAM,SAAS,KAAK,wBAAwB,SAAS;AAGrD,UACE,OAAO,WAAW,YAClB,WAAW,QACX,EAAE,UAAU,WACZ,EAAE,eAAe,WACjB,CAAC,MAAM,QAAQ,OAAO,IAAI,KAC1B,OAAO,KAAK,WAAW,KACvB,OAAO,OAAO,cAAc,YAC5B,CAAC,OAAO,KAAK,MAAM,CAAC,QAAQ,OAAO,QAAQ,QAAQ,GACnD;AACA,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AAGA,aAAO,EAAE,MAAM,OAAO,MAAM,WAAW,OAAO,UAAU;AAAA,IAC1D,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,uCAAuC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/F;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,eAAe,UAAkB,aAAsC;AAC3E,UAAM,SAAS,GAAG,WAAW;AAAA;AAAA;AAAA,EAG/B,KAAK,iBAAiB,UAAU,GAAM,CAAC;AAErC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,WAAW,MAAM;AAC3C,aAAO,OAAO,KAAK;AAAA,IACrB,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACtF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,WAAW,QAAgB,YAAuD;AAC9F,WAAO,IAAI,QAAgB,CAAC,SAAS,WAAW;AAE9C,YAAM,aAAa,cAAa,cAAc;AAC9C,UAAI,eAAe,MAAM;AACvB,eAAO,IAAI,MAAM,0BAA0B,CAAC;AAC5C;AAAA,MACF;AAEA,YAAM,OAAO,CAAC,IAAI;AAGlB,UAAI,YAAY;AACd,aAAK,KAAK,iBAAiB,KAAK,UAAU,UAAU,CAAC;AACrD,aAAK,KAAK,mBAAmB,MAAM;AAAA,MACrC;AAEA,YAAM,OAAO,MAAM,YAAY,MAAM;AAAA,QACnC,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,QAC9B,KAAK,QAAQ,IAAI;AAAA,QACjB,KAAK,EAAE,GAAG,QAAQ,IAAI;AAAA,MACxB,CAAC;AAED,UAAI,SAAS;AACb,UAAI,SAAS;AACb,UAAI;AAGJ,UAAI,KAAK,UAAU,GAAG;AACpB,oBAAY,WAAW,MAAM;AAC3B,eAAK,KAAK,SAAS;AACnB,iBAAO,IAAI,MAAM,8BAA8B,OAAO,KAAK,OAAO,CAAC,IAAI,CAAC;AAAA,QAC1E,GAAG,KAAK,OAAO;AAAA,MACjB;AAEA,WAAK,OAAO,GAAG,QAAQ,CAAC,UAAkB;AACxC,kBAAU,MAAM,SAAS;AAAA,MAC3B,CAAC;AAED,WAAK,OAAO,GAAG,QAAQ,CAAC,UAAkB;AACxC,kBAAU,MAAM,SAAS;AAAA,MAC3B,CAAC;AAED,WAAK,GAAG,SAAS,CAAC,SAAwB;AACxC,YAAI,cAAc,QAAW;AAC3B,uBAAa,SAAS;AAAA,QACxB;AAEA,YAAI,SAAS,GAAG;AACd,kBAAQ,OAAO,KAAK,CAAC;AAAA,QACvB,OAAO;AACL;AAAA,YACE,IAAI,MAAM,+BAA+B,OAAO,IAAI,CAAC,GAAG,SAAS,KAAK,MAAM,KAAK,EAAE,EAAE;AAAA,UACvF;AAAA,QACF;AAAA,MACF,CAAC;AAED,WAAK,GAAG,SAAS,CAAC,QAAQ;AACxB,YAAI,cAAc,QAAW;AAC3B,uBAAa,SAAS;AAAA,QACxB;AACA,eAAO,IAAI,MAAM,+BAA+B,IAAI,OAAO,EAAE,CAAC;AAAA,MAChE,CAAC;AAGD,WAAK,MAAM,MAAM,MAAM;AACvB,WAAK,MAAM,IAAI;AAAA,IACjB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAc,WAA2B;AAC5D,QAAI,KAAK,UAAU,UAAW,QAAO;AAGrC,WAAO,GAAG,KAAK,UAAU,GAAG,SAAS,CAAC;AAAA;AAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAAkB,WAA2B;AACpE,QAAI,SAAS,UAAU,UAAW,QAAO;AAEzC,WAAO,GAAG,SAAS,UAAU,GAAG,SAAS,CAAC;AAAA;AAAA;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,OAAkD;AACjE,WAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,wBAAwB,WAA6B;AAC3D,QAAI,KAAK,SAAS,SAAS,KAAK,uBAAuB,WAAW;AAChE,YAAM,mBAAmB,UAAU,mBAAmB;AACtD,UAAI,OAAO,qBAAqB,UAAU;AACxC,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;;;AH/SA,IAAMA,UAAS,aAAa,SAAS;AA+B9B,IAAM,qBAAN,cAAiC,aAAa;AAAA,EAClC;AAAA,EACA;AAAA,EACA
;AAAA,EACT;AAAA,EAER,cAAc;AACZ,UAAM;AACN,SAAK,eAAe,IAAI,aAAa;AACrC,SAAK,eAAe,IAAI,aAAa;AACrC,SAAK,UAAU,oBAAI,IAAI;AACvB,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,MAAM,SAAiB,UAAwB,CAAC,GAA+B;AACpF,UAAM,EAAE,kBAAkB,oBAAoB,WAAW,IAAI,SAAS,MAAM,IAAI;AAEhF,SAAK,QAAQ,MAAM;AACnB,SAAK,UAAU;AAEf,IAAAA,QAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA;AAAA,QACA,MAAM,SACF,WACA,qBAAqB,UAAa,qBAAqB,KACrD,gBACA;AAAA,QACN,uBAAuB,uBAAuB;AAAA,MAChD;AAAA,MACA;AAAA,IACF;AAEA,UAAM,gBAA+B;AAAA,MACnC,MAAM;AAAA,MACN,cAAc;AAAA,MACd,YAAY;AAAA,IACd;AACA,SAAK,KAAK,YAAY,aAAa;AAGnC,UAAM,qBAAqB,CAAC,UAAU,qBAAqB,UAAa,qBAAqB;AAE7F,QAAI,oBAAoB;AAEtB,aAAO,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ,eAAe;AAAA,MACzB;AAAA,IACF,OAAO;AACL,aAAO,KAAK,YAAY,SAAS,oBAAoB,UAAU,QAAQ,eAAe,KAAK;AAAA,IAC7F;AAEA,IAAAA,QAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,cAAc,KAAK,QAAQ;AAAA,MAC7B;AAAA,MACA;AAAA,IACF;AAGA,QAAI,KAAK,QAAQ,SAAS,KAAK,WAAW,GAAG;AAC3C,YAAM,kBAAiC;AAAA,QACrC,MAAM;AAAA,QACN,cAAc,KAAK,QAAQ;AAAA,QAC3B,YAAY;AAAA,QACZ,SAAS,iDAAiD,OAAO,QAAQ,CAAC;AAAA,QAC1E,OAAO,IAAI,MAAM,oBAAoB;AAAA,MACvC;AACA,WAAK,KAAK,YAAY,eAAe;AAAA,IACvC;AAEA,UAAM,mBAAkC;AAAA,MACtC,MAAM;AAAA,MACN,cAAc,KAAK,QAAQ;AAAA,MAC3B,YAAY,KAAK,QAAQ;AAAA,IAC3B;AACA,SAAK,KAAK,YAAY,gBAAgB;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,iBACb,SACA,kBACA,oBACA,UACA,cAAuB,OACK;AAE5B,QAAI,CAAC,aAAa,YAAY,GAAG;AAC/B,YAAM,IAAI,MAAM,wEAAwE;AAAA,IAC1F;AAEA,QAAI;AAEJ,QAAI;AAEF,YAAM,wBAAuC;AAAA,QAC3C,MAAM;AAAA,QACN,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,YAAY;AAAA,QACZ,SAAS;AAAA,MACX;AACA,WAAK,KAAK,YAAY,qBAAqB;AAE3C,YAAM,WAAW,MAAM,KAAK,UAAU,SAAS,WAAW;AAG1D,iBAAW,MAAM,KAAK,aAAa,mBAAmB,SAAS,UAAU,gBAAgB;AAEzF,YAAM,2BAA0C;AAAA,QAC9C,MAAM;AAAA,QACN,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,SAAS,qBAAqB,OAAO,SAAS,KAAK,MAAM,CAAC,mBAAmB,SAAS,SAAS;AAAA,MACjG;AACA,WAAK,KAAK,YAAY,wBAAwB;AAAA,IAChD,SAAS,OAAO;AAEd,YAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,IAChE;AAGA,QAAI,eAAe;AAEnB,eAAW,OAAO,SAAS,MAAM;AAC/B,UAAI,KAAK,WAAW,gBAAgB,SAAU;AAC9C,UAAI,KAAK,QAAQ,IAAI,GAAG,EAAG;AAE3B,UAAI;AACF,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA;AACA,cAAM;AAAA,MACR,SAAS,OAAO;AACd,cAAM,oBAAmC;AAAA,UACvC,MAAM;AAAA,UACN;AAAA,UACA,YAAY;AAAA,UACZ,YAAY;AAAA,UACZ,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,QACjE;AACA,aAAK,KAAK,YAAY,iBAAiB;AAAA,MACzC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,YACb,SACA,oBACA,UACA,cAAuB,OACK;AAC5B,UAAM,QAA+C,CAAC,EAAE,KAAK,SAAS,OAAO,EAAE,CAAC;AAChF,UAAM,WAAW;AACjB,QAAI,eAAe;AAEnB,WAAO,MAAM,SAAS,KAAK,eAAe,YAAY,CAAC,KAAK,SAAS;AACnE,YAAM,UAAU,MAAM,MAAM;AAE5B,UAAI,CAAC,WAAW,KAAK,QAAQ,IAAI,QAAQ,GAAG,KAAK,QAAQ,QAAQ,UAAU;AACzE;AAAA,MACF;AAEA,UAAI;AACF,cAAM,SAAS,MAAM,KAAK;AAAA,UACxB,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,eAAO,QAAQ,QAAQ;AACvB;AAEA,cAAM;AAGN,YAAI,QAAQ,QAAQ,UAAU;AAC5B,cAAI;AACF,kBAAM,QAAQ,MAAM,KAAK,aAAa,QAAQ,KAAK,WAAW;AAE9D,gBAAI,MAAM,WAAW,GAAG;AACtB,cAAAA,QAAO,MAAM,EAAE,KAAK,QAAQ,IAAI,GAAG,0CAA0C;AAAA,YAC/E,OAAO;AACL,cAAAA,QAAO;AAAA,gBACL,EAAE,KAAK,QAAQ,KAAK,WAAW,MAAM,OAAO;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAEA,uBAAW,QAAQ,OAAO;AACxB,kBAAI,CAAC,KAAK,QAAQ,IAAI,IAAI,KAAK,KAAK,aAAa,SAAS,IAAI,GAAG;AAC/D,sBAAM,KAAK,EAAE,KAAK,MAAM,OAAO,QAAQ,QAAQ,EAAE,CAAC;AAAA,cACpD;AAAA,YACF;AAAA,UACF,SAAS,OAAO;AAEd,kBAAM,gBAA+B;AAAA,cACnC,MAAM;AAAA,cACN;AAAA,cACA,YAAY;AAAA,cACZ,YAAY,QAAQ;AAAA,cACpB,SAAS,gCAAgC,QAAQ,GAAG;AAAA,cACpD,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,YACjE;AACA,iBAAK,KAAK,YAAY,aAAa;AAAA,UACrC;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,cAAM,WAAW,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAIzE,YACE,SAAS,QAAQ,SAAS,mBAAmB,KAC7C,SAAS,QAAQ,SAAS,0BAA0B,KAC
pD,SAAS,QAAQ,SAAS,uBAAuB,GACjD;AACA,gBAAM;AAAA,QACR;AAGA,cAAM,sBAAqC;AAAA,UACzC,MAAM;AAAA,UACN;AAAA,UACA,YAAY;AAAA,UACZ,YAAY,QAAQ;AAAA,UACpB,OAAO;AAAA,QACT;AACA,aAAK,KAAK,YAAY,mBAAmB;AAAA,MAC3C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,KACA,oBACA,cACA,cAAuB,OACD;AACtB,UAAM,eAA8B;AAAA,MAClC,MAAM;AAAA,MACN;AAAA,MACA,YAAY;AAAA,MACZ,YAAY;AAAA,IACd;AACA,SAAK,KAAK,YAAY,YAAY;AAGlC,SAAK,QAAQ,IAAI,GAAG;AAGpB,UAAM,OAAO,MAAM,KAAK,UAAU,KAAK,WAAW;AAIlD,UAAM,aAAa,MAAM,sBAAsB,MAAM,GAAG;AAExD,IAAAA,QAAO;AAAA,MACL;AAAA,QACE;AAAA,QACA,OAAO,WAAW;AAAA,QAClB,gBAAgB,WAAW,SAAS;AAAA,MACtC;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AAGJ,QAAI,uBAAuB,UAAa,uBAAuB,IAAI;AAEjE,UAAI,CAAC,aAAa,YAAY,GAAG;AAC/B,cAAM,IAAI,MAAM,8DAA8D;AAAA,MAChF;AAEA,YAAM,qBAAoC;AAAA,QACxC,MAAM;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,YAAY;AAAA,MACd;AACA,WAAK,KAAK,YAAY,kBAAkB;AAExC,kBAAY,MAAM,KAAK,aAAa,eAAe,WAAW,UAAU,kBAAkB;AAAA,IAC5F;AAEA,WAAO;AAAA,MACL;AAAA,MACA,GAAI,WAAW,UAAU,UAAa,EAAE,OAAO,WAAW,MAAM;AAAA,MAChE,UAAU,WAAW;AAAA,MACrB,GAAI,cAAc,UAAa,EAAE,UAAU;AAAA,IAC7C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,UAAU,KAAa,cAAuB,OAAwB;AAClF,UAAM,YAAY,KAAK,IAAI;AAC3B,IAAAA,QAAO,MAAM,EAAE,KAAK,YAAY,GAAG,eAAe;AAElD,QAAI,aAAa;AACf,UAAI;AACF,cAAM,SAAS,MAAM,KAAK,aAAa,cAAc,GAAG;AACxD,cAAM,aAAa,KAAK,IAAI,IAAI;AAChC,QAAAA,QAAO;AAAA,UACL;AAAA,YACE;AAAA,YACA,aAAa;AAAA,YACb;AAAA,YACA,GAAG,iBAAiB,OAAO,MAAM,YAAY,GAAG;AAAA,UAClD;AAAA,UACA;AAAA,QACF;AACA,eAAO,OAAO;AAAA,MAChB,SAAS,OAAO;AAEd,cAAM,IAAI;AAAA,UACR,0BAA0B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAClF;AAAA,MACF;AAAA,IACF;AAGA,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,IAAY,KAAK;AAAA,QAC5C,SAAS;AAAA,QACT,SAAS;AAAA,UACP,cAAc;AAAA,QAChB;AAAA,MACF,CAAC;AAED,YAAM,aAAa,KAAK,IAAI,IAAI;AAChC,MAAAA,QAAO;AAAA,QACL;AAAA,UACE;AAAA,UACA,aAAa;AAAA,UACb;AAAA,UACA,GAAG,iBAAiB,SAAS,MAAM,YAAY,GAAG;AAAA,QACpD;AAAA,QACA;AAAA,MACF;AAEA,aAAO,SAAS;AAAA,IAClB,SAAS,OAAO;AACd,MAAAA,QAAO;AAAA,QACL,EAAE,KAAK,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,QACrE;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,mBAAmB,GAAG,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACnF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,KAAa,cAAuB,OAA0B;AACvF,QAAI;AAEF,UAAI,aAAa;AACf,cAAMC,UAAS,MAAM,KAAK,aAAa,cAAc,GAAG;AAExD,eAAOA,QAAO,MAAM,IAAI,CAAC,SAA+B;AACtD,cAAI,OAAO,SAAS,SAAU,QAAO;AACrC,iBAAO,KAAK;AAAA,QACd,CAAC;AAAA,MACH;AAEA,YAAM,SAAS,MAAM,KAAK,aAAa,MAAM,GAAG;AAIhD,YAAM,YAAY,OAAO,QAAQ,CAAC;AAClC,UAAI,CAAC,WAAW;AACd,cAAM,IAAI,MAAM,wCAAwC,GAAG,uBAAuB;AAAA,MACpF;AAEA,aAAO,UAAU;AAAA,IACnB,SAAS,OAAgB;AAEvB,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,MAAAD,QAAO,MAAM,EAAE,KAAK,OAAO,aAAa,GAAG,yBAAyB;AAGpE,YAAM,IAAI,MAAM,8BAA8B,GAAG,KAAK,YAAY,EAAE;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAc,MAAuB;AACxD,QAAI;AACF,YAAM,UAAU,IAAI,IAAI,IAAI,EAAE,SAAS,YAAY;AACnD,YAAM,UAAU,IAAI,IAAI,IAAI,EAAE,SAAS,YAAY;AACnD,aACE,YAAY,WAAW,QAAQ,SAAS,IAAI,OAAO,EAAE,KAAK,QAAQ,SAAS,IAAI,OAAO,EAAE;AAAA,IAE5F,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAsB;AAC1B,SAAK,UAAU;AACf,UAAM,KAAK,aAAa,KAAK;AAAA,EAC/B;AACF;","names":["logger","result"]}
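The sources embedded in the renamed chunk's source map above document the crawler's HTML-to-markdown pipeline: extract the main article, preprocess MkDocs-style code blocks, convert with Turndown + GFM, then apply the regex cleanup pass. A minimal usage sketch of that pipeline follows; the import path and the standalone use outside the crawler are assumptions inferred from the embedded sources, not something this diff ships.

```typescript
// Illustrative sketch only: drives the conversion pipeline described in the
// embedded sources. The import path is assumed from the source map contents.
import axios from 'axios';
import { convertHtmlToMarkdown } from './article-converter.js';

async function pageToMarkdown(url: string): Promise<string> {
  // Fetch raw HTML the same way the crawler's static (non-headless) path does.
  const response = await axios.get<string>(url, {
    timeout: 30000,
    headers: { 'User-Agent': 'Mozilla/5.0 (compatible; bluera-knowledge-crawler/1.0)' },
  });

  // convertHtmlToMarkdown extracts the main article, normalizes code blocks,
  // converts via Turndown + GFM, and applies cleanupMarkdown. It throws on
  // failure instead of degrading, matching the package's fail-fast style.
  const { markdown, title } = await convertHtmlToMarkdown(response.data, url);

  // Prepend the extracted title as a heading purely for this illustration.
  return title !== undefined ? `# ${title}\n\n${markdown}` : markdown;
}
```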
package/dist/index.js CHANGED
@@ -10,7 +10,7 @@ import {
  } from "./chunk-UAWKTJWN.js";
  import {
  IntelligentCrawler
- } from "./chunk-AIS5S77C.js";
+ } from "./chunk-X7E4RYJE.js";
  import {
  ASTParser,
  AdapterRegistry,
@@ -1,7 +1,7 @@
  #!/usr/bin/env node
  import {
  IntelligentCrawler
- } from "../chunk-AIS5S77C.js";
+ } from "../chunk-X7E4RYJE.js";
  import {
  JobService,
  createDocumentId,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bluera-knowledge",
- "version": "0.14.0",
+ "version": "0.14.2",
  "description": "CLI tool for managing knowledge stores with semantic search",
  "type": "module",
  "bin": {
@@ -64,6 +64,17 @@
  "publishConfig": {
  "access": "public"
  },
+ "files": [
+ "dist/",
+ "python/",
+ "hooks/",
+ "commands/",
+ "skills/",
+ ".claude-plugin/",
+ "README.md",
+ "CHANGELOG.md",
+ "LICENSE"
+ ],
  "devDependencies": {
  "@eslint/js": "^9.39.2",
  "@types/babel__core": "^7.20.5",
@@ -1,15 +0,0 @@
- # Code Review
-
- Review the local codebase using multi-agent analysis. See @.claude/skills/code-review-repo/skill.md for process details.
-
- ## Context
-
- !`echo "---PROJECT---" && ls -la && echo "---CLAUDE.md FILES---" && find . -name CLAUDE.md 2>/dev/null`
-
- ## Workflow
-
- 1. **Gather context**: Find all CLAUDE.md files and source files
- 2. **Multi-agent review**: Launch 5 parallel agents (CLAUDE.md compliance, bug scan, git history, PR comments, code comments)
- 3. **Score issues**: Haiku agents score each issue 0-100 for confidence
- 4. **Filter**: Only report issues with confidence >= 80
- 5. **Output**: Print formatted results to console
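The removed code-review command above centers on a score-then-filter step: scoring agents rate every issue 0-100 and only issues at confidence 80 or higher are reported. Below is a hedged sketch of that filtering rule; the types and names are hypothetical, since the removed file only describes the behaviour.

```typescript
// Hypothetical shapes: the removed command describes behaviour, not an API.
interface ReviewIssue {
  file: string;
  description: string;
  confidence: number; // 0-100, assigned by the scoring agents
}

const CONFIDENCE_THRESHOLD = 80;

// Step 4 ("Filter") of the removed workflow: drop low-confidence findings.
function filterReportableIssues(issues: ReviewIssue[]): ReviewIssue[] {
  return issues.filter((issue) => issue.confidence >= CONFIDENCE_THRESHOLD);
}
```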
@@ -1,34 +0,0 @@
- # Commit
-
- Create atomic, well-organized commits. See @.claude/skills/atomic-commits/SKILL.md for documentation check criteria.
-
- ## Context
-
- !`git status && echo "---STAGED---" && git diff --cached --stat && echo "---UNSTAGED---" && git diff --stat && echo "---UNTRACKED---" && git ls-files --others --exclude-standard && echo "---HISTORY---" && git log --oneline -10`
-
- ## Workflow
-
- 1. **Analyze**: Run `git diff HEAD` to see all changes
- 2. **Documentation Check**: Check if README.md or CLAUDE.md need updates (see skill)
- 3. **Group**: Identify logical features (see skill for grouping rules)
- 4. **Commit each group**:
- ```bash
- git add <files>
- git commit -m "<type>(<scope>): <description>"
- ```
- 5. **Handle untracked**: Categorize as commit/ignore/intentional
- 6. **Report**: Show commits created and final `git status --short`
-
- ## Validation
-
- Pre-commit hooks run automatically:
- - **Doc-only changes**: Skips all validation (instant)
- - **Code changes**: Runs lint + typecheck per language
-
- If hooks fail, fix issues and retry. Never use `--no-verify`.
-
- ## Safety
-
- - Never force push
- - Never amend commits from other sessions
- - Ask if unsure about grouping
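The removed commit command relies on Conventional Commit messages of the form `<type>(<scope>): <description>`. A small illustrative helper for that format follows; the function and type names are hypothetical and not part of the package.

```typescript
// Hypothetical helper showing the message shape the removed command expects.
type CommitType = 'feat' | 'fix' | 'docs' | 'refactor' | 'test' | 'chore';

function formatCommitMessage(type: CommitType, scope: string, description: string): string {
  // Example: formatCommitMessage('fix', 'crawler', 'retry transient fetch errors')
  // produces "fix(crawler): retry transient fetch errors".
  return `${type}(${scope}): ${description}`;
}
```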
@@ -1,7 +0,0 @@
- {
- "provider": "openai",
- "model": "codex-mini-latest",
- "prompt": "What are the best trigger keywords and patterns for a skill auto-activation system in a Claude Code plugin? Context: We're building a UserPromptSubmit hook for the bluera-knowledge plugin that detects when users would benefit from learning about specific skills. The 5 skills are: (1) knowledge-search - how to query indexed library sources, (2) when-to-query - deciding between BK vs Grep/Read, (3) search-optimization - optimizing search parameters, (4) advanced-workflows - multi-tool orchestration, (5) store-lifecycle - managing knowledge stores. Key insight: if a user already uses BK terminology, they don't need the skill suggested. We want to catch users who SHOULD be using BK but don't know it exists.",
- "response": "Below is a bite‑sized “pattern cookbook” you can drop into your `UserPromptSubmit` hook in the bluera‑knowledge Claude Code plugin. The idea is simple:\n\n1. **Look for “natural” signals** that a user is trying to do something Blue‑Knowledge (BK) was built for, but \n2. **Exclude** anyone already using BK lingo (e.g. “knowledge‑search,” “index,” “store,” “workflow”) so you don’t remind the experienced.\n\n---\n\n## 1. Skill → Trigger‑patterns matrix\n\n| Skill | What it is | Trigger patterns (if user text _contains_) | Exclude if user text _contains_ |\n|:---------------------|:---------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------|\n| **knowledge‑search** | How to query an indexed library of docs/sources. | `\\b(find|lookup|search|get me|show me|where can i find)\\b.*\\b(docs|documentation|references?|articles?)\\b` | `\\bknowledge[- ]search\\b|\\bindex(ed)?\\b|\\bstore\\b` |\n| **when‑to‑query** | Deciding whether to use BK vs. grep/`read()` etc. | `\\b(grep|read\\(\\)|scan files|open files manually|search local files)\\b|\\b(is there a better way)\\b` | `\\bwhen[- ]to[- ]query\\b|\\bblue[- ]knowledge\\b` |\n| **search‑optimization** | Tuning filters, relevance, context windows, metadata to get better hits. | `\\b(narrow|filter|refine|limit|boost|prioritize|sort)\\b.*\\b(results|hits|matches)\\b` | `\\bsearch[- ]optimization\\b|\\brelevance\\b|\\bcontext window\\b` |\n| **advanced‑workflows** | Chaining multiple tools/APIs, branching, retrieval‑then‑generation, tool orchestration. | `\\b(chain|pipeline|orchestrate|automate|workflow|multi[- ]step|multi[- ]tool)\\b` | `\\badvanced[- ]workflows\\b|\\btool orchestration\\b` |\n| **store‑lifecycle** | Setting up/updating/archiving knowledge stores, versioning, pruning old info. | `\\b(create|update|delete|archive|prune|version)\\b.*\\b(store|index|database|repository)\\b` | `\\bstore[- ]lifecycle\\b|\\bknowledge[- ]store\\b` |\n\n---\n\n### 1.1. Example Regex snippets\n\n```js\n// snippet for knowledge-search\nconst KNOWLEDGE_SEARCH_RE = /\\b(?:find|lookup|search|get me|show me|where can i find)\\b.*\\b(?:docs|documentation|references?|articles?)\\b/i;\n\n// snippet for when-to-query\nconst WHEN_TO_QUERY_RE = /\\b(?:grep|read\\(\\)|scan files|open files manually|search local files)\\b|\\b(?:is there a better way)\\b/i;\n\n// snippet for store-lifecycle\nconst STORE_LIFECYCLE_RE = /\\b(?:create|update|delete|archive|prune|version)\\b.*\\b(?:store|index|database|repository)\\b/i;\n```\n\n---\n\n## 2. Putting it together in your hook\n\nBelow is a simplified example of how you might wire these patterns into a Claude Code `UserPromptSubmit` hook. 
You’ll want to adjust the messaging and localization to taste.\n\n```js\n/**\n * bluera-knowledge UserPromptSubmit hook\n * Detect users who could benefit from Blue‑Knowledge skills.\n */\nexport async function onUserPromptSubmit(event, ctx) {\n const text = event.prompt.text;\n\n // quick bailout: if they already speak BK\n const hasBKTerm = /\\bknowledge[- ]search\\b|\\bwhen[- ]to[- ]query\\b|\\bsearch[- ]optimization\\b|\\badvanced[- ]workflows\\b|\\bstore[- ]lifecycle\\b|\\bblue[- ]knowledge\\b/i.test(text);\n if (hasBKTerm) return;\n\n // mapping skill → [pattern, user‐facing hint]\n const SKILLS = [\n {\n name: \"knowledge-search\",\n re: /\\b(?:find|lookup|search|get me|show me|where can i find)\\b.*\\b(?:docs|documentation|references?|articles?)\\b/i,\n hint: \"🔍 You can try using our **knowledge‑search** feature to query indexed docs—no manual grepping needed!\"\n },\n {\n name: \"when-to-query\",\n re: /\\b(?:grep|read\\(\\)|scan files|open files manually|search local files)\\b|\\b(?:is there a better way)\\b/i,\n hint: \"💡 Noticed you're grepping or opening files one by one. Our **when‑to‑query** guide helps decide BK vs. direct file search.\"\n },\n {\n name: \"search-optimization\",\n re: /\\b(?:narrow|filter|refine|limit|boost|prioritize|sort)\\b.*\\b(?:results|hits|matches)\\b/i,\n hint: \"⚙️ Want more relevant hits? See our **search‑optimization** tips for tuning relevance and filters.\"\n },\n {\n name: \"advanced-workflows\",\n re: /\\b(?:chain|pipeline|orchestrate|automate|workflow|multi[- ]step|multi[- ]tool)\\b/i,\n hint: \"🔗 Looking to chain tools or build pipelines? Check out **advanced‑workflows** for multi‑tool orchestration.\"\n },\n {\n name: \"store-lifecycle\",\n re: /\\b(?:create|update|delete|archive|prune|version)\\b.*\\b(?:store|index|database|repository)\\b/i,\n hint: \"🗄️ Managing indexes or archiving old data? Our **store‑lifecycle** guide covers store setup, pruning, and versioning.\"\n }\n ];\n\n // find the first match\n for (const skill of SKILLS) {\n if (skill.re.test(text)) {\n await ctx.reply({\n type: \"skill-suggestion\",\n skill: skill.name,\n message: skill.hint\n });\n break;\n }\n }\n}\n```\n\n---\n\n## 3. Tuning tips\n\n1. **Balance precision/recall**: \n - Start _wide_: shorter patterns catch more but risk noise. \n - Refine as you gather real prompts: update regex to avoid false positives.\n\n2. **Avoid overlap**: \n If two patterns both match, pick the one that feels _most_ immediately helpful. (Above example picks the first.)\n\n3. **User‑facing copy**: \n Keep hints crisp, emoji‑tagged, and link directly to your docs or `@help` commands.\n\n4. **Metrics & feedback loop**: \n Log suggestions shown vs. accepted. Tune patterns around low‑acceptance hits.\n\n---\n\n### Summary\n\nBy combining simple regex‑based detectors with an exclusion of existing BK terminology, you’ll catch “hidden” use cases where users are laboring manually and would love a BK shortcut. Tweak patterns over time against your live traffic, and you’ll have a reliable auto‑activation layer that boosts discoverability _without_ nagging your power users.",
- "timestamp": 1768432278
- }
@@ -1,40 +0,0 @@
- #!/bin/bash
- # Smart post-edit auto-fix hook
- # Auto-fixes lint issues and validates types on modified files
-
- cd "$CLAUDE_PROJECT_DIR" || exit 0
-
- # Get modified TS/JS files (uncommitted changes)
- MODIFIED_TS_FILES=$(git diff --name-only HEAD 2>/dev/null | grep -E '\.(ts|tsx|js|jsx)$' || true)
-
- # If no TS/JS changes, skip
- if [ -z "$MODIFIED_TS_FILES" ]; then
- exit 0
- fi
-
- # Auto-fix lint issues on modified files only (fast)
- echo "$MODIFIED_TS_FILES" | xargs npx eslint --fix --quiet 2>/dev/null || true
-
- # Check for remaining lint errors (exit 2 to block and show to Claude)
- LINT_OUTPUT=$(echo "$MODIFIED_TS_FILES" | xargs npx eslint --quiet 2>&1)
- if [ -n "$LINT_OUTPUT" ]; then
- echo "$LINT_OUTPUT" >&2
- exit 2
- fi
-
- # Run typecheck (exit 2 to block and show to Claude)
- TYPE_OUTPUT=$(npx tsc --noEmit --pretty false 2>&1)
- if [ -n "$TYPE_OUTPUT" ]; then
- echo "$TYPE_OUTPUT" | head -20 >&2
- exit 2
- fi
-
- # Check for anti-patterns in code files only (not docs/markdown)
- ANTI_PATTERN=$(git diff -- '*.ts' '*.tsx' '*.js' '*.jsx' ':!dist/' | grep -E '\b(fallback|deprecated|backward compatibility|legacy)\b' | grep -v '^-' | grep -E '^\+' || true)
- if [ -n "$ANTI_PATTERN" ]; then
- echo 'Anti-pattern detected (fallback/deprecated/backward compatibility/legacy). Review CLAUDE.md.' >&2
- echo "$ANTI_PATTERN" >&2
- exit 2
- fi
-
- exit 0
@@ -1,12 +0,0 @@
- # Code Quality Rules
-
- ## Fail Early and Fast
-
- Our code is expected to *work* as-designed:
- - Use `throw` when state is unexpected or for any error condition
- - Use 100% strict typing; no `any` no `as`, unless completely unavoidable and considered best practice
-
- ## Never
-
- - Write "fallback code" or "graceful degradation" code or implement "defaults" *unless* it's part of the specification
- - Leave commented code, nor reference outdated/deprecated implementations
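The removed code-quality rules above call for failing early (`throw` on any unexpected state) and forbid silent fallbacks or defaults outside the specification. A minimal sketch of the style those rules describe follows; the example names are illustrative only.

```typescript
// Illustrative example of the removed "fail early and fast" rule:
// unexpected input is an error condition, not a silent default.
function parseMaxPages(raw: string | undefined): number {
  if (raw === undefined || raw.trim() === '') {
    throw new Error('maxPages is required');
  }
  const value = Number(raw);
  if (!Number.isInteger(value) || value <= 0) {
    throw new Error(`Invalid maxPages value: ${raw}`);
  }
  return value;
}
```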
@@ -1,5 +0,0 @@
- # Git Rules
-
- ## Never
-
- - Use `--no-verify` on Git commits; this anti-pattern completely circumvents the code protections we have in place
@@ -1,7 +0,0 @@
- # Versioning Rules
-
- ## Always
-
- - Use the `bun run version:*` commands after changes
- - Without this, the changes would not be detected by Claude Code
- - Push to main after version bump - releases happen automatically (no manual tagging needed)