kynjal-cli 4.0.0 → 4.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (502) hide show
  1. package/dist/src/appliance/gguf-engine.d.ts +91 -0
  2. package/dist/src/appliance/gguf-engine.d.ts.map +1 -0
  3. package/dist/src/appliance/gguf-engine.js +286 -525
  4. package/dist/src/appliance/gguf-engine.js.map +1 -1
  5. package/dist/src/appliance/ruvllm-bridge.d.ts +102 -0
  6. package/dist/src/appliance/ruvllm-bridge.d.ts.map +1 -0
  7. package/dist/src/appliance/ruvllm-bridge.js +203 -403
  8. package/dist/src/appliance/ruvllm-bridge.js.map +1 -1
  9. package/dist/src/appliance/rvfa-builder.d.ts +44 -0
  10. package/dist/src/appliance/rvfa-builder.d.ts.map +1 -0
  11. package/dist/src/appliance/rvfa-builder.js +154 -208
  12. package/dist/src/appliance/rvfa-builder.js.map +1 -1
  13. package/dist/src/appliance/rvfa-distribution.d.ts +97 -0
  14. package/dist/src/appliance/rvfa-distribution.d.ts.map +1 -0
  15. package/dist/src/appliance/rvfa-distribution.js +260 -423
  16. package/dist/src/appliance/rvfa-distribution.js.map +1 -1
  17. package/dist/src/appliance/rvfa-format.d.ts +111 -0
  18. package/dist/src/appliance/rvfa-format.d.ts.map +1 -0
  19. package/dist/src/appliance/rvfa-format.js +128 -200
  20. package/dist/src/appliance/rvfa-format.js.map +1 -1
  21. package/dist/src/appliance/rvfa-runner.d.ts +69 -0
  22. package/dist/src/appliance/rvfa-runner.d.ts.map +1 -0
  23. package/dist/src/appliance/rvfa-runner.js +168 -304
  24. package/dist/src/appliance/rvfa-runner.js.map +1 -1
  25. package/dist/src/appliance/rvfa-signing.d.ts +123 -0
  26. package/dist/src/appliance/rvfa-signing.d.ts.map +1 -0
  27. package/dist/src/appliance/rvfa-signing.js +173 -295
  28. package/dist/src/appliance/rvfa-signing.js.map +1 -1
  29. package/dist/src/benchmarks/pretrain/index.d.ts +58 -0
  30. package/dist/src/benchmarks/pretrain/index.d.ts.map +1 -0
  31. package/dist/src/benchmarks/pretrain/index.js +331 -542
  32. package/dist/src/benchmarks/pretrain/index.js.map +1 -1
  33. package/dist/src/commands/agent.js +574 -697
  34. package/dist/src/commands/agent.js.map +1 -1
  35. package/dist/src/commands/analyze.js +1218 -1548
  36. package/dist/src/commands/analyze.js.map +1 -1
  37. package/dist/src/commands/appliance-advanced.js +158 -267
  38. package/dist/src/commands/appliance-advanced.js.map +1 -1
  39. package/dist/src/commands/appliance.js +318 -493
  40. package/dist/src/commands/appliance.js.map +1 -1
  41. package/dist/src/commands/benchmark.js +372 -523
  42. package/dist/src/commands/benchmark.js.map +1 -1
  43. package/dist/src/commands/claims.js +274 -364
  44. package/dist/src/commands/claims.js.map +1 -1
  45. package/dist/src/commands/cleanup.js +113 -157
  46. package/dist/src/commands/cleanup.js.map +1 -1
  47. package/dist/src/commands/completions.js +477 -118
  48. package/dist/src/commands/completions.js.map +1 -1
  49. package/dist/src/commands/config.js +237 -303
  50. package/dist/src/commands/config.js.map +1 -1
  51. package/dist/src/commands/daemon.js +487 -596
  52. package/dist/src/commands/daemon.js.map +1 -1
  53. package/dist/src/commands/deployment.js +194 -275
  54. package/dist/src/commands/deployment.js.map +1 -1
  55. package/dist/src/commands/doctor.js +504 -686
  56. package/dist/src/commands/doctor.js.map +1 -1
  57. package/dist/src/commands/embeddings.js +1293 -1543
  58. package/dist/src/commands/embeddings.js.map +1 -1
  59. package/dist/src/commands/guidance.js +449 -596
  60. package/dist/src/commands/guidance.js.map +1 -1
  61. package/dist/src/commands/hive-mind.js +854 -938
  62. package/dist/src/commands/hive-mind.js.map +1 -1
  63. package/dist/src/commands/hooks.js +3112 -3519
  64. package/dist/src/commands/hooks.js.map +1 -1
  65. package/dist/src/commands/index.d.ts +115 -0
  66. package/dist/src/commands/index.d.ts.map +1 -0
  67. package/dist/src/commands/index.js +126 -308
  68. package/dist/src/commands/index.js.map +1 -1
  69. package/dist/src/commands/init.js +788 -940
  70. package/dist/src/commands/init.js.map +1 -1
  71. package/dist/src/commands/issues.js +383 -558
  72. package/dist/src/commands/issues.js.map +1 -1
  73. package/dist/src/commands/mcp.js +493 -605
  74. package/dist/src/commands/mcp.js.map +1 -1
  75. package/dist/src/commands/memory.js +833 -1026
  76. package/dist/src/commands/memory.js.map +1 -1
  77. package/dist/src/commands/migrate.js +282 -347
  78. package/dist/src/commands/migrate.js.map +1 -1
  79. package/dist/src/commands/neural.js +1289 -1563
  80. package/dist/src/commands/neural.js.map +1 -1
  81. package/dist/src/commands/performance.js +497 -643
  82. package/dist/src/commands/performance.js.map +1 -1
  83. package/dist/src/commands/plugins.js +668 -841
  84. package/dist/src/commands/plugins.js.map +1 -1
  85. package/dist/src/commands/process.js +392 -447
  86. package/dist/src/commands/process.js.map +1 -1
  87. package/dist/src/commands/progress.js +162 -256
  88. package/dist/src/commands/progress.js.map +1 -1
  89. package/dist/src/commands/providers.js +150 -220
  90. package/dist/src/commands/providers.js.map +1 -1
  91. package/dist/src/commands/route.js +520 -665
  92. package/dist/src/commands/route.js.map +1 -1
  93. package/dist/src/commands/ruvector/backup.js +505 -651
  94. package/dist/src/commands/ruvector/backup.js.map +1 -1
  95. package/dist/src/commands/ruvector/benchmark.js +349 -401
  96. package/dist/src/commands/ruvector/benchmark.js.map +1 -1
  97. package/dist/src/commands/ruvector/import.js +224 -266
  98. package/dist/src/commands/ruvector/import.js.map +1 -1
  99. package/dist/src/commands/ruvector/index.js +37 -75
  100. package/dist/src/commands/ruvector/index.js.map +1 -1
  101. package/dist/src/commands/ruvector/init.js +336 -359
  102. package/dist/src/commands/ruvector/init.js.map +1 -1
  103. package/dist/src/commands/ruvector/migrate.js +335 -322
  104. package/dist/src/commands/ruvector/migrate.js.map +1 -1
  105. package/dist/src/commands/ruvector/optimize.js +375 -431
  106. package/dist/src/commands/ruvector/optimize.js.map +1 -1
  107. package/dist/src/commands/ruvector/setup.js +703 -117
  108. package/dist/src/commands/ruvector/setup.js.map +1 -1
  109. package/dist/src/commands/ruvector/status.js +364 -419
  110. package/dist/src/commands/ruvector/status.js.map +1 -1
  111. package/dist/src/commands/security.js +485 -608
  112. package/dist/src/commands/security.js.map +1 -1
  113. package/dist/src/commands/session.js +504 -626
  114. package/dist/src/commands/session.js.map +1 -1
  115. package/dist/src/commands/start.js +267 -364
  116. package/dist/src/commands/start.js.map +1 -1
  117. package/dist/src/commands/status.js +380 -486
  118. package/dist/src/commands/status.js.map +1 -1
  119. package/dist/src/commands/swarm.js +408 -488
  120. package/dist/src/commands/swarm.js.map +1 -1
  121. package/dist/src/commands/task.js +423 -538
  122. package/dist/src/commands/task.js.map +1 -1
  123. package/dist/src/commands/transfer-store.js +322 -412
  124. package/dist/src/commands/transfer-store.js.map +1 -1
  125. package/dist/src/commands/update.js +196 -291
  126. package/dist/src/commands/update.js.map +1 -1
  127. package/dist/src/commands/workflow.js +386 -486
  128. package/dist/src/commands/workflow.js.map +1 -1
  129. package/dist/src/config-adapter.d.ts +15 -0
  130. package/dist/src/config-adapter.d.ts.map +1 -0
  131. package/dist/src/config-adapter.js +38 -39
  132. package/dist/src/config-adapter.js.map +1 -1
  133. package/dist/src/index.d.ts +77 -0
  134. package/dist/src/index.d.ts.map +1 -0
  135. package/dist/src/index.js +309 -411
  136. package/dist/src/index.js.map +1 -1
  137. package/dist/src/infrastructure/in-memory-repositories.d.ts +68 -0
  138. package/dist/src/infrastructure/in-memory-repositories.d.ts.map +1 -0
  139. package/dist/src/infrastructure/in-memory-repositories.js +246 -507
  140. package/dist/src/infrastructure/in-memory-repositories.js.map +1 -1
  141. package/dist/src/init/claudemd-generator.d.ts +25 -0
  142. package/dist/src/init/claudemd-generator.d.ts.map +1 -0
  143. package/dist/src/init/claudemd-generator.js +368 -78
  144. package/dist/src/init/claudemd-generator.js.map +1 -1
  145. package/dist/src/init/executor.d.ts +41 -0
  146. package/dist/src/init/executor.d.ts.map +1 -0
  147. package/dist/src/init/executor.js +1307 -996
  148. package/dist/src/init/executor.js.map +1 -1
  149. package/dist/src/init/helpers-generator.d.ts +60 -0
  150. package/dist/src/init/helpers-generator.d.ts.map +1 -0
  151. package/dist/src/init/helpers-generator.js +657 -12
  152. package/dist/src/init/helpers-generator.js.map +1 -1
  153. package/dist/src/init/index.d.ts +1 -1
  154. package/dist/src/init/index.d.ts.map +1 -1
  155. package/dist/src/init/index.js +1 -1
  156. package/dist/src/init/index.js.map +1 -1
  157. package/dist/src/init/mcp-generator.js +33 -37
  158. package/dist/src/init/mcp-generator.js.map +1 -1
  159. package/dist/src/init/settings-generator.js +76 -77
  160. package/dist/src/init/settings-generator.js.map +1 -1
  161. package/dist/src/init/statusline-generator.js +801 -3
  162. package/dist/src/init/statusline-generator.js.map +1 -1
  163. package/dist/src/init/types.d.ts +1 -1
  164. package/dist/src/init/types.d.ts.map +1 -1
  165. package/dist/src/init/types.js +76 -59
  166. package/dist/src/init/types.js.map +1 -1
  167. package/dist/src/mcp-client.d.ts +92 -0
  168. package/dist/src/mcp-client.d.ts.map +1 -0
  169. package/dist/src/mcp-client.js +81 -125
  170. package/dist/src/mcp-client.js.map +1 -1
  171. package/dist/src/mcp-server.d.ts +161 -0
  172. package/dist/src/mcp-server.d.ts.map +1 -0
  173. package/dist/src/mcp-server.js +470 -757
  174. package/dist/src/mcp-server.js.map +1 -1
  175. package/dist/src/mcp-tools/agent-tools.js +391 -492
  176. package/dist/src/mcp-tools/agent-tools.js.map +1 -1
  177. package/dist/src/mcp-tools/agentdb-tools.js +332 -533
  178. package/dist/src/mcp-tools/agentdb-tools.js.map +1 -1
  179. package/dist/src/mcp-tools/analyze-tools.js +172 -236
  180. package/dist/src/mcp-tools/analyze-tools.js.map +1 -1
  181. package/dist/src/mcp-tools/auto-install.d.ts +83 -0
  182. package/dist/src/mcp-tools/auto-install.d.ts.map +1 -0
  183. package/dist/src/mcp-tools/auto-install.js +80 -142
  184. package/dist/src/mcp-tools/auto-install.js.map +1 -1
  185. package/dist/src/mcp-tools/browser-tools.js +252 -375
  186. package/dist/src/mcp-tools/browser-tools.js.map +1 -1
  187. package/dist/src/mcp-tools/claims-tools.js +473 -565
  188. package/dist/src/mcp-tools/claims-tools.js.map +1 -1
  189. package/dist/src/mcp-tools/config-tools.js +197 -272
  190. package/dist/src/mcp-tools/config-tools.js.map +1 -1
  191. package/dist/src/mcp-tools/coordination-tools.js +500 -572
  192. package/dist/src/mcp-tools/coordination-tools.js.map +1 -1
  193. package/dist/src/mcp-tools/daa-tools.js +286 -364
  194. package/dist/src/mcp-tools/daa-tools.js.map +1 -1
  195. package/dist/src/mcp-tools/embeddings-tools.js +582 -693
  196. package/dist/src/mcp-tools/embeddings-tools.js.map +1 -1
  197. package/dist/src/mcp-tools/github-tools.js +260 -311
  198. package/dist/src/mcp-tools/github-tools.js.map +1 -1
  199. package/dist/src/mcp-tools/hive-mind-tools.js +573 -640
  200. package/dist/src/mcp-tools/hive-mind-tools.js.map +1 -1
  201. package/dist/src/mcp-tools/hooks-tools.js +2215 -2648
  202. package/dist/src/mcp-tools/hooks-tools.js.map +1 -1
  203. package/dist/src/mcp-tools/memory-tools.js +350 -505
  204. package/dist/src/mcp-tools/memory-tools.js.map +1 -1
  205. package/dist/src/mcp-tools/neural-tools.js +315 -412
  206. package/dist/src/mcp-tools/neural-tools.js.map +1 -1
  207. package/dist/src/mcp-tools/performance-tools.js +420 -480
  208. package/dist/src/mcp-tools/performance-tools.js.map +1 -1
  209. package/dist/src/mcp-tools/progress-tools.js +204 -278
  210. package/dist/src/mcp-tools/progress-tools.js.map +1 -1
  211. package/dist/src/mcp-tools/ruvllm-tools.js +163 -279
  212. package/dist/src/mcp-tools/ruvllm-tools.js.map +1 -1
  213. package/dist/src/mcp-tools/security-tools.js +297 -429
  214. package/dist/src/mcp-tools/security-tools.js.map +1 -1
  215. package/dist/src/mcp-tools/session-tools.js +185 -234
  216. package/dist/src/mcp-tools/session-tools.js.map +1 -1
  217. package/dist/src/mcp-tools/swarm-tools.js +207 -260
  218. package/dist/src/mcp-tools/swarm-tools.js.map +1 -1
  219. package/dist/src/mcp-tools/system-tools.js +276 -325
  220. package/dist/src/mcp-tools/system-tools.js.map +1 -1
  221. package/dist/src/mcp-tools/task-tools.js +270 -336
  222. package/dist/src/mcp-tools/task-tools.js.map +1 -1
  223. package/dist/src/mcp-tools/terminal-tools.js +148 -196
  224. package/dist/src/mcp-tools/terminal-tools.js.map +1 -1
  225. package/dist/src/mcp-tools/transfer-tools.js +186 -333
  226. package/dist/src/mcp-tools/transfer-tools.js.map +1 -1
  227. package/dist/src/mcp-tools/types.d.ts +31 -0
  228. package/dist/src/mcp-tools/types.d.ts.map +1 -0
  229. package/dist/src/mcp-tools/wasm-agent-tools.js +133 -280
  230. package/dist/src/mcp-tools/wasm-agent-tools.js.map +1 -1
  231. package/dist/src/mcp-tools/workflow-tools.js +405 -450
  232. package/dist/src/mcp-tools/workflow-tools.js.map +1 -1
  233. package/dist/src/memory/ewc-consolidation.d.ts +295 -0
  234. package/dist/src/memory/ewc-consolidation.d.ts.map +1 -0
  235. package/dist/src/memory/ewc-consolidation.js +190 -303
  236. package/dist/src/memory/ewc-consolidation.js.map +1 -1
  237. package/dist/src/memory/intelligence.d.ts +338 -0
  238. package/dist/src/memory/intelligence.d.ts.map +1 -0
  239. package/dist/src/memory/intelligence.js +569 -794
  240. package/dist/src/memory/intelligence.js.map +1 -1
  241. package/dist/src/memory/memory-bridge.d.ts +407 -0
  242. package/dist/src/memory/memory-bridge.d.ts.map +1 -0
  243. package/dist/src/memory/memory-bridge.js +1170 -1640
  244. package/dist/src/memory/memory-bridge.js.map +1 -1
  245. package/dist/src/memory/memory-initializer.d.ts +412 -0
  246. package/dist/src/memory/memory-initializer.d.ts.map +1 -0
  247. package/dist/src/memory/memory-initializer.js +1836 -1851
  248. package/dist/src/memory/memory-initializer.js.map +1 -1
  249. package/dist/src/memory/sona-optimizer.d.ts +227 -0
  250. package/dist/src/memory/sona-optimizer.d.ts.map +1 -0
  251. package/dist/src/memory/sona-optimizer.js +199 -329
  252. package/dist/src/memory/sona-optimizer.js.map +1 -1
  253. package/dist/src/output.d.ts +2 -2
  254. package/dist/src/output.d.ts.map +1 -1
  255. package/dist/src/output.js +242 -272
  256. package/dist/src/output.js.map +1 -1
  257. package/dist/src/parser.d.ts +51 -0
  258. package/dist/src/parser.d.ts.map +1 -0
  259. package/dist/src/parser.js +140 -187
  260. package/dist/src/parser.js.map +1 -1
  261. package/dist/src/plugins/manager.d.ts +133 -0
  262. package/dist/src/plugins/manager.d.ts.map +1 -0
  263. package/dist/src/plugins/manager.js +285 -521
  264. package/dist/src/plugins/manager.js.map +1 -1
  265. package/dist/src/plugins/store/discovery.d.ts +88 -0
  266. package/dist/src/plugins/store/discovery.d.ts.map +1 -0
  267. package/dist/src/plugins/store/discovery.js +271 -358
  268. package/dist/src/plugins/store/discovery.js.map +1 -1
  269. package/dist/src/plugins/store/index.d.ts +76 -0
  270. package/dist/src/plugins/store/index.d.ts.map +1 -0
  271. package/dist/src/plugins/store/index.js +48 -105
  272. package/dist/src/plugins/store/index.js.map +1 -1
  273. package/dist/src/plugins/store/search.d.ts +46 -0
  274. package/dist/src/plugins/store/search.d.ts.map +1 -0
  275. package/dist/src/plugins/store/search.js +69 -107
  276. package/dist/src/plugins/store/search.js.map +1 -1
  277. package/dist/src/plugins/store/types.d.ts +274 -0
  278. package/dist/src/plugins/store/types.d.ts.map +1 -0
  279. package/dist/src/plugins/tests/demo-plugin-store.js +113 -160
  280. package/dist/src/plugins/tests/demo-plugin-store.js.map +1 -1
  281. package/dist/src/plugins/tests/standalone-test.js +172 -223
  282. package/dist/src/plugins/tests/standalone-test.js.map +1 -1
  283. package/dist/src/plugins/tests/test-plugin-store.js +190 -228
  284. package/dist/src/plugins/tests/test-plugin-store.js.map +1 -1
  285. package/dist/src/production/circuit-breaker.d.ts +101 -0
  286. package/dist/src/production/circuit-breaker.d.ts.map +1 -0
  287. package/dist/src/production/circuit-breaker.js +62 -126
  288. package/dist/src/production/circuit-breaker.js.map +1 -1
  289. package/dist/src/production/error-handler.d.ts +92 -0
  290. package/dist/src/production/error-handler.d.ts.map +1 -0
  291. package/dist/src/production/error-handler.js +86 -156
  292. package/dist/src/production/error-handler.js.map +1 -1
  293. package/dist/src/production/monitoring.d.ts +161 -0
  294. package/dist/src/production/monitoring.d.ts.map +1 -0
  295. package/dist/src/production/monitoring.js +139 -220
  296. package/dist/src/production/monitoring.js.map +1 -1
  297. package/dist/src/production/rate-limiter.d.ts +80 -0
  298. package/dist/src/production/rate-limiter.d.ts.map +1 -0
  299. package/dist/src/production/rate-limiter.js +74 -93
  300. package/dist/src/production/rate-limiter.js.map +1 -1
  301. package/dist/src/production/retry.d.ts +48 -0
  302. package/dist/src/production/retry.d.ts.map +1 -0
  303. package/dist/src/production/retry.js +75 -167
  304. package/dist/src/production/retry.js.map +1 -1
  305. package/dist/src/prompt.d.ts +44 -0
  306. package/dist/src/prompt.d.ts.map +1 -0
  307. package/dist/src/prompt.js +436 -560
  308. package/dist/src/prompt.js.map +1 -1
  309. package/dist/src/runtime/headless.d.ts +60 -0
  310. package/dist/src/runtime/headless.d.ts.map +1 -0
  311. package/dist/src/runtime/headless.js +197 -286
  312. package/dist/src/runtime/headless.js.map +1 -1
  313. package/dist/src/ruvector/agent-wasm.d.ts +182 -0
  314. package/dist/src/ruvector/agent-wasm.d.ts.map +1 -0
  315. package/dist/src/ruvector/agent-wasm.js +156 -351
  316. package/dist/src/ruvector/agent-wasm.js.map +1 -1
  317. package/dist/src/ruvector/ast-analyzer.d.ts +67 -0
  318. package/dist/src/ruvector/ast-analyzer.d.ts.map +1 -0
  319. package/dist/src/ruvector/ast-analyzer.js +145 -232
  320. package/dist/src/ruvector/ast-analyzer.js.map +1 -1
  321. package/dist/src/ruvector/coverage-router.d.ts +160 -0
  322. package/dist/src/ruvector/coverage-router.d.ts.map +1 -0
  323. package/dist/src/ruvector/coverage-router.js +287 -419
  324. package/dist/src/ruvector/coverage-router.js.map +1 -1
  325. package/dist/src/ruvector/coverage-tools.js +56 -101
  326. package/dist/src/ruvector/coverage-tools.js.map +1 -1
  327. package/dist/src/ruvector/diff-classifier.d.ts +175 -0
  328. package/dist/src/ruvector/diff-classifier.d.ts.map +1 -0
  329. package/dist/src/ruvector/diff-classifier.js +324 -451
  330. package/dist/src/ruvector/diff-classifier.js.map +1 -1
  331. package/dist/src/ruvector/enhanced-model-router.d.ts +146 -0
  332. package/dist/src/ruvector/enhanced-model-router.d.ts.map +1 -0
  333. package/dist/src/ruvector/enhanced-model-router.js +260 -336
  334. package/dist/src/ruvector/enhanced-model-router.js.map +1 -1
  335. package/dist/src/ruvector/flash-attention.d.ts +195 -0
  336. package/dist/src/ruvector/flash-attention.d.ts.map +1 -0
  337. package/dist/src/ruvector/flash-attention.js +223 -254
  338. package/dist/src/ruvector/flash-attention.js.map +1 -1
  339. package/dist/src/ruvector/graph-analyzer.d.ts +187 -0
  340. package/dist/src/ruvector/graph-analyzer.d.ts.map +1 -0
  341. package/dist/src/ruvector/graph-analyzer.js +486 -680
  342. package/dist/src/ruvector/graph-analyzer.js.map +1 -1
  343. package/dist/src/ruvector/index.d.ts +40 -0
  344. package/dist/src/ruvector/index.d.ts.map +1 -0
  345. package/dist/src/ruvector/index.js +36 -106
  346. package/dist/src/ruvector/index.js.map +1 -1
  347. package/dist/src/ruvector/lora-adapter.d.ts +218 -0
  348. package/dist/src/ruvector/lora-adapter.d.ts.map +1 -0
  349. package/dist/src/ruvector/lora-adapter.js +155 -248
  350. package/dist/src/ruvector/lora-adapter.js.map +1 -1
  351. package/dist/src/ruvector/model-router.d.ts +220 -0
  352. package/dist/src/ruvector/model-router.d.ts.map +1 -0
  353. package/dist/src/ruvector/model-router.js +175 -248
  354. package/dist/src/ruvector/model-router.js.map +1 -1
  355. package/dist/src/ruvector/moe-router.d.ts +206 -0
  356. package/dist/src/ruvector/moe-router.d.ts.map +1 -0
  357. package/dist/src/ruvector/moe-router.js +228 -286
  358. package/dist/src/ruvector/moe-router.js.map +1 -1
  359. package/dist/src/ruvector/q-learning-router.d.ts +211 -0
  360. package/dist/src/ruvector/q-learning-router.d.ts.map +1 -0
  361. package/dist/src/ruvector/q-learning-router.js +257 -338
  362. package/dist/src/ruvector/q-learning-router.js.map +1 -1
  363. package/dist/src/ruvector/ruvllm-wasm.d.ts +179 -0
  364. package/dist/src/ruvector/ruvllm-wasm.d.ts.map +1 -0
  365. package/dist/src/ruvector/ruvllm-wasm.js +270 -434
  366. package/dist/src/ruvector/ruvllm-wasm.js.map +1 -1
  367. package/dist/src/ruvector/semantic-router.d.ts +77 -0
  368. package/dist/src/ruvector/semantic-router.d.ts.map +1 -0
  369. package/dist/src/ruvector/semantic-router.js +60 -67
  370. package/dist/src/ruvector/semantic-router.js.map +1 -1
  371. package/dist/src/ruvector/vector-db.d.ts +69 -0
  372. package/dist/src/ruvector/vector-db.d.ts.map +1 -0
  373. package/dist/src/ruvector/vector-db.js +119 -205
  374. package/dist/src/ruvector/vector-db.js.map +1 -1
  375. package/dist/src/services/agentic-flow-bridge.d.ts +50 -0
  376. package/dist/src/services/agentic-flow-bridge.d.ts.map +1 -0
  377. package/dist/src/services/agentic-flow-bridge.js +32 -105
  378. package/dist/src/services/agentic-flow-bridge.js.map +1 -1
  379. package/dist/src/services/claim-service.d.ts +204 -0
  380. package/dist/src/services/claim-service.d.ts.map +1 -0
  381. package/dist/src/services/claim-service.js +615 -940
  382. package/dist/src/services/claim-service.js.map +1 -1
  383. package/dist/src/services/container-worker-pool.d.ts +197 -0
  384. package/dist/src/services/container-worker-pool.d.ts.map +1 -0
  385. package/dist/src/services/container-worker-pool.js +398 -666
  386. package/dist/src/services/container-worker-pool.js.map +1 -1
  387. package/dist/src/services/headless-worker-executor.d.ts +304 -0
  388. package/dist/src/services/headless-worker-executor.d.ts.map +1 -0
  389. package/dist/src/services/headless-worker-executor.js +441 -467
  390. package/dist/src/services/headless-worker-executor.js.map +1 -1
  391. package/dist/src/services/index.d.ts +4 -4
  392. package/dist/src/services/index.d.ts.map +1 -1
  393. package/dist/src/services/index.js +4 -4
  394. package/dist/src/services/index.js.map +1 -1
  395. package/dist/src/services/registry-api.d.ts +58 -0
  396. package/dist/src/services/registry-api.d.ts.map +1 -0
  397. package/dist/src/services/registry-api.js +92 -200
  398. package/dist/src/services/registry-api.js.map +1 -1
  399. package/dist/src/services/ruvector-training.d.ts +222 -0
  400. package/dist/src/services/ruvector-training.d.ts.map +1 -0
  401. package/dist/src/services/ruvector-training.js +257 -337
  402. package/dist/src/services/ruvector-training.js.map +1 -1
  403. package/dist/src/services/worker-daemon.d.ts +228 -0
  404. package/dist/src/services/worker-daemon.d.ts.map +1 -0
  405. package/dist/src/services/worker-daemon.js +591 -849
  406. package/dist/src/services/worker-daemon.js.map +1 -1
  407. package/dist/src/services/worker-queue.d.ts +194 -0
  408. package/dist/src/services/worker-queue.d.ts.map +1 -0
  409. package/dist/src/services/worker-queue.js +331 -548
  410. package/dist/src/services/worker-queue.js.map +1 -1
  411. package/dist/src/suggest.d.ts +53 -0
  412. package/dist/src/suggest.d.ts.map +1 -0
  413. package/dist/src/suggest.js +45 -55
  414. package/dist/src/suggest.js.map +1 -1
  415. package/dist/src/transfer/anonymization/index.js +29 -37
  416. package/dist/src/transfer/anonymization/index.js.map +1 -1
  417. package/dist/src/transfer/deploy-seraphine.js +128 -155
  418. package/dist/src/transfer/deploy-seraphine.js.map +1 -1
  419. package/dist/src/transfer/export.d.ts +25 -0
  420. package/dist/src/transfer/export.d.ts.map +1 -0
  421. package/dist/src/transfer/export.js +84 -142
  422. package/dist/src/transfer/export.js.map +1 -1
  423. package/dist/src/transfer/index.d.ts +1 -1
  424. package/dist/src/transfer/index.d.ts.map +1 -1
  425. package/dist/src/transfer/index.js +0 -2
  426. package/dist/src/transfer/index.js.map +1 -1
  427. package/dist/src/transfer/ipfs/client.d.ts +109 -0
  428. package/dist/src/transfer/ipfs/client.d.ts.map +1 -0
  429. package/dist/src/transfer/ipfs/client.js +187 -337
  430. package/dist/src/transfer/ipfs/client.js.map +1 -1
  431. package/dist/src/transfer/ipfs/upload.d.ts +95 -0
  432. package/dist/src/transfer/ipfs/upload.d.ts.map +1 -0
  433. package/dist/src/transfer/ipfs/upload.js +288 -434
  434. package/dist/src/transfer/ipfs/upload.js.map +1 -1
  435. package/dist/src/transfer/models/seraphine.d.ts +72 -0
  436. package/dist/src/transfer/models/seraphine.d.ts.map +1 -0
  437. package/dist/src/transfer/models/seraphine.js +55 -55
  438. package/dist/src/transfer/models/seraphine.js.map +1 -1
  439. package/dist/src/transfer/serialization/cfp.d.ts +49 -0
  440. package/dist/src/transfer/serialization/cfp.d.ts.map +1 -0
  441. package/dist/src/transfer/serialization/cfp.js +30 -31
  442. package/dist/src/transfer/serialization/cfp.js.map +1 -1
  443. package/dist/src/transfer/storage/gcs.d.ts +82 -0
  444. package/dist/src/transfer/storage/gcs.d.ts.map +1 -0
  445. package/dist/src/transfer/storage/gcs.js +165 -232
  446. package/dist/src/transfer/storage/gcs.js.map +1 -1
  447. package/dist/src/transfer/store/discovery.d.ts +84 -0
  448. package/dist/src/transfer/store/discovery.d.ts.map +1 -0
  449. package/dist/src/transfer/store/discovery.js +239 -349
  450. package/dist/src/transfer/store/discovery.js.map +1 -1
  451. package/dist/src/transfer/store/download.d.ts +70 -0
  452. package/dist/src/transfer/store/download.d.ts.map +1 -0
  453. package/dist/src/transfer/store/download.js +243 -365
  454. package/dist/src/transfer/store/download.js.map +1 -1
  455. package/dist/src/transfer/store/index.d.ts +84 -0
  456. package/dist/src/transfer/store/index.d.ts.map +1 -0
  457. package/dist/src/transfer/store/index.js +63 -130
  458. package/dist/src/transfer/store/index.js.map +1 -1
  459. package/dist/src/transfer/store/publish.d.ts +76 -0
  460. package/dist/src/transfer/store/publish.d.ts.map +1 -0
  461. package/dist/src/transfer/store/publish.js +184 -258
  462. package/dist/src/transfer/store/publish.js.map +1 -1
  463. package/dist/src/transfer/store/registry.js +50 -72
  464. package/dist/src/transfer/store/registry.js.map +1 -1
  465. package/dist/src/transfer/store/search.d.ts +54 -0
  466. package/dist/src/transfer/store/search.d.ts.map +1 -0
  467. package/dist/src/transfer/store/search.js +64 -96
  468. package/dist/src/transfer/store/search.js.map +1 -1
  469. package/dist/src/transfer/store/tests/standalone-test.js +174 -231
  470. package/dist/src/transfer/store/tests/standalone-test.js.map +1 -1
  471. package/dist/src/transfer/test-seraphine.js +95 -130
  472. package/dist/src/transfer/test-seraphine.js.map +1 -1
  473. package/dist/src/transfer/tests/test-store.js +194 -239
  474. package/dist/src/transfer/tests/test-store.js.map +1 -1
  475. package/dist/src/transfer/types.d.ts +245 -0
  476. package/dist/src/transfer/types.d.ts.map +1 -0
  477. package/dist/src/types.d.ts +198 -0
  478. package/dist/src/types.d.ts.map +1 -0
  479. package/dist/src/types.js +26 -55
  480. package/dist/src/types.js.map +1 -1
  481. package/dist/src/update/checker.d.ts +34 -0
  482. package/dist/src/update/checker.d.ts.map +1 -0
  483. package/dist/src/update/checker.js +106 -183
  484. package/dist/src/update/checker.js.map +1 -1
  485. package/dist/src/update/executor.d.ts +32 -0
  486. package/dist/src/update/executor.d.ts.map +1 -0
  487. package/dist/src/update/executor.js +135 -198
  488. package/dist/src/update/executor.js.map +1 -1
  489. package/dist/src/update/index.d.ts +33 -0
  490. package/dist/src/update/index.d.ts.map +1 -0
  491. package/dist/src/update/index.js +38 -85
  492. package/dist/src/update/index.js.map +1 -1
  493. package/dist/src/update/rate-limiter.d.ts +20 -0
  494. package/dist/src/update/rate-limiter.d.ts.map +1 -0
  495. package/dist/src/update/rate-limiter.js +19 -31
  496. package/dist/src/update/rate-limiter.js.map +1 -1
  497. package/dist/src/update/validator.d.ts +17 -0
  498. package/dist/src/update/validator.d.ts.map +1 -0
  499. package/dist/src/update/validator.js +38 -64
  500. package/dist/src/update/validator.js.map +1 -1
  501. package/dist/tsconfig.tsbuildinfo +1 -1
  502. package/package.json +1 -1
@@ -12,664 +12,573 @@
12
12
  *
13
13
  * Created with ❤️ by ruv.io
14
14
  */
15
- var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
16
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
17
- return new (P || (P = Promise))(function (resolve, reject) {
18
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
19
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
20
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
21
- step((generator = generator.apply(thisArg, _arguments || [])).next());
22
- });
23
- };
24
- var __generator = (this && this.__generator) || function (thisArg, body) {
25
- var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
26
- return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
27
- function verb(n) { return function (v) { return step([n, v]); }; }
28
- function step(op) {
29
- if (f) throw new TypeError("Generator is already executing.");
30
- while (_) try {
31
- if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
32
- if (y = 0, t) op = [op[0] & 2, t.value];
33
- switch (op[0]) {
34
- case 0: case 1: t = op; break;
35
- case 4: _.label++; return { value: op[1], done: false };
36
- case 5: _.label++; y = op[1]; op = [0]; continue;
37
- case 7: op = _.ops.pop(); _.trys.pop(); continue;
38
- default:
39
- if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
40
- if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
41
- if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
42
- if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
43
- if (t[2]) _.ops.pop();
44
- _.trys.pop(); continue;
45
- }
46
- op = body.call(thisArg, _);
47
- } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
48
- if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
49
- }
50
- };
51
15
  import { output } from '../output.js';
52
16
  // Dynamic imports for embeddings package
53
- function getEmbeddings() {
54
- return __awaiter(this, void 0, void 0, function () {
55
- var _a;
56
- return __generator(this, function (_b) {
57
- switch (_b.label) {
58
- case 0:
59
- _b.trys.push([0, 2, , 3]);
60
- return [4 /*yield*/, import('@claude-flow/embeddings')];
61
- case 1: return [2 /*return*/, _b.sent()];
62
- case 2:
63
- _a = _b.sent();
64
- return [2 /*return*/, null];
65
- case 3: return [2 /*return*/];
66
- }
67
- });
68
- });
17
+ async function getEmbeddings() {
18
+ try {
19
+ return await import('@claude-flow/embeddings');
20
+ }
21
+ catch {
22
+ return null;
23
+ }
69
24
  }
70
25
  // Generate subcommand - REAL implementation
71
- var generateCommand = {
26
+ const generateCommand = {
72
27
  name: 'generate',
73
28
  description: 'Generate embeddings for text',
74
29
  options: [
75
30
  { name: 'text', short: 't', type: 'string', description: 'Text to embed', required: true },
76
- { name: 'provider', short: 'p', type: 'string', description: 'Provider: openai, transformers, agentic-flow, local', "default": 'local' },
31
+ { name: 'provider', short: 'p', type: 'string', description: 'Provider: openai, transformers, agentic-flow, local', default: 'local' },
77
32
  { name: 'model', short: 'm', type: 'string', description: 'Model to use' },
78
- { name: 'output', short: 'o', type: 'string', description: 'Output format: json, array, preview', "default": 'preview' },
33
+ { name: 'output', short: 'o', type: 'string', description: 'Output format: json, array, preview', default: 'preview' },
79
34
  ],
80
35
  examples: [
81
36
  { command: 'claude-flow embeddings generate -t "Hello world"', description: 'Generate embedding' },
82
37
  { command: 'claude-flow embeddings generate -t "Test" -o json', description: 'Output as JSON' },
83
38
  ],
84
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
85
- var text, provider, outputFormat, spinner, _a, generateEmbedding, loadEmbeddingModel, startTime, modelInfo, result, duration, preview, error_1;
86
- return __generator(this, function (_b) {
87
- switch (_b.label) {
88
- case 0:
89
- text = ctx.flags.text;
90
- provider = ctx.flags.provider || 'local';
91
- outputFormat = ctx.flags.output || 'preview';
92
- if (!text) {
93
- output.printError('Text is required');
94
- return [2 /*return*/, { success: false, exitCode: 1 }];
95
- }
96
- output.writeln();
97
- output.writeln(output.bold('Generate Embedding'));
98
- output.writeln(output.dim('─'.repeat(50)));
99
- spinner = output.createSpinner({ text: "Generating with " + provider + "...", spinner: 'dots' });
100
- spinner.start();
101
- _b.label = 1;
102
- case 1:
103
- _b.trys.push([1, 5, , 6]);
104
- return [4 /*yield*/, import('../memory/memory-initializer.js')];
105
- case 2:
106
- _a = _b.sent(), generateEmbedding = _a.generateEmbedding, loadEmbeddingModel = _a.loadEmbeddingModel;
107
- startTime = Date.now();
108
- return [4 /*yield*/, loadEmbeddingModel({ verbose: false })];
109
- case 3:
110
- modelInfo = _b.sent();
111
- return [4 /*yield*/, generateEmbedding(text)];
112
- case 4:
113
- result = _b.sent();
114
- duration = Date.now() - startTime;
115
- spinner.succeed("Embedding generated in " + duration + "ms");
116
- if (outputFormat === 'json') {
117
- output.printJson({
118
- text: text.substring(0, 100),
119
- embedding: result.embedding,
120
- dimensions: result.dimensions,
121
- model: result.model,
122
- duration: duration
123
- });
124
- return [2 /*return*/, { success: true, data: result }];
125
- }
126
- if (outputFormat === 'array') {
127
- output.writeln(JSON.stringify(result.embedding));
128
- return [2 /*return*/, { success: true, data: result }];
129
- }
130
- preview = result.embedding.slice(0, 8).map(function (v) { return v.toFixed(6); });
131
- output.writeln();
132
- output.printBox([
133
- "Provider: " + provider,
134
- "Model: " + result.model + " (" + modelInfo.modelName + ")",
135
- "Dimensions: " + result.dimensions,
136
- "Text: \"" + text.substring(0, 40) + (text.length > 40 ? '...' : '') + "\"",
137
- "Generation time: " + duration + "ms",
138
- "",
139
- "Vector preview (first 8 of " + result.dimensions + "):",
140
- "[" + preview.join(', ') + ", ...]",
141
- ].join('\n'), 'Result');
142
- return [2 /*return*/, { success: true, data: result }];
143
- case 5:
144
- error_1 = _b.sent();
145
- spinner.fail('Embedding generation failed');
146
- output.printError(error_1 instanceof Error ? error_1.message : String(error_1));
147
- return [2 /*return*/, { success: false, exitCode: 1 }];
148
- case 6: return [2 /*return*/];
39
+ action: async (ctx) => {
40
+ const text = ctx.flags.text;
41
+ const provider = ctx.flags.provider || 'local';
42
+ const outputFormat = ctx.flags.output || 'preview';
43
+ if (!text) {
44
+ output.printError('Text is required');
45
+ return { success: false, exitCode: 1 };
46
+ }
47
+ output.writeln();
48
+ output.writeln(output.bold('Generate Embedding'));
49
+ output.writeln(output.dim('─'.repeat(50)));
50
+ const spinner = output.createSpinner({ text: `Generating with ${provider}...`, spinner: 'dots' });
51
+ spinner.start();
52
+ try {
53
+ // Use real embedding generator
54
+ const { generateEmbedding, loadEmbeddingModel } = await import('../memory/memory-initializer.js');
55
+ const startTime = Date.now();
56
+ const modelInfo = await loadEmbeddingModel({ verbose: false });
57
+ const result = await generateEmbedding(text);
58
+ const duration = Date.now() - startTime;
59
+ spinner.succeed(`Embedding generated in ${duration}ms`);
60
+ if (outputFormat === 'json') {
61
+ output.printJson({
62
+ text: text.substring(0, 100),
63
+ embedding: result.embedding,
64
+ dimensions: result.dimensions,
65
+ model: result.model,
66
+ duration
67
+ });
68
+ return { success: true, data: result };
149
69
  }
150
- });
151
- }); }
70
+ if (outputFormat === 'array') {
71
+ output.writeln(JSON.stringify(result.embedding));
72
+ return { success: true, data: result };
73
+ }
74
+ // Preview format (default)
75
+ const preview = result.embedding.slice(0, 8).map(v => v.toFixed(6));
76
+ output.writeln();
77
+ output.printBox([
78
+ `Provider: ${provider}`,
79
+ `Model: ${result.model} (${modelInfo.modelName})`,
80
+ `Dimensions: ${result.dimensions}`,
81
+ `Text: "${text.substring(0, 40)}${text.length > 40 ? '...' : ''}"`,
82
+ `Generation time: ${duration}ms`,
83
+ ``,
84
+ `Vector preview (first 8 of ${result.dimensions}):`,
85
+ `[${preview.join(', ')}, ...]`,
86
+ ].join('\n'), 'Result');
87
+ return { success: true, data: result };
88
+ }
89
+ catch (error) {
90
+ spinner.fail('Embedding generation failed');
91
+ output.printError(error instanceof Error ? error.message : String(error));
92
+ return { success: false, exitCode: 1 };
93
+ }
94
+ },
152
95
  };
153
96
  // Search subcommand - REAL implementation using sql.js
154
- var searchCommand = {
97
+ const searchCommand = {
155
98
  name: 'search',
156
99
  description: 'Semantic similarity search',
157
100
  options: [
158
101
  { name: 'query', short: 'q', type: 'string', description: 'Search query', required: true },
159
- { name: 'collection', short: 'c', type: 'string', description: 'Namespace to search', "default": 'default' },
160
- { name: 'limit', short: 'l', type: 'number', description: 'Max results', "default": '10' },
161
- { name: 'threshold', short: 't', type: 'number', description: 'Similarity threshold (0-1)', "default": '0.5' },
162
- { name: 'db-path', type: 'string', description: 'Database path', "default": '.swarm/memory.db' },
102
+ { name: 'collection', short: 'c', type: 'string', description: 'Namespace to search', default: 'default' },
103
+ { name: 'limit', short: 'l', type: 'number', description: 'Max results', default: '10' },
104
+ { name: 'threshold', short: 't', type: 'number', description: 'Similarity threshold (0-1)', default: '0.5' },
105
+ { name: 'db-path', type: 'string', description: 'Database path', default: '.swarm/memory.db' },
163
106
  ],
164
107
  examples: [
165
108
  { command: 'claude-flow embeddings search -q "error handling"', description: 'Search for similar' },
166
109
  { command: 'claude-flow embeddings search -q "test" -l 5', description: 'Limit results' },
167
110
  ],
168
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
169
- var query, namespace, limit, threshold, dbPath, spinner, fs, path, fullDbPath, initSqlJs, SQL, fileBuffer, db, startTime, generateEmbedding, queryResult, queryEmbedding, entries, results, _i, _a, row, _b, id, key, ns, content, embeddingJson, embedding, similarity, keywordEntries, _loop_1, _c, _d, row, topResults, searchTime, error_2;
170
- var _e, _f;
171
- return __generator(this, function (_g) {
172
- switch (_g.label) {
173
- case 0:
174
- query = ctx.flags.query;
175
- namespace = ctx.flags.collection || 'default';
176
- limit = parseInt(ctx.flags.limit || '10', 10);
177
- threshold = parseFloat(ctx.flags.threshold || '0.5');
178
- dbPath = ctx.flags['db-path'] || '.swarm/memory.db';
179
- if (!query) {
180
- output.printError('Query is required');
181
- return [2 /*return*/, { success: false, exitCode: 1 }];
182
- }
183
- output.writeln();
184
- output.writeln(output.bold('Semantic Search'));
185
- output.writeln(output.dim(''.repeat(60)));
186
- spinner = output.createSpinner({ text: 'Searching...', spinner: 'dots' });
187
- spinner.start();
188
- _g.label = 1;
189
- case 1:
190
- _g.trys.push([1, 8, , 9]);
191
- return [4 /*yield*/, import('fs')];
192
- case 2:
193
- fs = _g.sent();
194
- return [4 /*yield*/, import('path')];
195
- case 3:
196
- path = _g.sent();
197
- fullDbPath = path.resolve(process.cwd(), dbPath);
198
- // Check if database exists
199
- if (!fs.existsSync(fullDbPath)) {
200
- spinner.fail('Database not found');
201
- output.printWarning("No database at " + fullDbPath);
202
- output.printInfo('Run: claude-flow memory init');
203
- return [2 /*return*/, { success: false, exitCode: 1 }];
204
- }
205
- return [4 /*yield*/, import('sql.js')];
206
- case 4:
207
- initSqlJs = (_g.sent())["default"];
208
- return [4 /*yield*/, initSqlJs()];
209
- case 5:
210
- SQL = _g.sent();
211
- fileBuffer = fs.readFileSync(fullDbPath);
212
- db = new SQL.Database(fileBuffer);
213
- startTime = Date.now();
214
- return [4 /*yield*/, import('../memory/memory-initializer.js')];
215
- case 6:
216
- generateEmbedding = (_g.sent()).generateEmbedding;
217
- return [4 /*yield*/, generateEmbedding(query)];
218
- case 7:
219
- queryResult = _g.sent();
220
- queryEmbedding = queryResult.embedding;
221
- entries = db.exec("\n SELECT id, key, namespace, content, embedding, embedding_dimensions\n FROM memory_entries\n WHERE status = 'active'\n AND embedding IS NOT NULL\n " + (namespace !== 'all' ? "AND namespace = '" + namespace + "'" : '') + "\n LIMIT 1000\n ");
222
- results = [];
223
- if ((_e = entries[0]) === null || _e === void 0 ? void 0 : _e.values) {
224
- for (_i = 0, _a = entries[0].values; _i < _a.length; _i++) {
225
- row = _a[_i];
226
- _b = row, id = _b[0], key = _b[1], ns = _b[2], content = _b[3], embeddingJson = _b[4];
227
- if (!embeddingJson)
228
- continue;
229
- try {
230
- embedding = JSON.parse(embeddingJson);
231
- similarity = cosineSimilarity(queryEmbedding, embedding);
232
- if (similarity >= threshold) {
233
- results.push({
234
- score: similarity,
235
- id: id.substring(0, 10),
236
- key: key || id.substring(0, 15),
237
- content: (content || '').substring(0, 45) + ((content || '').length > 45 ? '...' : ''),
238
- namespace: ns || 'default'
239
- });
240
- }
241
- }
242
- catch (_h) {
243
- // Skip entries with invalid embeddings
244
- }
111
+ action: async (ctx) => {
112
+ const query = ctx.flags.query;
113
+ const namespace = ctx.flags.collection || 'default';
114
+ const limit = parseInt(ctx.flags.limit || '10', 10);
115
+ const threshold = parseFloat(ctx.flags.threshold || '0.5');
116
+ const dbPath = ctx.flags['db-path'] || '.swarm/memory.db';
117
+ if (!query) {
118
+ output.printError('Query is required');
119
+ return { success: false, exitCode: 1 };
120
+ }
121
+ output.writeln();
122
+ output.writeln(output.bold('Semantic Search'));
123
+ output.writeln(output.dim(''.repeat(60)));
124
+ const spinner = output.createSpinner({ text: 'Searching...', spinner: 'dots' });
125
+ spinner.start();
126
+ try {
127
+ const fs = await import('fs');
128
+ const path = await import('path');
129
+ const fullDbPath = path.resolve(process.cwd(), dbPath);
130
+ // Check if database exists
131
+ if (!fs.existsSync(fullDbPath)) {
132
+ spinner.fail('Database not found');
133
+ output.printWarning(`No database at ${fullDbPath}`);
134
+ output.printInfo('Run: claude-flow memory init');
135
+ return { success: false, exitCode: 1 };
136
+ }
137
+ // Load sql.js
138
+ const initSqlJs = (await import('sql.js')).default;
139
+ const SQL = await initSqlJs();
140
+ const fileBuffer = fs.readFileSync(fullDbPath);
141
+ const db = new SQL.Database(fileBuffer);
142
+ const startTime = Date.now();
143
+ // Generate embedding for query
144
+ const { generateEmbedding } = await import('../memory/memory-initializer.js');
145
+ const queryResult = await generateEmbedding(query);
146
+ const queryEmbedding = queryResult.embedding;
147
+ // Get all entries with embeddings from database
148
+ const entries = db.exec(`
149
+ SELECT id, key, namespace, content, embedding, embedding_dimensions
150
+ FROM memory_entries
151
+ WHERE status = 'active'
152
+ AND embedding IS NOT NULL
153
+ ${namespace !== 'all' ? `AND namespace = '${namespace}'` : ''}
154
+ LIMIT 1000
155
+ `);
156
+ const results = [];
157
+ if (entries[0]?.values) {
158
+ for (const row of entries[0].values) {
159
+ const [id, key, ns, content, embeddingJson] = row;
160
+ if (!embeddingJson)
161
+ continue;
162
+ try {
163
+ const embedding = JSON.parse(embeddingJson);
164
+ // Calculate cosine similarity
165
+ const similarity = cosineSimilarity(queryEmbedding, embedding);
166
+ if (similarity >= threshold) {
167
+ results.push({
168
+ score: similarity,
169
+ id: id.substring(0, 10),
170
+ key: key || id.substring(0, 15),
171
+ content: (content || '').substring(0, 45) + ((content || '').length > 45 ? '...' : ''),
172
+ namespace: ns || 'default'
173
+ });
245
174
  }
246
175
  }
247
- // Also search entries without embeddings using keyword match
248
- if (results.length < limit) {
249
- keywordEntries = db.exec("\n SELECT id, key, namespace, content\n FROM memory_entries\n WHERE status = 'active'\n AND (content LIKE '%" + query.replace(/'/g, "''") + "%' OR key LIKE '%" + query.replace(/'/g, "''") + "%')\n " + (namespace !== 'all' ? "AND namespace = '" + namespace + "'" : '') + "\n LIMIT " + (limit - results.length) + "\n ");
250
- if ((_f = keywordEntries[0]) === null || _f === void 0 ? void 0 : _f.values) {
251
- _loop_1 = function (row) {
252
- var _j = row, id = _j[0], key = _j[1], ns = _j[2], content = _j[3];
253
- // Avoid duplicates
254
- if (!results.some(function (r) { return r.id === id.substring(0, 10); })) {
255
- results.push({
256
- score: 0.5,
257
- id: id.substring(0, 10),
258
- key: key || id.substring(0, 15),
259
- content: (content || '').substring(0, 45) + ((content || '').length > 45 ? '...' : ''),
260
- namespace: ns || 'default'
261
- });
262
- }
263
- };
264
- for (_c = 0, _d = keywordEntries[0].values; _c < _d.length; _c++) {
265
- row = _d[_c];
266
- _loop_1(row);
267
- }
268
- }
176
+ catch {
177
+ // Skip entries with invalid embeddings
269
178
  }
270
- // Sort by score descending
271
- results.sort(function (a, b) { return b.score - a.score; });
272
- topResults = results.slice(0, limit);
273
- searchTime = Date.now() - startTime;
274
- db.close();
275
- spinner.succeed("Found " + topResults.length + " matches (" + searchTime + "ms)");
276
- if (topResults.length === 0) {
277
- output.writeln();
278
- output.printWarning('No matches found');
279
- output.printInfo("Try: claude-flow memory store -k \"key\" --value \"your data\"");
280
- return [2 /*return*/, { success: true, data: [] }];
179
+ }
180
+ }
181
+ // Also search entries without embeddings using keyword match
182
+ if (results.length < limit) {
183
+ const keywordEntries = db.exec(`
184
+ SELECT id, key, namespace, content
185
+ FROM memory_entries
186
+ WHERE status = 'active'
187
+ AND (content LIKE '%${query.replace(/'/g, "''")}%' OR key LIKE '%${query.replace(/'/g, "''")}%')
188
+ ${namespace !== 'all' ? `AND namespace = '${namespace}'` : ''}
189
+ LIMIT ${limit - results.length}
190
+ `);
191
+ if (keywordEntries[0]?.values) {
192
+ for (const row of keywordEntries[0].values) {
193
+ const [id, key, ns, content] = row;
194
+ // Avoid duplicates
195
+ if (!results.some(r => r.id === id.substring(0, 10))) {
196
+ results.push({
197
+ score: 0.5, // Keyword match base score
198
+ id: id.substring(0, 10),
199
+ key: key || id.substring(0, 15),
200
+ content: (content || '').substring(0, 45) + ((content || '').length > 45 ? '...' : ''),
201
+ namespace: ns || 'default'
202
+ });
203
+ }
281
204
  }
282
- output.writeln();
283
- output.printTable({
284
- columns: [
285
- { key: 'score', header: 'Score', width: 10 },
286
- { key: 'key', header: 'Key', width: 18 },
287
- { key: 'content', header: 'Content', width: 42 },
288
- ],
289
- data: topResults.map(function (r) { return ({
290
- score: r.score >= 0.8 ? output.success(r.score.toFixed(2)) :
291
- r.score >= 0.6 ? output.warning(r.score.toFixed(2)) :
292
- output.dim(r.score.toFixed(2)),
293
- key: r.key,
294
- content: r.content
295
- }); })
296
- });
297
- output.writeln();
298
- output.writeln(output.dim("Searched " + namespace + " namespace (" + queryResult.model + ", " + searchTime + "ms)"));
299
- return [2 /*return*/, { success: true, data: topResults }];
300
- case 8:
301
- error_2 = _g.sent();
302
- spinner.fail('Search failed');
303
- output.printError(error_2 instanceof Error ? error_2.message : String(error_2));
304
- return [2 /*return*/, { success: false, exitCode: 1 }];
305
- case 9: return [2 /*return*/];
205
+ }
306
206
  }
307
- });
308
- }); }
207
+ // Sort by score descending
208
+ results.sort((a, b) => b.score - a.score);
209
+ const topResults = results.slice(0, limit);
210
+ const searchTime = Date.now() - startTime;
211
+ db.close();
212
+ spinner.succeed(`Found ${topResults.length} matches (${searchTime}ms)`);
213
+ if (topResults.length === 0) {
214
+ output.writeln();
215
+ output.printWarning('No matches found');
216
+ output.printInfo(`Try: claude-flow memory store -k "key" --value "your data"`);
217
+ return { success: true, data: [] };
218
+ }
219
+ output.writeln();
220
+ output.printTable({
221
+ columns: [
222
+ { key: 'score', header: 'Score', width: 10 },
223
+ { key: 'key', header: 'Key', width: 18 },
224
+ { key: 'content', header: 'Content', width: 42 },
225
+ ],
226
+ data: topResults.map(r => ({
227
+ score: r.score >= 0.8 ? output.success(r.score.toFixed(2)) :
228
+ r.score >= 0.6 ? output.warning(r.score.toFixed(2)) :
229
+ output.dim(r.score.toFixed(2)),
230
+ key: r.key,
231
+ content: r.content
232
+ })),
233
+ });
234
+ output.writeln();
235
+ output.writeln(output.dim(`Searched ${namespace} namespace (${queryResult.model}, ${searchTime}ms)`));
236
+ return { success: true, data: topResults };
237
+ }
238
+ catch (error) {
239
+ spinner.fail('Search failed');
240
+ output.printError(error instanceof Error ? error.message : String(error));
241
+ return { success: false, exitCode: 1 };
242
+ }
243
+ },
309
244
  };
310
245
  /**
311
246
  * Optimized cosine similarity
312
247
  * V8 JIT-friendly - ~0.5μs per 384-dim vector comparison
313
248
  */
314
249
  function cosineSimilarity(a, b) {
315
- var len = Math.min(a.length, b.length);
250
+ const len = Math.min(a.length, b.length);
316
251
  if (len === 0)
317
252
  return 0;
318
- var dot = 0, normA = 0, normB = 0;
253
+ let dot = 0, normA = 0, normB = 0;
319
254
  // Simple loop - V8 optimizes this well
320
- for (var i = 0; i < len; i++) {
321
- var ai = a[i], bi = b[i];
255
+ for (let i = 0; i < len; i++) {
256
+ const ai = a[i], bi = b[i];
322
257
  dot += ai * bi;
323
258
  normA += ai * ai;
324
259
  normB += bi * bi;
325
260
  }
326
- var mag = Math.sqrt(normA * normB);
261
+ const mag = Math.sqrt(normA * normB);
327
262
  return mag === 0 ? 0 : dot / mag;
328
263
  }
329
264
  // Compare subcommand - REAL similarity computation
330
- var compareCommand = {
265
+ const compareCommand = {
331
266
  name: 'compare',
332
267
  description: 'Compare similarity between texts',
333
268
  options: [
334
269
  { name: 'text1', type: 'string', description: 'First text', required: true },
335
270
  { name: 'text2', type: 'string', description: 'Second text', required: true },
336
- { name: 'metric', short: 'm', type: 'string', description: 'Metric: cosine, euclidean, dot', "default": 'cosine' },
271
+ { name: 'metric', short: 'm', type: 'string', description: 'Metric: cosine, euclidean, dot', default: 'cosine' },
337
272
  ],
338
273
  examples: [
339
274
  { command: 'claude-flow embeddings compare --text1 "Hello" --text2 "Hi there"', description: 'Compare texts' },
340
275
  ],
341
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
342
- var text1, text2, metric, spinner, generateEmbedding, startTime, _a, emb1, emb2, embedTime, similarity, sumSq, i, diff, distance, dot, i, error_3;
343
- return __generator(this, function (_b) {
344
- switch (_b.label) {
345
- case 0:
346
- text1 = ctx.flags.text1;
347
- text2 = ctx.flags.text2;
348
- metric = ctx.flags.metric || 'cosine';
349
- if (!text1 || !text2) {
350
- output.printError('Both text1 and text2 are required');
351
- return [2 /*return*/, { success: false, exitCode: 1 }];
276
+ action: async (ctx) => {
277
+ const text1 = ctx.flags.text1;
278
+ const text2 = ctx.flags.text2;
279
+ const metric = ctx.flags.metric || 'cosine';
280
+ if (!text1 || !text2) {
281
+ output.printError('Both text1 and text2 are required');
282
+ return { success: false, exitCode: 1 };
283
+ }
284
+ output.writeln();
285
+ output.writeln(output.bold('Text Similarity (Real)'));
286
+ output.writeln(output.dim('─'.repeat(50)));
287
+ const spinner = output.createSpinner({ text: 'Generating embeddings...', spinner: 'dots' });
288
+ spinner.start();
289
+ try {
290
+ const { generateEmbedding } = await import('../memory/memory-initializer.js');
291
+ // Generate real embeddings for both texts
292
+ const startTime = Date.now();
293
+ const [emb1, emb2] = await Promise.all([
294
+ generateEmbedding(text1),
295
+ generateEmbedding(text2),
296
+ ]);
297
+ const embedTime = Date.now() - startTime;
298
+ spinner.setText('Computing similarity...');
299
+ // Compute real similarity based on metric
300
+ let similarity;
301
+ switch (metric) {
302
+ case 'euclidean': {
303
+ // Euclidean distance (converted to similarity: 1 / (1 + distance))
304
+ let sumSq = 0;
305
+ for (let i = 0; i < emb1.embedding.length; i++) {
306
+ const diff = emb1.embedding[i] - emb2.embedding[i];
307
+ sumSq += diff * diff;
352
308
  }
353
- output.writeln();
354
- output.writeln(output.bold('Text Similarity (Real)'));
355
- output.writeln(output.dim('─'.repeat(50)));
356
- spinner = output.createSpinner({ text: 'Generating embeddings...', spinner: 'dots' });
357
- spinner.start();
358
- _b.label = 1;
359
- case 1:
360
- _b.trys.push([1, 4, , 5]);
361
- return [4 /*yield*/, import('../memory/memory-initializer.js')];
362
- case 2:
363
- generateEmbedding = (_b.sent()).generateEmbedding;
364
- startTime = Date.now();
365
- return [4 /*yield*/, Promise.all([
366
- generateEmbedding(text1),
367
- generateEmbedding(text2),
368
- ])];
369
- case 3:
370
- _a = _b.sent(), emb1 = _a[0], emb2 = _a[1];
371
- embedTime = Date.now() - startTime;
372
- spinner.setText('Computing similarity...');
373
- similarity = void 0;
374
- switch (metric) {
375
- case 'euclidean': {
376
- sumSq = 0;
377
- for (i = 0; i < emb1.embedding.length; i++) {
378
- diff = emb1.embedding[i] - emb2.embedding[i];
379
- sumSq += diff * diff;
380
- }
381
- distance = Math.sqrt(sumSq);
382
- similarity = 1 / (1 + distance);
383
- break;
384
- }
385
- case 'dot': {
386
- dot = 0;
387
- for (i = 0; i < emb1.embedding.length; i++) {
388
- dot += emb1.embedding[i] * emb2.embedding[i];
389
- }
390
- similarity = dot;
391
- break;
392
- }
393
- case 'cosine':
394
- default: {
395
- // Cosine similarity
396
- similarity = cosineSimilarity(emb1.embedding, emb2.embedding);
397
- }
309
+ const distance = Math.sqrt(sumSq);
310
+ similarity = 1 / (1 + distance);
311
+ break;
312
+ }
313
+ case 'dot': {
314
+ // Dot product
315
+ let dot = 0;
316
+ for (let i = 0; i < emb1.embedding.length; i++) {
317
+ dot += emb1.embedding[i] * emb2.embedding[i];
398
318
  }
399
- spinner.succeed("Comparison complete (" + embedTime + "ms)");
400
- output.writeln();
401
- output.printBox([
402
- "Text 1: \"" + text1.substring(0, 30) + (text1.length > 30 ? '...' : '') + "\"",
403
- "Text 2: \"" + text2.substring(0, 30) + (text2.length > 30 ? '...' : '') + "\"",
404
- "",
405
- "Model: " + emb1.model + " (" + emb1.dimensions + "-dim)",
406
- "Metric: " + metric,
407
- "Similarity: " + (similarity > 0.8 ? output.success(similarity.toFixed(4)) : similarity > 0.5 ? output.warning(similarity.toFixed(4)) : output.dim(similarity.toFixed(4))),
408
- "",
409
- "Interpretation: " + (similarity > 0.8 ? 'Highly similar' : similarity > 0.5 ? 'Moderately similar' : 'Dissimilar'),
410
- ].join('\n'), 'Result');
411
- return [2 /*return*/, { success: true, data: { similarity: similarity, metric: metric, embedTime: embedTime } }];
412
- case 4:
413
- error_3 = _b.sent();
414
- spinner.fail('Comparison failed');
415
- output.printError(error_3 instanceof Error ? error_3.message : String(error_3));
416
- return [2 /*return*/, { success: false, exitCode: 1 }];
417
- case 5: return [2 /*return*/];
319
+ similarity = dot;
320
+ break;
321
+ }
322
+ case 'cosine':
323
+ default: {
324
+ // Cosine similarity
325
+ similarity = cosineSimilarity(emb1.embedding, emb2.embedding);
326
+ }
418
327
  }
419
- });
420
- }); }
328
+ spinner.succeed(`Comparison complete (${embedTime}ms)`);
329
+ output.writeln();
330
+ output.printBox([
331
+ `Text 1: "${text1.substring(0, 30)}${text1.length > 30 ? '...' : ''}"`,
332
+ `Text 2: "${text2.substring(0, 30)}${text2.length > 30 ? '...' : ''}"`,
333
+ ``,
334
+ `Model: ${emb1.model} (${emb1.dimensions}-dim)`,
335
+ `Metric: ${metric}`,
336
+ `Similarity: ${similarity > 0.8 ? output.success(similarity.toFixed(4)) : similarity > 0.5 ? output.warning(similarity.toFixed(4)) : output.dim(similarity.toFixed(4))}`,
337
+ ``,
338
+ `Interpretation: ${similarity > 0.8 ? 'Highly similar' : similarity > 0.5 ? 'Moderately similar' : 'Dissimilar'}`,
339
+ ].join('\n'), 'Result');
340
+ return { success: true, data: { similarity, metric, embedTime } };
341
+ }
342
+ catch (error) {
343
+ spinner.fail('Comparison failed');
344
+ output.printError(error instanceof Error ? error.message : String(error));
345
+ return { success: false, exitCode: 1 };
346
+ }
347
+ },
421
348
  };
422
349
  // Collections subcommand - REAL implementation using sql.js
423
- var collectionsCommand = {
350
+ const collectionsCommand = {
424
351
  name: 'collections',
425
352
  description: 'Manage embedding collections (namespaces)',
426
353
  options: [
427
- { name: 'action', short: 'a', type: 'string', description: 'Action: list, stats', "default": 'list' },
354
+ { name: 'action', short: 'a', type: 'string', description: 'Action: list, stats', default: 'list' },
428
355
  { name: 'name', short: 'n', type: 'string', description: 'Namespace name' },
429
- { name: 'db-path', type: 'string', description: 'Database path', "default": '.swarm/memory.db' },
356
+ { name: 'db-path', type: 'string', description: 'Database path', default: '.swarm/memory.db' },
430
357
  ],
431
358
  examples: [
432
359
  { command: 'claude-flow embeddings collections', description: 'List collections' },
433
360
  { command: 'claude-flow embeddings collections -a stats', description: 'Show detailed stats' },
434
361
  ],
435
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
436
- var action, dbPath, fs, path, fullDbPath, initSqlJs, SQL, fileBuffer, db, statsQuery, indexQuery, collections, _i, _a, row, _b, namespace, total, withEmbeddings, avgDims, contentSize, error_4;
437
- var _c;
438
- return __generator(this, function (_d) {
439
- switch (_d.label) {
440
- case 0:
441
- action = ctx.flags.action || 'list';
442
- dbPath = ctx.flags['db-path'] || '.swarm/memory.db';
443
- output.writeln();
444
- output.writeln(output.bold('Embedding Collections (Namespaces)'));
445
- output.writeln(output.dim('─'.repeat(60)));
446
- _d.label = 1;
447
- case 1:
448
- _d.trys.push([1, 6, , 7]);
449
- return [4 /*yield*/, import('fs')];
450
- case 2:
451
- fs = _d.sent();
452
- return [4 /*yield*/, import('path')];
453
- case 3:
454
- path = _d.sent();
455
- fullDbPath = path.resolve(process.cwd(), dbPath);
456
- // Check if database exists
457
- if (!fs.existsSync(fullDbPath)) {
458
- output.printWarning('No database found');
459
- output.printInfo('Run: claude-flow memory init');
460
- output.writeln();
461
- output.writeln(output.dim('No collections yet - initialize memory first'));
462
- return [2 /*return*/, { success: true, data: [] }];
463
- }
464
- return [4 /*yield*/, import('sql.js')];
465
- case 4:
466
- initSqlJs = (_d.sent())["default"];
467
- return [4 /*yield*/, initSqlJs()];
468
- case 5:
469
- SQL = _d.sent();
470
- fileBuffer = fs.readFileSync(fullDbPath);
471
- db = new SQL.Database(fileBuffer);
472
- statsQuery = db.exec("\n SELECT\n namespace,\n COUNT(*) as total_entries,\n SUM(CASE WHEN embedding IS NOT NULL THEN 1 ELSE 0 END) as with_embeddings,\n AVG(embedding_dimensions) as avg_dimensions,\n SUM(LENGTH(content)) as total_content_size\n FROM memory_entries\n WHERE status = 'active'\n GROUP BY namespace\n ORDER BY total_entries DESC\n ");
473
- indexQuery = db.exec("SELECT name, dimensions, hnsw_m FROM vector_indexes");
474
- collections = [];
475
- if ((_c = statsQuery[0]) === null || _c === void 0 ? void 0 : _c.values) {
476
- for (_i = 0, _a = statsQuery[0].values; _i < _a.length; _i++) {
477
- row = _a[_i];
478
- _b = row, namespace = _b[0], total = _b[1], withEmbeddings = _b[2], avgDims = _b[3], contentSize = _b[4];
479
- collections.push({
480
- name: namespace || 'default',
481
- vectors: withEmbeddings.toLocaleString(),
482
- total: total.toLocaleString(),
483
- dimensions: avgDims ? Math.round(avgDims).toString() : '-',
484
- index: withEmbeddings > 0 ? 'HNSW' : 'None',
485
- size: formatBytes(contentSize || 0)
486
- });
487
- }
488
- }
489
- db.close();
490
- if (collections.length === 0) {
491
- output.printWarning('No collections found');
492
- output.writeln();
493
- output.writeln(output.dim('Store some data first:'));
494
- output.writeln(output.highlight(' claude-flow memory store -k "key" --value "data"'));
495
- return [2 /*return*/, { success: true, data: [] }];
496
- }
497
- output.printTable({
498
- columns: [
499
- { key: 'name', header: 'Namespace', width: 18 },
500
- { key: 'total', header: 'Entries', width: 10 },
501
- { key: 'vectors', header: 'Vectors', width: 10 },
502
- { key: 'dimensions', header: 'Dims', width: 8 },
503
- { key: 'index', header: 'Index', width: 8 },
504
- { key: 'size', header: 'Size', width: 10 },
505
- ],
506
- data: collections
362
+ action: async (ctx) => {
363
+ const action = ctx.flags.action || 'list';
364
+ const dbPath = ctx.flags['db-path'] || '.swarm/memory.db';
365
+ output.writeln();
366
+ output.writeln(output.bold('Embedding Collections (Namespaces)'));
367
+ output.writeln(output.dim('─'.repeat(60)));
368
+ try {
369
+ const fs = await import('fs');
370
+ const path = await import('path');
371
+ const fullDbPath = path.resolve(process.cwd(), dbPath);
372
+ // Check if database exists
373
+ if (!fs.existsSync(fullDbPath)) {
374
+ output.printWarning('No database found');
375
+ output.printInfo('Run: claude-flow memory init');
376
+ output.writeln();
377
+ output.writeln(output.dim('No collections yet - initialize memory first'));
378
+ return { success: true, data: [] };
379
+ }
380
+ // Load sql.js and query real data
381
+ const initSqlJs = (await import('sql.js')).default;
382
+ const SQL = await initSqlJs();
383
+ const fileBuffer = fs.readFileSync(fullDbPath);
384
+ const db = new SQL.Database(fileBuffer);
385
+ // Get collection stats from database
386
+ const statsQuery = db.exec(`
387
+ SELECT
388
+ namespace,
389
+ COUNT(*) as total_entries,
390
+ SUM(CASE WHEN embedding IS NOT NULL THEN 1 ELSE 0 END) as with_embeddings,
391
+ AVG(embedding_dimensions) as avg_dimensions,
392
+ SUM(LENGTH(content)) as total_content_size
393
+ FROM memory_entries
394
+ WHERE status = 'active'
395
+ GROUP BY namespace
396
+ ORDER BY total_entries DESC
397
+ `);
398
+ // Get vector index info
399
+ const indexQuery = db.exec(`SELECT name, dimensions, hnsw_m FROM vector_indexes`);
400
+ const collections = [];
401
+ if (statsQuery[0]?.values) {
402
+ for (const row of statsQuery[0].values) {
403
+ const [namespace, total, withEmbeddings, avgDims, contentSize] = row;
404
+ collections.push({
405
+ name: namespace || 'default',
406
+ vectors: withEmbeddings.toLocaleString(),
407
+ total: total.toLocaleString(),
408
+ dimensions: avgDims ? Math.round(avgDims).toString() : '-',
409
+ index: withEmbeddings > 0 ? 'HNSW' : 'None',
410
+ size: formatBytes(contentSize || 0)
507
411
  });
508
- output.writeln();
509
- output.writeln(output.dim("Database: " + fullDbPath));
510
- return [2 /*return*/, { success: true, data: collections }];
511
- case 6:
512
- error_4 = _d.sent();
513
- output.printError(error_4 instanceof Error ? error_4.message : String(error_4));
514
- return [2 /*return*/, { success: false, exitCode: 1 }];
515
- case 7: return [2 /*return*/];
412
+ }
516
413
  }
517
- });
518
- }); }
414
+ db.close();
415
+ if (collections.length === 0) {
416
+ output.printWarning('No collections found');
417
+ output.writeln();
418
+ output.writeln(output.dim('Store some data first:'));
419
+ output.writeln(output.highlight(' claude-flow memory store -k "key" --value "data"'));
420
+ return { success: true, data: [] };
421
+ }
422
+ output.printTable({
423
+ columns: [
424
+ { key: 'name', header: 'Namespace', width: 18 },
425
+ { key: 'total', header: 'Entries', width: 10 },
426
+ { key: 'vectors', header: 'Vectors', width: 10 },
427
+ { key: 'dimensions', header: 'Dims', width: 8 },
428
+ { key: 'index', header: 'Index', width: 8 },
429
+ { key: 'size', header: 'Size', width: 10 },
430
+ ],
431
+ data: collections,
432
+ });
433
+ output.writeln();
434
+ output.writeln(output.dim(`Database: ${fullDbPath}`));
435
+ return { success: true, data: collections };
436
+ }
437
+ catch (error) {
438
+ output.printError(error instanceof Error ? error.message : String(error));
439
+ return { success: false, exitCode: 1 };
440
+ }
441
+ },
519
442
  };
520
443
  // Helper: Format bytes to human readable
521
444
  function formatBytes(bytes) {
522
445
  if (bytes === 0)
523
446
  return '0 B';
524
- var k = 1024;
525
- var sizes = ['B', 'KB', 'MB', 'GB'];
526
- var i = Math.floor(Math.log(bytes) / Math.log(k));
447
+ const k = 1024;
448
+ const sizes = ['B', 'KB', 'MB', 'GB'];
449
+ const i = Math.floor(Math.log(bytes) / Math.log(k));
527
450
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];
528
451
  }
529
452
  // Index subcommand - REAL HNSW stats
530
- var indexCommand = {
453
+ const indexCommand = {
531
454
  name: 'index',
532
455
  description: 'Manage HNSW indexes',
533
456
  options: [
534
- { name: 'action', short: 'a', type: 'string', description: 'Action: build, rebuild, status, optimize', "default": 'status' },
457
+ { name: 'action', short: 'a', type: 'string', description: 'Action: build, rebuild, status, optimize', default: 'status' },
535
458
  { name: 'collection', short: 'c', type: 'string', description: 'Collection/namespace name' },
536
- { name: 'ef-construction', type: 'number', description: 'HNSW ef_construction parameter', "default": '200' },
537
- { name: 'm', type: 'number', description: 'HNSW M parameter', "default": '16' },
459
+ { name: 'ef-construction', type: 'number', description: 'HNSW ef_construction parameter', default: '200' },
460
+ { name: 'm', type: 'number', description: 'HNSW M parameter', default: '16' },
538
461
  ],
539
462
  examples: [
540
463
  { command: 'claude-flow embeddings index', description: 'Show index status' },
541
464
  { command: 'claude-flow embeddings index -a build -c documents', description: 'Build index' },
542
465
  { command: 'claude-flow embeddings index -a optimize -c patterns', description: 'Optimize index' },
543
466
  ],
544
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
545
- var action, collection, efConstruction, m, _a, getHNSWStatus, getHNSWIndex, searchHNSWIndex, generateEmbedding, status, testQuery, start, results, searchTime, bruteForceEstimate, speedup, spinner, index, newStatus, error_5;
546
- return __generator(this, function (_b) {
547
- switch (_b.label) {
548
- case 0:
549
- action = ctx.flags.action || 'status';
550
- collection = ctx.flags.collection;
551
- efConstruction = parseInt(ctx.flags['ef-construction'] || '200', 10);
552
- m = parseInt(ctx.flags.m || '16', 10);
553
- output.writeln();
554
- output.writeln(output.bold("HNSW Index: " + action));
555
- output.writeln(output.dim('─'.repeat(50)));
556
- _b.label = 1;
557
- case 1:
558
- _b.trys.push([1, 10, , 11]);
559
- return [4 /*yield*/, import('../memory/memory-initializer.js')];
560
- case 2:
561
- _a = _b.sent(), getHNSWStatus = _a.getHNSWStatus, getHNSWIndex = _a.getHNSWIndex, searchHNSWIndex = _a.searchHNSWIndex, generateEmbedding = _a.generateEmbedding;
562
- status = getHNSWStatus();
563
- if (!(action === 'status')) return [3 /*break*/, 7];
564
- output.writeln();
565
- output.printTable({
566
- columns: [
567
- { key: 'metric', header: 'Metric', width: 24 },
568
- { key: 'value', header: 'Value', width: 30 },
569
- ],
570
- data: [
571
- { metric: 'HNSW Available', value: status.available ? output.success('Yes (@ruvector/core)') : output.warning('No') },
572
- { metric: 'Index Initialized', value: status.initialized ? output.success('Yes') : output.dim('No') },
573
- { metric: 'Vector Count', value: status.entryCount.toLocaleString() },
574
- { metric: 'Dimensions', value: String(status.dimensions) },
575
- { metric: 'Distance Metric', value: 'Cosine' },
576
- { metric: 'HNSW M', value: String(m) },
577
- { metric: 'ef_construction', value: String(efConstruction) },
578
- ]
579
- });
580
- if (!(status.available && status.entryCount > 0)) return [3 /*break*/, 5];
467
+ action: async (ctx) => {
468
+ const action = ctx.flags.action || 'status';
469
+ const collection = ctx.flags.collection;
470
+ const efConstruction = parseInt(ctx.flags['ef-construction'] || '200', 10);
471
+ const m = parseInt(ctx.flags.m || '16', 10);
472
+ output.writeln();
473
+ output.writeln(output.bold(`HNSW Index: ${action}`));
474
+ output.writeln(output.dim(''.repeat(50)));
475
+ try {
476
+ const { getHNSWStatus, getHNSWIndex, searchHNSWIndex, generateEmbedding } = await import('../memory/memory-initializer.js');
477
+ // Get real HNSW status
478
+ const status = getHNSWStatus();
479
+ if (action === 'status') {
480
+ output.writeln();
481
+ output.printTable({
482
+ columns: [
483
+ { key: 'metric', header: 'Metric', width: 24 },
484
+ { key: 'value', header: 'Value', width: 30 },
485
+ ],
486
+ data: [
487
+ { metric: 'HNSW Available', value: status.available ? output.success('Yes (@ruvector/core)') : output.warning('No') },
488
+ { metric: 'Index Initialized', value: status.initialized ? output.success('Yes') : output.dim('No') },
489
+ { metric: 'Vector Count', value: status.entryCount.toLocaleString() },
490
+ { metric: 'Dimensions', value: String(status.dimensions) },
491
+ { metric: 'Distance Metric', value: 'Cosine' },
492
+ { metric: 'HNSW M', value: String(m) },
493
+ { metric: 'ef_construction', value: String(efConstruction) },
494
+ ],
495
+ });
496
+ if (status.available && status.entryCount > 0) {
581
497
  // Run a quick benchmark to show actual performance
582
498
  output.writeln();
583
499
  output.writeln(output.dim('Running quick performance test...'));
584
- return [4 /*yield*/, generateEmbedding('test performance query')];
585
- case 3:
586
- testQuery = _b.sent();
587
- start = performance.now();
588
- return [4 /*yield*/, searchHNSWIndex(testQuery.embedding, { k: 10 })];
589
- case 4:
590
- results = _b.sent();
591
- searchTime = performance.now() - start;
592
- bruteForceEstimate = status.entryCount * 0.0005;
593
- speedup = bruteForceEstimate / (searchTime / 1000);
500
+ const testQuery = await generateEmbedding('test performance query');
501
+ const start = performance.now();
502
+ const results = await searchHNSWIndex(testQuery.embedding, { k: 10 });
503
+ const searchTime = performance.now() - start;
504
+ // Estimate brute force time (0.5μs per comparison)
505
+ const bruteForceEstimate = status.entryCount * 0.0005;
506
+ const speedup = bruteForceEstimate / (searchTime / 1000);
594
507
  output.writeln();
595
508
  output.printBox([
596
- "Performance (n=" + status.entryCount + "):",
597
- " HNSW Search: " + searchTime.toFixed(2) + "ms",
598
- " Brute Force Est: " + (bruteForceEstimate * 1000).toFixed(2) + "ms",
599
- " Speedup: ~" + Math.round(speedup) + "x",
600
- " Results: " + ((results === null || results === void 0 ? void 0 : results.length) || 0) + " matches",
509
+ `Performance (n=${status.entryCount}):`,
510
+ ` HNSW Search: ${searchTime.toFixed(2)}ms`,
511
+ ` Brute Force Est: ${(bruteForceEstimate * 1000).toFixed(2)}ms`,
512
+ ` Speedup: ~${Math.round(speedup)}x`,
513
+ ` Results: ${results?.length || 0} matches`,
601
514
  ].join('\n'), 'Search Performance');
602
- return [3 /*break*/, 6];
603
- case 5:
604
- if (!status.available) {
605
- output.writeln();
606
- output.printWarning('@ruvector/core not available');
607
- output.printInfo('Install: npm install @ruvector/core');
608
- }
609
- else {
610
- output.writeln();
611
- output.printInfo('Index is empty. Store some entries to populate it.');
612
- output.printInfo('Run: claude-flow memory store -k "key" --value "text"');
613
- }
614
- _b.label = 6;
615
- case 6: return [2 /*return*/, { success: true, data: status }];
616
- case 7:
617
- if (!(action === 'build' || action === 'rebuild')) return [3 /*break*/, 9];
618
- if (!collection) {
619
- output.printError('Collection is required for build/rebuild');
620
- return [2 /*return*/, { success: false, exitCode: 1 }];
621
- }
622
- spinner = output.createSpinner({ text: action + "ing index for " + collection + "...", spinner: 'dots' });
623
- spinner.start();
624
- return [4 /*yield*/, getHNSWIndex({ forceRebuild: action === 'rebuild' })];
625
- case 8:
626
- index = _b.sent();
627
- if (!index) {
628
- spinner.fail('@ruvector/core not available');
629
- output.printInfo('Install: npm install @ruvector/core');
630
- return [2 /*return*/, { success: false, exitCode: 1 }];
631
- }
632
- spinner.succeed("Index " + action + " complete");
633
- newStatus = getHNSWStatus();
515
+ }
516
+ else if (!status.available) {
634
517
  output.writeln();
635
- output.printBox([
636
- "Collection: " + collection,
637
- "Action: " + action,
638
- "Vectors: " + newStatus.entryCount,
639
- "Dimensions: " + newStatus.dimensions,
640
- "M: " + m,
641
- "ef_construction: " + efConstruction,
642
- ].join('\n'), 'Index Built');
643
- return [2 /*return*/, { success: true, data: newStatus }];
644
- case 9:
645
- // Optimize action
646
- if (action === 'optimize') {
647
- output.printInfo('HNSW index is optimized automatically during search');
648
- output.printInfo('No manual optimization required');
649
- return [2 /*return*/, { success: true }];
650
- }
651
- output.printError("Unknown action: " + action);
652
- return [2 /*return*/, { success: false, exitCode: 1 }];
653
- case 10:
654
- error_5 = _b.sent();
655
- output.printError(error_5 instanceof Error ? error_5.message : String(error_5));
656
- return [2 /*return*/, { success: false, exitCode: 1 }];
657
- case 11: return [2 /*return*/];
518
+ output.printWarning('@ruvector/core not available');
519
+ output.printInfo('Install: npm install @ruvector/core');
520
+ }
521
+ else {
522
+ output.writeln();
523
+ output.printInfo('Index is empty. Store some entries to populate it.');
524
+ output.printInfo('Run: claude-flow memory store -k "key" --value "text"');
525
+ }
526
+ return { success: true, data: status };
658
527
  }
659
- });
660
- }); }
528
+ // Build/Rebuild action
529
+ if (action === 'build' || action === 'rebuild') {
530
+ if (!collection) {
531
+ output.printError('Collection is required for build/rebuild');
532
+ return { success: false, exitCode: 1 };
533
+ }
534
+ const spinner = output.createSpinner({ text: `${action}ing index for ${collection}...`, spinner: 'dots' });
535
+ spinner.start();
536
+ // Force rebuild if requested
537
+ const index = await getHNSWIndex({ forceRebuild: action === 'rebuild' });
538
+ if (!index) {
539
+ spinner.fail('@ruvector/core not available');
540
+ output.printInfo('Install: npm install @ruvector/core');
541
+ return { success: false, exitCode: 1 };
542
+ }
543
+ spinner.succeed(`Index ${action} complete`);
544
+ const newStatus = getHNSWStatus();
545
+ output.writeln();
546
+ output.printBox([
547
+ `Collection: ${collection}`,
548
+ `Action: ${action}`,
549
+ `Vectors: ${newStatus.entryCount}`,
550
+ `Dimensions: ${newStatus.dimensions}`,
551
+ `M: ${m}`,
552
+ `ef_construction: ${efConstruction}`,
553
+ ].join('\n'), 'Index Built');
554
+ return { success: true, data: newStatus };
555
+ }
556
+ // Optimize action
557
+ if (action === 'optimize') {
558
+ output.printInfo('HNSW index is optimized automatically during search');
559
+ output.printInfo('No manual optimization required');
560
+ return { success: true };
561
+ }
562
+ output.printError(`Unknown action: ${action}`);
563
+ return { success: false, exitCode: 1 };
564
+ }
565
+ catch (error) {
566
+ output.printError(error instanceof Error ? error.message : String(error));
567
+ return { success: false, exitCode: 1 };
568
+ }
569
+ },
661
570
  };
662
571
  // Init subcommand - Initialize ONNX models and hyperbolic config
663
- var initCommand = {
572
+ const initCommand = {
664
573
  name: 'init',
665
574
  description: 'Initialize embedding subsystem with ONNX model and hyperbolic config',
666
575
  options: [
667
- { name: 'model', short: 'm', type: 'string', description: 'ONNX model ID', "default": 'all-MiniLM-L6-v2' },
668
- { name: 'hyperbolic', type: 'boolean', description: 'Enable hyperbolic (Poincaré ball) embeddings', "default": 'true' },
669
- { name: 'curvature', short: 'c', type: 'string', description: 'Poincaré ball curvature (use --curvature=-1 for negative)', "default": '-1' },
670
- { name: 'download', short: 'd', type: 'boolean', description: 'Download model during init', "default": 'true' },
671
- { name: 'cache-size', type: 'string', description: 'LRU cache entries', "default": '256' },
672
- { name: 'force', short: 'f', type: 'boolean', description: 'Overwrite existing configuration', "default": 'false' },
576
+ { name: 'model', short: 'm', type: 'string', description: 'ONNX model ID', default: 'all-MiniLM-L6-v2' },
577
+ { name: 'hyperbolic', type: 'boolean', description: 'Enable hyperbolic (Poincaré ball) embeddings', default: 'true' },
578
+ { name: 'curvature', short: 'c', type: 'string', description: 'Poincaré ball curvature (use --curvature=-1 for negative)', default: '-1' },
579
+ { name: 'download', short: 'd', type: 'boolean', description: 'Download model during init', default: 'true' },
580
+ { name: 'cache-size', type: 'string', description: 'LRU cache entries', default: '256' },
581
+ { name: 'force', short: 'f', type: 'boolean', description: 'Overwrite existing configuration', default: 'false' },
673
582
  ],
674
583
  examples: [
675
584
  { command: 'claude-flow embeddings init', description: 'Initialize with defaults' },
@@ -678,243 +587,219 @@ var initCommand = {
678
587
  { command: 'claude-flow embeddings init --curvature=-0.5', description: 'Custom curvature (use = for negative)' },
679
588
  { command: 'claude-flow embeddings init --force', description: 'Overwrite existing config' },
680
589
  ],
681
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
682
- var model, hyperbolic, download, force, curvatureRaw, curvature, cacheSizeRaw, cacheSize, fs, path, configDir, modelDir, configPath, spinner_1, embeddings, dimension, config, error_6;
683
- return __generator(this, function (_a) {
684
- switch (_a.label) {
685
- case 0:
686
- model = ctx.flags.model || 'all-MiniLM-L6-v2';
687
- hyperbolic = ctx.flags.hyperbolic !== false;
688
- download = ctx.flags.download !== false;
689
- force = ctx.flags.force === true;
690
- curvatureRaw = ctx.flags.curvature || '-1';
691
- curvature = parseFloat(curvatureRaw);
692
- cacheSizeRaw = (ctx.flags['cache-size'] || ctx.flags.cacheSize || '256');
693
- cacheSize = parseInt(cacheSizeRaw, 10);
694
- output.writeln();
695
- output.writeln(output.bold('Initialize Embedding Subsystem'));
696
- output.writeln(output.dim(''.repeat(55)));
697
- _a.label = 1;
698
- case 1:
699
- _a.trys.push([1, 9, , 10]);
700
- return [4 /*yield*/, import('fs')];
701
- case 2:
702
- fs = _a.sent();
703
- return [4 /*yield*/, import('path')];
704
- case 3:
705
- path = _a.sent();
706
- configDir = path.join(process.cwd(), '.claude-flow');
707
- modelDir = path.join(configDir, 'models');
708
- configPath = path.join(configDir, 'embeddings.json');
709
- // Check for existing config
710
- if (fs.existsSync(configPath) && !force) {
711
- output.printWarning('Embeddings already initialized');
712
- output.printInfo("Config exists: " + configPath);
713
- output.writeln();
714
- output.writeln(output.dim('Use --force to overwrite existing configuration'));
715
- return [2 /*return*/, { success: false, exitCode: 1 }];
716
- }
717
- spinner_1 = output.createSpinner({ text: 'Initializing...', spinner: 'dots' });
718
- spinner_1.start();
719
- if (!fs.existsSync(configDir)) {
720
- fs.mkdirSync(configDir, { recursive: true });
721
- }
722
- if (!fs.existsSync(modelDir)) {
723
- fs.mkdirSync(modelDir, { recursive: true });
724
- }
725
- if (!download) return [3 /*break*/, 8];
726
- spinner_1.setText("Downloading ONNX model: " + model + "...");
727
- return [4 /*yield*/, getEmbeddings()];
728
- case 4:
729
- embeddings = _a.sent();
730
- if (!embeddings) return [3 /*break*/, 6];
731
- return [4 /*yield*/, embeddings.downloadEmbeddingModel(model, modelDir, function (p) {
732
- spinner_1.setText("Downloading " + model + "... " + p.percent.toFixed(0) + "%");
733
- })];
734
- case 5:
735
- _a.sent();
736
- return [3 /*break*/, 8];
737
- case 6:
738
- // Simulate download for when embeddings package not available
739
- return [4 /*yield*/, new Promise(function (r) { return setTimeout(r, 500); })];
740
- case 7:
590
+ action: async (ctx) => {
591
+ const model = ctx.flags.model || 'all-MiniLM-L6-v2';
592
+ const hyperbolic = ctx.flags.hyperbolic !== false;
593
+ const download = ctx.flags.download !== false;
594
+ const force = ctx.flags.force === true;
595
+ // Parse curvature - handle both kebab-case and direct value
596
+ const curvatureRaw = ctx.flags.curvature || '-1';
597
+ const curvature = parseFloat(curvatureRaw);
598
+ // Parse cache-size - check both kebab-case and camelCase
599
+ const cacheSizeRaw = (ctx.flags['cache-size'] || ctx.flags.cacheSize || '256');
600
+ const cacheSize = parseInt(cacheSizeRaw, 10);
601
+ output.writeln();
602
+ output.writeln(output.bold('Initialize Embedding Subsystem'));
603
+ output.writeln(output.dim('─'.repeat(55)));
604
+ try {
605
+ const fs = await import('fs');
606
+ const path = await import('path');
607
+ // Create directories
608
+ const configDir = path.join(process.cwd(), '.claude-flow');
609
+ const modelDir = path.join(configDir, 'models');
610
+ const configPath = path.join(configDir, 'embeddings.json');
611
+ // Check for existing config
612
+ if (fs.existsSync(configPath) && !force) {
613
+ output.printWarning('Embeddings already initialized');
614
+ output.printInfo(`Config exists: ${configPath}`);
615
+ output.writeln();
616
+ output.writeln(output.dim('Use --force to overwrite existing configuration'));
617
+ return { success: false, exitCode: 1 };
618
+ }
619
+ const spinner = output.createSpinner({ text: 'Initializing...', spinner: 'dots' });
620
+ spinner.start();
621
+ if (!fs.existsSync(configDir)) {
622
+ fs.mkdirSync(configDir, { recursive: true });
623
+ }
624
+ if (!fs.existsSync(modelDir)) {
625
+ fs.mkdirSync(modelDir, { recursive: true });
626
+ }
627
+ // Download model if requested
628
+ if (download) {
629
+ spinner.setText(`Downloading ONNX model: ${model}...`);
630
+ const embeddings = await getEmbeddings();
631
+ if (embeddings) {
632
+ await embeddings.downloadEmbeddingModel(model, modelDir, (p) => {
633
+ spinner.setText(`Downloading ${model}... ${p.percent.toFixed(0)}%`);
634
+ });
635
+ }
636
+ else {
741
637
  // Simulate download for when embeddings package not available
742
- _a.sent();
638
+ await new Promise(r => setTimeout(r, 500));
743
639
  output.writeln(output.dim(' (Simulated - @claude-flow/embeddings not installed)'));
744
- _a.label = 8;
745
- case 8:
746
- // Write embeddings config
747
- spinner_1.setText('Writing configuration...');
748
- dimension = model.includes('mpnet') ? 768 : 384;
749
- config = {
750
- model: model,
751
- modelPath: modelDir,
752
- dimension: dimension,
753
- cacheSize: cacheSize,
754
- hyperbolic: {
755
- enabled: hyperbolic,
756
- curvature: curvature,
757
- epsilon: 1e-15,
758
- maxNorm: 1 - 1e-5
759
- },
760
- neural: {
761
- enabled: true,
762
- driftThreshold: 0.3,
763
- decayRate: 0.01
764
- },
765
- initialized: new Date().toISOString()
766
- };
767
- fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
768
- spinner_1.succeed('Embedding subsystem initialized');
769
- output.writeln();
770
- output.printTable({
771
- columns: [
772
- { key: 'setting', header: 'Setting', width: 18 },
773
- { key: 'value', header: 'Value', width: 40 },
774
- ],
775
- data: [
776
- { setting: 'Model', value: model },
777
- { setting: 'Dimension', value: String(dimension) },
778
- { setting: 'Cache Size', value: String(cacheSize) + ' entries' },
779
- { setting: 'Hyperbolic', value: hyperbolic ? output.success('Enabled') + " (c=" + curvature + ")" : output.dim('Disabled') },
780
- { setting: 'Neural Substrate', value: output.success('Enabled') },
781
- { setting: 'Model Path', value: modelDir },
782
- { setting: 'Config', value: configPath },
783
- ]
784
- });
785
- output.writeln();
786
- if (hyperbolic) {
787
- output.printBox([
788
- 'Hyperbolic Embeddings (Poincaré Ball):',
789
- '• Better for hierarchical data (trees, taxonomies)',
790
- '• Exponential capacity in low dimensions',
791
- '• Distance preserves hierarchy structure',
792
- '',
793
- 'Use: embeddings hyperbolic -a convert',
794
- ].join('\n'), 'Hyperbolic Space');
795
- }
796
- output.writeln();
797
- output.writeln(output.dim('Next steps:'));
798
- output.printList([
799
- 'embeddings generate -t "test text" - Test embedding generation',
800
- 'embeddings search -q "query" - Semantic search',
801
- 'memory store -k key --value text - Store with auto-embedding',
802
- ]);
803
- return [2 /*return*/, { success: true, data: config }];
804
- case 9:
805
- error_6 = _a.sent();
806
- output.printError('Initialization failed: ' + (error_6 instanceof Error ? error_6.message : String(error_6)));
807
- return [2 /*return*/, { success: false, exitCode: 1 }];
808
- case 10: return [2 /*return*/];
640
+ }
809
641
  }
810
- });
811
- }); }
642
+ // Write embeddings config
643
+ spinner.setText('Writing configuration...');
644
+ const dimension = model.includes('mpnet') ? 768 : 384;
645
+ const config = {
646
+ model,
647
+ modelPath: modelDir,
648
+ dimension,
649
+ cacheSize,
650
+ hyperbolic: {
651
+ enabled: hyperbolic,
652
+ curvature,
653
+ epsilon: 1e-15,
654
+ maxNorm: 1 - 1e-5,
655
+ },
656
+ neural: {
657
+ enabled: true,
658
+ driftThreshold: 0.3,
659
+ decayRate: 0.01,
660
+ },
661
+ initialized: new Date().toISOString(),
662
+ };
663
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
664
+ spinner.succeed('Embedding subsystem initialized');
665
+ output.writeln();
666
+ output.printTable({
667
+ columns: [
668
+ { key: 'setting', header: 'Setting', width: 18 },
669
+ { key: 'value', header: 'Value', width: 40 },
670
+ ],
671
+ data: [
672
+ { setting: 'Model', value: model },
673
+ { setting: 'Dimension', value: String(dimension) },
674
+ { setting: 'Cache Size', value: String(cacheSize) + ' entries' },
675
+ { setting: 'Hyperbolic', value: hyperbolic ? `${output.success('Enabled')} (c=${curvature})` : output.dim('Disabled') },
676
+ { setting: 'Neural Substrate', value: output.success('Enabled') },
677
+ { setting: 'Model Path', value: modelDir },
678
+ { setting: 'Config', value: configPath },
679
+ ],
680
+ });
681
+ output.writeln();
682
+ if (hyperbolic) {
683
+ output.printBox([
684
+ 'Hyperbolic Embeddings (Poincaré Ball):',
685
+ '• Better for hierarchical data (trees, taxonomies)',
686
+ '• Exponential capacity in low dimensions',
687
+ '• Distance preserves hierarchy structure',
688
+ '',
689
+ 'Use: embeddings hyperbolic -a convert',
690
+ ].join('\n'), 'Hyperbolic Space');
691
+ }
692
+ output.writeln();
693
+ output.writeln(output.dim('Next steps:'));
694
+ output.printList([
695
+ 'embeddings generate -t "test text" - Test embedding generation',
696
+ 'embeddings search -q "query" - Semantic search',
697
+ 'memory store -k key --value text - Store with auto-embedding',
698
+ ]);
699
+ return { success: true, data: config };
700
+ }
701
+ catch (error) {
702
+ output.printError('Initialization failed: ' + (error instanceof Error ? error.message : String(error)));
703
+ return { success: false, exitCode: 1 };
704
+ }
705
+ },
812
706
  };
813
707
  // Providers subcommand
814
- var providersCommand = {
708
+ const providersCommand = {
815
709
  name: 'providers',
816
710
  description: 'List available embedding providers',
817
711
  options: [],
818
712
  examples: [
819
713
  { command: 'claude-flow embeddings providers', description: 'List providers' },
820
714
  ],
821
- action: function () { return __awaiter(void 0, void 0, Promise, function () {
822
- return __generator(this, function (_a) {
823
- output.writeln();
824
- output.writeln(output.bold('Embedding Providers'));
825
- output.writeln(output.dim('─'.repeat(70)));
826
- output.printTable({
827
- columns: [
828
- { key: 'provider', header: 'Provider', width: 18 },
829
- { key: 'model', header: 'Model', width: 25 },
830
- { key: 'dims', header: 'Dims', width: 8 },
831
- { key: 'type', header: 'Type', width: 10 },
832
- { key: 'status', header: 'Status', width: 12 },
833
- ],
834
- data: [
835
- { provider: 'OpenAI', model: 'text-embedding-3-small', dims: '1536', type: 'Cloud', status: output.success('Ready') },
836
- { provider: 'OpenAI', model: 'text-embedding-3-large', dims: '3072', type: 'Cloud', status: output.success('Ready') },
837
- { provider: 'Transformers.js', model: 'all-MiniLM-L6-v2', dims: '384', type: 'Local', status: output.success('Ready') },
838
- { provider: 'Agentic Flow', model: 'ONNX optimized', dims: '384', type: 'Local', status: output.success('Ready') },
839
- { provider: 'Mock', model: 'mock-embedding', dims: '384', type: 'Dev', status: output.dim('Dev only') },
840
- ]
841
- });
842
- output.writeln();
843
- output.writeln(output.dim('Agentic Flow provider uses WASM SIMD for 75x faster inference'));
844
- return [2 /*return*/, { success: true }];
715
+ action: async () => {
716
+ output.writeln();
717
+ output.writeln(output.bold('Embedding Providers'));
718
+ output.writeln(output.dim(''.repeat(70)));
719
+ output.printTable({
720
+ columns: [
721
+ { key: 'provider', header: 'Provider', width: 18 },
722
+ { key: 'model', header: 'Model', width: 25 },
723
+ { key: 'dims', header: 'Dims', width: 8 },
724
+ { key: 'type', header: 'Type', width: 10 },
725
+ { key: 'status', header: 'Status', width: 12 },
726
+ ],
727
+ data: [
728
+ { provider: 'OpenAI', model: 'text-embedding-3-small', dims: '1536', type: 'Cloud', status: output.success('Ready') },
729
+ { provider: 'OpenAI', model: 'text-embedding-3-large', dims: '3072', type: 'Cloud', status: output.success('Ready') },
730
+ { provider: 'Transformers.js', model: 'all-MiniLM-L6-v2', dims: '384', type: 'Local', status: output.success('Ready') },
731
+ { provider: 'Agentic Flow', model: 'ONNX optimized', dims: '384', type: 'Local', status: output.success('Ready') },
732
+ { provider: 'Mock', model: 'mock-embedding', dims: '384', type: 'Dev', status: output.dim('Dev only') },
733
+ ],
845
734
  });
846
- }); }
735
+ output.writeln();
736
+ output.writeln(output.dim('Agentic Flow provider uses WASM SIMD for 75x faster inference'));
737
+ return { success: true };
738
+ },
847
739
  };
848
740
  // Chunk subcommand
849
- var chunkCommand = {
741
+ const chunkCommand = {
850
742
  name: 'chunk',
851
743
  description: 'Chunk text for embedding with overlap',
852
744
  options: [
853
745
  { name: 'text', short: 't', type: 'string', description: 'Text to chunk', required: true },
854
- { name: 'max-size', short: 's', type: 'number', description: 'Max chunk size in chars', "default": '512' },
855
- { name: 'overlap', short: 'o', type: 'number', description: 'Overlap between chunks', "default": '50' },
856
- { name: 'strategy', type: 'string', description: 'Strategy: character, sentence, paragraph, token', "default": 'sentence' },
746
+ { name: 'max-size', short: 's', type: 'number', description: 'Max chunk size in chars', default: '512' },
747
+ { name: 'overlap', short: 'o', type: 'number', description: 'Overlap between chunks', default: '50' },
748
+ { name: 'strategy', type: 'string', description: 'Strategy: character, sentence, paragraph, token', default: 'sentence' },
857
749
  { name: 'file', short: 'f', type: 'string', description: 'File to chunk (instead of text)' },
858
750
  ],
859
751
  examples: [
860
752
  { command: 'claude-flow embeddings chunk -t "Long text..." -s 256', description: 'Chunk with 256 char limit' },
861
753
  { command: 'claude-flow embeddings chunk -f doc.txt --strategy paragraph', description: 'Chunk file by paragraph' },
862
754
  ],
863
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
864
- var embeddings, text, maxSize, overlap, strategy, result;
865
- return __generator(this, function (_a) {
866
- switch (_a.label) {
867
- case 0: return [4 /*yield*/, getEmbeddings()];
868
- case 1:
869
- embeddings = _a.sent();
870
- text = ctx.flags.text || '';
871
- maxSize = parseInt(ctx.flags['max-size'] || '512', 10);
872
- overlap = parseInt(ctx.flags.overlap || '50', 10);
873
- strategy = ctx.flags.strategy || 'sentence';
874
- output.writeln();
875
- output.writeln(output.bold('Document Chunking'));
876
- output.writeln(output.dim('─'.repeat(50)));
877
- if (!embeddings) {
878
- output.printWarning('@claude-flow/embeddings not installed, showing preview');
879
- output.writeln();
880
- output.printBox([
881
- "Strategy: " + strategy,
882
- "Max Size: " + maxSize + " chars",
883
- "Overlap: " + overlap + " chars",
884
- "",
885
- "Estimated chunks: " + Math.ceil(text.length / (maxSize - overlap)),
886
- ].join('\n'), 'Chunking Preview');
887
- return [2 /*return*/, { success: true }];
888
- }
889
- result = embeddings.chunkText(text, { maxChunkSize: maxSize, overlap: overlap, strategy: strategy });
890
- output.writeln();
891
- output.printTable({
892
- columns: [
893
- { key: 'idx', header: '#', width: 5 },
894
- { key: 'length', header: 'Chars', width: 8 },
895
- { key: 'tokens', header: 'Tokens', width: 8 },
896
- { key: 'preview', header: 'Preview', width: 45 },
897
- ],
898
- data: result.chunks.map(function (c, i) { return ({
899
- idx: String(i + 1),
900
- length: String(c.length),
901
- tokens: String(c.tokenCount),
902
- preview: c.text.substring(0, 42) + (c.text.length > 42 ? '...' : '')
903
- }); })
904
- });
905
- output.writeln();
906
- output.writeln(output.dim("Total: " + result.totalChunks + " chunks from " + result.originalLength + " chars"));
907
- return [2 /*return*/, { success: true }];
908
- }
755
+ action: async (ctx) => {
756
+ const embeddings = await getEmbeddings();
757
+ const text = ctx.flags.text || '';
758
+ const maxSize = parseInt(ctx.flags['max-size'] || '512', 10);
759
+ const overlap = parseInt(ctx.flags.overlap || '50', 10);
760
+ const strategy = ctx.flags.strategy || 'sentence';
761
+ output.writeln();
762
+ output.writeln(output.bold('Document Chunking'));
763
+ output.writeln(output.dim(''.repeat(50)));
764
+ if (!embeddings) {
765
+ output.printWarning('@claude-flow/embeddings not installed, showing preview');
766
+ output.writeln();
767
+ output.printBox([
768
+ `Strategy: ${strategy}`,
769
+ `Max Size: ${maxSize} chars`,
770
+ `Overlap: ${overlap} chars`,
771
+ ``,
772
+ `Estimated chunks: ${Math.ceil(text.length / (maxSize - overlap))}`,
773
+ ].join('\n'), 'Chunking Preview');
774
+ return { success: true };
775
+ }
776
+ const result = embeddings.chunkText(text, { maxChunkSize: maxSize, overlap, strategy: strategy });
777
+ output.writeln();
778
+ output.printTable({
779
+ columns: [
780
+ { key: 'idx', header: '#', width: 5 },
781
+ { key: 'length', header: 'Chars', width: 8 },
782
+ { key: 'tokens', header: 'Tokens', width: 8 },
783
+ { key: 'preview', header: 'Preview', width: 45 },
784
+ ],
785
+ data: result.chunks.map((c, i) => ({
786
+ idx: String(i + 1),
787
+ length: String(c.length),
788
+ tokens: String(c.tokenCount),
789
+ preview: c.text.substring(0, 42) + (c.text.length > 42 ? '...' : ''),
790
+ })),
909
791
  });
910
- }); }
792
+ output.writeln();
793
+ output.writeln(output.dim(`Total: ${result.totalChunks} chunks from ${result.originalLength} chars`));
794
+ return { success: true };
795
+ },
911
796
  };
912
797
  // Normalize subcommand
913
- var normalizeCommand = {
798
+ const normalizeCommand = {
914
799
  name: 'normalize',
915
800
  description: 'Normalize embedding vectors',
916
801
  options: [
917
- { name: 'type', short: 't', type: 'string', description: 'Type: l2, l1, minmax, zscore', "default": 'l2' },
802
+ { name: 'type', short: 't', type: 'string', description: 'Type: l2, l1, minmax, zscore', default: 'l2' },
918
803
  { name: 'input', short: 'i', type: 'string', description: 'Input embedding (JSON array)' },
919
804
  { name: 'check', short: 'c', type: 'boolean', description: 'Check if already normalized' },
920
805
  ],
@@ -922,159 +807,147 @@ var normalizeCommand = {
922
807
  { command: 'claude-flow embeddings normalize -i "[0.5, 0.3, 0.8]" -t l2', description: 'L2 normalize' },
923
808
  { command: 'claude-flow embeddings normalize --check -i "[...]"', description: 'Check if normalized' },
924
809
  ],
925
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
926
- var type, check;
927
- return __generator(this, function (_a) {
928
- type = ctx.flags.type || 'l2';
929
- check = ctx.flags.check;
930
- output.writeln();
931
- output.writeln(output.bold('Embedding Normalization'));
932
- output.writeln(output.dim('─'.repeat(50)));
933
- output.printTable({
934
- columns: [
935
- { key: 'type', header: 'Type', width: 12 },
936
- { key: 'formula', header: 'Formula', width: 30 },
937
- { key: 'use', header: 'Best For', width: 25 },
938
- ],
939
- data: [
940
- { type: output.success('L2'), formula: 'v / ||v||₂', use: 'Cosine similarity' },
941
- { type: 'L1', formula: 'v / ||v||₁', use: 'Sparse vectors' },
942
- { type: 'Min-Max', formula: '(v - min) / (max - min)', use: 'Bounded range [0,1]' },
943
- { type: 'Z-Score', formula: '(v - μ) / σ', use: 'Statistical analysis' },
944
- ]
945
- });
946
- output.writeln();
947
- output.writeln(output.dim("Selected: " + type.toUpperCase() + " normalization"));
948
- output.writeln(output.dim('Most embedding models pre-normalize with L2'));
949
- return [2 /*return*/, { success: true }];
810
+ action: async (ctx) => {
811
+ const type = ctx.flags.type || 'l2';
812
+ const check = ctx.flags.check;
813
+ output.writeln();
814
+ output.writeln(output.bold('Embedding Normalization'));
815
+ output.writeln(output.dim('─'.repeat(50)));
816
+ output.printTable({
817
+ columns: [
818
+ { key: 'type', header: 'Type', width: 12 },
819
+ { key: 'formula', header: 'Formula', width: 30 },
820
+ { key: 'use', header: 'Best For', width: 25 },
821
+ ],
822
+ data: [
823
+ { type: output.success('L2'), formula: 'v / ||v||₂', use: 'Cosine similarity' },
824
+ { type: 'L1', formula: 'v / ||v||₁', use: 'Sparse vectors' },
825
+ { type: 'Min-Max', formula: '(v - min) / (max - min)', use: 'Bounded range [0,1]' },
826
+ { type: 'Z-Score', formula: '(v - μ) / σ', use: 'Statistical analysis' },
827
+ ],
950
828
  });
951
- }); }
829
+ output.writeln();
830
+ output.writeln(output.dim(`Selected: ${type.toUpperCase()} normalization`));
831
+ output.writeln(output.dim('Most embedding models pre-normalize with L2'));
832
+ return { success: true };
833
+ },
952
834
  };
953
835
  // Hyperbolic subcommand
954
- var hyperbolicCommand = {
836
+ const hyperbolicCommand = {
955
837
  name: 'hyperbolic',
956
838
  description: 'Hyperbolic embedding operations (Poincaré ball)',
957
839
  options: [
958
- { name: 'action', short: 'a', type: 'string', description: 'Action: convert, distance, centroid', "default": 'convert' },
959
- { name: 'curvature', short: 'c', type: 'number', description: 'Hyperbolic curvature', "default": '-1' },
840
+ { name: 'action', short: 'a', type: 'string', description: 'Action: convert, distance, centroid', default: 'convert' },
841
+ { name: 'curvature', short: 'c', type: 'number', description: 'Hyperbolic curvature', default: '-1' },
960
842
  { name: 'input', short: 'i', type: 'string', description: 'Input embedding(s) JSON' },
961
843
  ],
962
844
  examples: [
963
845
  { command: 'claude-flow embeddings hyperbolic -a convert -i "[0.5, 0.3]"', description: 'Convert to Poincaré' },
964
846
  { command: 'claude-flow embeddings hyperbolic -a distance', description: 'Compute hyperbolic distance' },
965
847
  ],
966
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
967
- var action, curvature, inputJson, hyperbolic, input, vec, rawResult, result, _a, v1, v2, dist, vectors, rawCentroid, centroid, error_7;
968
- return __generator(this, function (_b) {
969
- switch (_b.label) {
970
- case 0:
971
- action = ctx.flags.action || 'convert';
972
- curvature = parseFloat(ctx.flags.curvature || '-1');
973
- inputJson = ctx.flags.input;
848
+ action: async (ctx) => {
849
+ const action = ctx.flags.action || 'convert';
850
+ const curvature = parseFloat(ctx.flags.curvature || '-1');
851
+ const inputJson = ctx.flags.input;
852
+ output.writeln();
853
+ output.writeln(output.bold('Hyperbolic Embeddings'));
854
+ output.writeln(output.dim('Poincaré Ball Model'));
855
+ output.writeln(output.dim('─'.repeat(50)));
856
+ // Try to import hyperbolic functions from embeddings package
857
+ try {
858
+ const hyperbolic = await import('@claude-flow/embeddings').then(m => m).catch(() => null);
859
+ if (!hyperbolic || !hyperbolic.euclideanToPoincare) {
860
+ output.printWarning('@claude-flow/embeddings hyperbolic module not available');
861
+ output.printInfo('Install with: npm install @claude-flow/embeddings');
862
+ return { success: false, exitCode: 1 };
863
+ }
864
+ if (!inputJson) {
865
+ // Show help if no input
866
+ output.printBox([
867
+ 'Hyperbolic embeddings excel at:',
868
+ '• Hierarchical data representation',
869
+ '• Tree-like structure preservation',
870
+ '• Low-dimensional hierarchy encoding',
871
+ '',
872
+ 'Actions: convert, distance, centroid',
873
+ '',
874
+ 'Examples:',
875
+ ' -a convert -i "[0.5, 0.3, 0.1]"',
876
+ ' -a distance -i "[[0.1,0.2],[0.3,0.4]]"',
877
+ ].join('\n'), 'Hyperbolic Geometry');
878
+ return { success: true };
879
+ }
880
+ // Parse input vector(s)
881
+ let input;
882
+ try {
883
+ input = JSON.parse(inputJson);
884
+ }
885
+ catch {
886
+ output.printError('Invalid JSON input. Use format: "[0.5, 0.3]" or "[[0.1,0.2],[0.3,0.4]]"');
887
+ return { success: false, exitCode: 1 };
888
+ }
889
+ switch (action) {
890
+ case 'convert': {
891
+ const vec = Array.isArray(input[0]) ? input[0] : input;
892
+ const rawResult = hyperbolic.euclideanToPoincare(vec, { curvature });
893
+ const result = Array.from(rawResult);
894
+ output.writeln(output.success('Euclidean → Poincaré conversion:'));
974
895
  output.writeln();
975
- output.writeln(output.bold('Hyperbolic Embeddings'));
976
- output.writeln(output.dim('Poincaré Ball Model'));
977
- output.writeln(output.dim('─'.repeat(50)));
978
- _b.label = 1;
979
- case 1:
980
- _b.trys.push([1, 3, , 4]);
981
- return [4 /*yield*/, import('@claude-flow/embeddings').then(function (m) { return m; })["catch"](function () { return null; })];
982
- case 2:
983
- hyperbolic = _b.sent();
984
- if (!hyperbolic || !hyperbolic.euclideanToPoincare) {
985
- output.printWarning('@claude-flow/embeddings hyperbolic module not available');
986
- output.printInfo('Install with: npm install @claude-flow/embeddings');
987
- return [2 /*return*/, { success: false, exitCode: 1 }];
896
+ output.writeln(`Input (Euclidean): [${vec.slice(0, 6).map(v => v.toFixed(4)).join(', ')}${vec.length > 6 ? ', ...' : ''}]`);
897
+ output.writeln(`Output (Poincaré): [${result.slice(0, 6).map(v => v.toFixed(4)).join(', ')}${result.length > 6 ? ', ...' : ''}]`);
898
+ output.writeln(`Curvature: ${curvature}`);
899
+ output.writeln(`Norm: ${Math.sqrt(result.reduce((s, v) => s + v * v, 0)).toFixed(6)} (must be < 1)`);
900
+ return { success: true, data: { result } };
901
+ }
902
+ case 'distance': {
903
+ if (!Array.isArray(input[0]) || input.length < 2) {
904
+ output.printError('Distance requires two vectors: "[[v1],[v2]]"');
905
+ return { success: false, exitCode: 1 };
988
906
  }
989
- if (!inputJson) {
990
- // Show help if no input
991
- output.printBox([
992
- 'Hyperbolic embeddings excel at:',
993
- '• Hierarchical data representation',
994
- '• Tree-like structure preservation',
995
- '• Low-dimensional hierarchy encoding',
996
- '',
997
- 'Actions: convert, distance, centroid',
998
- '',
999
- 'Examples:',
1000
- ' -a convert -i "[0.5, 0.3, 0.1]"',
1001
- ' -a distance -i "[[0.1,0.2],[0.3,0.4]]"',
1002
- ].join('\n'), 'Hyperbolic Geometry');
1003
- return [2 /*return*/, { success: true }];
1004
- }
1005
- input = void 0;
1006
- try {
1007
- input = JSON.parse(inputJson);
1008
- }
1009
- catch (_c) {
1010
- output.printError('Invalid JSON input. Use format: "[0.5, 0.3]" or "[[0.1,0.2],[0.3,0.4]]"');
1011
- return [2 /*return*/, { success: false, exitCode: 1 }];
1012
- }
1013
- switch (action) {
1014
- case 'convert': {
1015
- vec = Array.isArray(input[0]) ? input[0] : input;
1016
- rawResult = hyperbolic.euclideanToPoincare(vec, { curvature: curvature });
1017
- result = Array.from(rawResult);
1018
- output.writeln(output.success('Euclidean → Poincaré conversion:'));
1019
- output.writeln();
1020
- output.writeln("Input (Euclidean): [" + vec.slice(0, 6).map(function (v) { return v.toFixed(4); }).join(', ') + (vec.length > 6 ? ', ...' : '') + "]");
1021
- output.writeln("Output (Poincar\u00E9): [" + result.slice(0, 6).map(function (v) { return v.toFixed(4); }).join(', ') + (result.length > 6 ? ', ...' : '') + "]");
1022
- output.writeln("Curvature: " + curvature);
1023
- output.writeln("Norm: " + Math.sqrt(result.reduce(function (s, v) { return s + v * v; }, 0)).toFixed(6) + " (must be < 1)");
1024
- return [2 /*return*/, { success: true, data: { result: result } }];
1025
- }
1026
- case 'distance': {
1027
- if (!Array.isArray(input[0]) || input.length < 2) {
1028
- output.printError('Distance requires two vectors: "[[v1],[v2]]"');
1029
- return [2 /*return*/, { success: false, exitCode: 1 }];
1030
- }
1031
- _a = input, v1 = _a[0], v2 = _a[1];
1032
- dist = hyperbolic.hyperbolicDistance(v1, v2, { curvature: curvature });
1033
- output.writeln(output.success('Hyperbolic (geodesic) distance:'));
1034
- output.writeln();
1035
- output.writeln("Vector 1: [" + v1.slice(0, 4).map(function (v) { return v.toFixed(4); }).join(', ') + "...]");
1036
- output.writeln("Vector 2: [" + v2.slice(0, 4).map(function (v) { return v.toFixed(4); }).join(', ') + "...]");
1037
- output.writeln("Distance: " + dist.toFixed(6));
1038
- return [2 /*return*/, { success: true, data: { distance: dist } }];
1039
- }
1040
- case 'centroid': {
1041
- if (!Array.isArray(input[0])) {
1042
- output.printError('Centroid requires multiple vectors: "[[v1],[v2],...]"');
1043
- return [2 /*return*/, { success: false, exitCode: 1 }];
1044
- }
1045
- vectors = input;
1046
- rawCentroid = hyperbolic.hyperbolicCentroid(vectors, { curvature: curvature });
1047
- centroid = Array.from(rawCentroid);
1048
- output.writeln(output.success('Hyperbolic centroid (Fréchet mean):'));
1049
- output.writeln();
1050
- output.writeln("Input vectors: " + vectors.length);
1051
- output.writeln("Centroid: [" + centroid.slice(0, 6).map(function (v) { return v.toFixed(4); }).join(', ') + (centroid.length > 6 ? ', ...' : '') + "]");
1052
- return [2 /*return*/, { success: true, data: { centroid: centroid } }];
1053
- }
1054
- default:
1055
- output.printError("Unknown action: " + action + ". Use: convert, distance, centroid");
1056
- return [2 /*return*/, { success: false, exitCode: 1 }];
907
+ const [v1, v2] = input;
908
+ const dist = hyperbolic.hyperbolicDistance(v1, v2, { curvature });
909
+ output.writeln(output.success('Hyperbolic (geodesic) distance:'));
910
+ output.writeln();
911
+ output.writeln(`Vector 1: [${v1.slice(0, 4).map(v => v.toFixed(4)).join(', ')}...]`);
912
+ output.writeln(`Vector 2: [${v2.slice(0, 4).map(v => v.toFixed(4)).join(', ')}...]`);
913
+ output.writeln(`Distance: ${dist.toFixed(6)}`);
914
+ return { success: true, data: { distance: dist } };
915
+ }
916
+ case 'centroid': {
917
+ if (!Array.isArray(input[0])) {
918
+ output.printError('Centroid requires multiple vectors: "[[v1],[v2],...]"');
919
+ return { success: false, exitCode: 1 };
1057
920
  }
1058
- return [3 /*break*/, 4];
1059
- case 3:
1060
- error_7 = _b.sent();
1061
- output.printError("Hyperbolic operation failed: " + error_7.message);
1062
- return [2 /*return*/, { success: false, exitCode: 1 }];
1063
- case 4: return [2 /*return*/];
921
+ const vectors = input;
922
+ const rawCentroid = hyperbolic.hyperbolicCentroid(vectors, { curvature });
923
+ const centroid = Array.from(rawCentroid);
924
+ output.writeln(output.success('Hyperbolic centroid (Fréchet mean):'));
925
+ output.writeln();
926
+ output.writeln(`Input vectors: ${vectors.length}`);
927
+ output.writeln(`Centroid: [${centroid.slice(0, 6).map(v => v.toFixed(4)).join(', ')}${centroid.length > 6 ? ', ...' : ''}]`);
928
+ return { success: true, data: { centroid } };
929
+ }
930
+ default:
931
+ output.printError(`Unknown action: ${action}. Use: convert, distance, centroid`);
932
+ return { success: false, exitCode: 1 };
1064
933
  }
1065
- });
1066
- }); }
934
+ }
935
+ catch (error) {
936
+ output.printError(`Hyperbolic operation failed: ${error.message}`);
937
+ return { success: false, exitCode: 1 };
938
+ }
939
+ },
1067
940
  };
1068
941
  // Neural subcommand
1069
- var neuralCommand = {
942
+ const neuralCommand = {
1070
943
  name: 'neural',
1071
944
  description: 'Neural substrate features (RuVector integration)',
1072
945
  options: [
1073
- { name: 'feature', short: 'f', type: 'string', description: 'Feature: drift, memory, swarm, coherence, all', "default": 'all' },
946
+ { name: 'feature', short: 'f', type: 'string', description: 'Feature: drift, memory, swarm, coherence, all', default: 'all' },
1074
947
  { name: 'init', type: 'boolean', description: 'Initialize neural substrate with RuVector' },
1075
- { name: 'drift-threshold', type: 'string', description: 'Semantic drift detection threshold', "default": '0.3' },
1076
- { name: 'decay-rate', type: 'string', description: 'Memory decay rate (hippocampal dynamics)', "default": '0.01' },
1077
- { name: 'consolidation-interval', type: 'string', description: 'Memory consolidation interval (ms)', "default": '60000' },
948
+ { name: 'drift-threshold', type: 'string', description: 'Semantic drift detection threshold', default: '0.3' },
949
+ { name: 'decay-rate', type: 'string', description: 'Memory decay rate (hippocampal dynamics)', default: '0.01' },
950
+ { name: 'consolidation-interval', type: 'string', description: 'Memory consolidation interval (ms)', default: '60000' },
1078
951
  ],
1079
952
  examples: [
1080
953
  { command: 'claude-flow embeddings neural --init', description: 'Initialize RuVector substrate' },
@@ -1083,676 +956,555 @@ var neuralCommand = {
1083
956
  { command: 'claude-flow embeddings neural -f coherence', description: 'Safety & alignment monitoring' },
1084
957
  { command: 'claude-flow embeddings neural --drift-threshold=0.2', description: 'Custom drift threshold' },
1085
958
  ],
1086
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
1087
- var feature, init, driftThreshold, decayRate, consolidationInterval, fs, path, configPath, config, neuralConfig, features, ruvector;
1088
- return __generator(this, function (_a) {
1089
- switch (_a.label) {
1090
- case 0:
1091
- feature = ctx.flags.feature || 'all';
1092
- init = ctx.flags.init;
1093
- driftThreshold = parseFloat((ctx.flags['drift-threshold'] || ctx.flags.driftThreshold || '0.3'));
1094
- decayRate = parseFloat((ctx.flags['decay-rate'] || ctx.flags.decayRate || '0.01'));
1095
- consolidationInterval = parseInt((ctx.flags['consolidation-interval'] || ctx.flags.consolidationInterval || '60000'), 10);
1096
- output.writeln();
1097
- output.writeln(output.bold('Neural Embedding Substrate (RuVector)'));
1098
- output.writeln(output.dim('Treating embeddings as a synthetic nervous system'));
1099
- output.writeln(output.dim(''.repeat(60)));
1100
- return [4 /*yield*/, import('fs')];
1101
- case 1:
1102
- fs = _a.sent();
1103
- return [4 /*yield*/, import('path')];
1104
- case 2:
1105
- path = _a.sent();
1106
- configPath = path.join(process.cwd(), '.claude-flow', 'embeddings.json');
1107
- if (!fs.existsSync(configPath)) {
1108
- output.printWarning('Embeddings not initialized');
1109
- output.printInfo('Run "embeddings init" first to configure ONNX model');
1110
- return [2 /*return*/, { success: false, exitCode: 1 }];
1111
- }
1112
- config = {};
1113
- try {
1114
- config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
1115
- }
1116
- catch (_b) {
1117
- config = {};
1118
- }
1119
- if (init) {
1120
- // Initialize neural substrate configuration
1121
- config.neural = {
1122
- enabled: true,
1123
- driftThreshold: driftThreshold,
1124
- decayRate: decayRate,
1125
- consolidationInterval: consolidationInterval,
1126
- ruvector: {
1127
- enabled: true,
1128
- sona: true,
1129
- flashAttention: true,
1130
- ewcPlusPlus: true
1131
- },
1132
- features: {
1133
- semanticDrift: true,
1134
- memoryPhysics: true,
1135
- stateMachine: true,
1136
- swarmCoordination: true,
1137
- coherenceMonitor: true
1138
- },
1139
- initializedAt: new Date().toISOString()
1140
- };
1141
- fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
1142
- output.printSuccess('Neural substrate initialized');
1143
- output.writeln();
1144
- }
1145
- neuralConfig = (config.neural || {});
1146
- features = (neuralConfig.features || {});
1147
- ruvector = (neuralConfig.ruvector || {});
1148
- output.printTable({
1149
- columns: [
1150
- { key: 'feature', header: 'Feature', width: 24 },
1151
- { key: 'description', header: 'Description', width: 38 },
1152
- { key: 'status', header: 'Status', width: 12 },
1153
- ],
1154
- data: [
1155
- {
1156
- feature: 'SemanticDriftDetector',
1157
- description: "Monitor semantic movement (threshold: " + driftThreshold + ")",
1158
- status: features.semanticDrift ? output.success('Active') : output.dim('Inactive')
1159
- },
1160
- {
1161
- feature: 'MemoryPhysics',
1162
- description: "Hippocampal dynamics (decay: " + decayRate + ")",
1163
- status: features.memoryPhysics ? output.success('Active') : output.dim('Inactive')
1164
- },
1165
- {
1166
- feature: 'EmbeddingStateMachine',
1167
- description: 'Agent state through geometry',
1168
- status: features.stateMachine ? output.success('Active') : output.dim('Inactive')
1169
- },
1170
- {
1171
- feature: 'SwarmCoordinator',
1172
- description: 'Multi-agent embedding coordination',
1173
- status: features.swarmCoordination ? output.success('Active') : output.dim('Inactive')
1174
- },
1175
- {
1176
- feature: 'CoherenceMonitor',
1177
- description: 'Safety & alignment detection',
1178
- status: features.coherenceMonitor ? output.success('Active') : output.dim('Inactive')
1179
- },
1180
- ]
1181
- });
1182
- output.writeln();
1183
- output.writeln(output.bold('RuVector Integration'));
1184
- output.printTable({
1185
- columns: [
1186
- { key: 'component', header: 'Component', width: 24 },
1187
- { key: 'description', header: 'Description', width: 38 },
1188
- { key: 'status', header: 'Status', width: 12 },
1189
- ],
1190
- data: [
1191
- {
1192
- component: 'SONA',
1193
- description: 'Self-Optimizing Neural Architecture (<0.05ms)',
1194
- status: ruvector.sona ? output.success('Enabled') : output.dim('Disabled')
1195
- },
1196
- {
1197
- component: 'Flash Attention',
1198
- description: '2.49x-7.47x attention speedup',
1199
- status: ruvector.flashAttention ? output.success('Enabled') : output.dim('Disabled')
1200
- },
1201
- {
1202
- component: 'EWC++',
1203
- description: 'Elastic Weight Consolidation (anti-forgetting)',
1204
- status: ruvector.ewcPlusPlus ? output.success('Enabled') : output.dim('Disabled')
1205
- },
1206
- {
1207
- component: 'Hyperbolic Space',
1208
- description: 'Poincaré ball for hierarchy preservation',
1209
- status: config.hyperbolic ? output.success('Enabled') : output.dim('Disabled')
1210
- },
1211
- ]
1212
- });
1213
- output.writeln();
1214
- if (!neuralConfig.enabled) {
1215
- output.printInfo('Run with --init to enable neural substrate');
1216
- }
1217
- else {
1218
- output.writeln(output.dim('Configuration: .claude-flow/embeddings.json'));
1219
- output.writeln(output.dim('Next: Use "hooks pretrain" to train patterns'));
1220
- }
1221
- return [2 /*return*/, { success: true, data: { config: neuralConfig, feature: feature } }];
1222
- }
959
+ action: async (ctx) => {
960
+ const feature = ctx.flags.feature || 'all';
961
+ const init = ctx.flags.init;
962
+ const driftThreshold = parseFloat((ctx.flags['drift-threshold'] || ctx.flags.driftThreshold || '0.3'));
963
+ const decayRate = parseFloat((ctx.flags['decay-rate'] || ctx.flags.decayRate || '0.01'));
964
+ const consolidationInterval = parseInt((ctx.flags['consolidation-interval'] || ctx.flags.consolidationInterval || '60000'), 10);
965
+ output.writeln();
966
+ output.writeln(output.bold('Neural Embedding Substrate (RuVector)'));
967
+ output.writeln(output.dim('Treating embeddings as a synthetic nervous system'));
968
+ output.writeln(output.dim(''.repeat(60)));
969
+ // Check if embeddings config exists
970
+ const fs = await import('fs');
971
+ const path = await import('path');
972
+ const configPath = path.join(process.cwd(), '.claude-flow', 'embeddings.json');
973
+ if (!fs.existsSync(configPath)) {
974
+ output.printWarning('Embeddings not initialized');
975
+ output.printInfo('Run "embeddings init" first to configure ONNX model');
976
+ return { success: false, exitCode: 1 };
977
+ }
978
+ // Load and update config
979
+ let config = {};
980
+ try {
981
+ config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
982
+ }
983
+ catch {
984
+ config = {};
985
+ }
986
+ if (init) {
987
+ // Initialize neural substrate configuration
988
+ config.neural = {
989
+ enabled: true,
990
+ driftThreshold,
991
+ decayRate,
992
+ consolidationInterval,
993
+ ruvector: {
994
+ enabled: true,
995
+ sona: true, // Self-Optimizing Neural Architecture
996
+ flashAttention: true,
997
+ ewcPlusPlus: true, // Elastic Weight Consolidation
998
+ },
999
+ features: {
1000
+ semanticDrift: true,
1001
+ memoryPhysics: true,
1002
+ stateMachine: true,
1003
+ swarmCoordination: true,
1004
+ coherenceMonitor: true,
1005
+ },
1006
+ initializedAt: new Date().toISOString(),
1007
+ };
1008
+ fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
1009
+ output.printSuccess('Neural substrate initialized');
1010
+ output.writeln();
1011
+ }
1012
+ const neuralConfig = (config.neural || {});
1013
+ const features = (neuralConfig.features || {});
1014
+ const ruvector = (neuralConfig.ruvector || {});
1015
+ output.printTable({
1016
+ columns: [
1017
+ { key: 'feature', header: 'Feature', width: 24 },
1018
+ { key: 'description', header: 'Description', width: 38 },
1019
+ { key: 'status', header: 'Status', width: 12 },
1020
+ ],
1021
+ data: [
1022
+ {
1023
+ feature: 'SemanticDriftDetector',
1024
+ description: `Monitor semantic movement (threshold: ${driftThreshold})`,
1025
+ status: features.semanticDrift ? output.success('Active') : output.dim('Inactive')
1026
+ },
1027
+ {
1028
+ feature: 'MemoryPhysics',
1029
+ description: `Hippocampal dynamics (decay: ${decayRate})`,
1030
+ status: features.memoryPhysics ? output.success('Active') : output.dim('Inactive')
1031
+ },
1032
+ {
1033
+ feature: 'EmbeddingStateMachine',
1034
+ description: 'Agent state through geometry',
1035
+ status: features.stateMachine ? output.success('Active') : output.dim('Inactive')
1036
+ },
1037
+ {
1038
+ feature: 'SwarmCoordinator',
1039
+ description: 'Multi-agent embedding coordination',
1040
+ status: features.swarmCoordination ? output.success('Active') : output.dim('Inactive')
1041
+ },
1042
+ {
1043
+ feature: 'CoherenceMonitor',
1044
+ description: 'Safety & alignment detection',
1045
+ status: features.coherenceMonitor ? output.success('Active') : output.dim('Inactive')
1046
+ },
1047
+ ],
1223
1048
  });
1224
- }); }
1049
+ output.writeln();
1050
+ output.writeln(output.bold('RuVector Integration'));
1051
+ output.printTable({
1052
+ columns: [
1053
+ { key: 'component', header: 'Component', width: 24 },
1054
+ { key: 'description', header: 'Description', width: 38 },
1055
+ { key: 'status', header: 'Status', width: 12 },
1056
+ ],
1057
+ data: [
1058
+ {
1059
+ component: 'SONA',
1060
+ description: 'Self-Optimizing Neural Architecture (<0.05ms)',
1061
+ status: ruvector.sona ? output.success('Enabled') : output.dim('Disabled')
1062
+ },
1063
+ {
1064
+ component: 'Flash Attention',
1065
+ description: '2.49x-7.47x attention speedup',
1066
+ status: ruvector.flashAttention ? output.success('Enabled') : output.dim('Disabled')
1067
+ },
1068
+ {
1069
+ component: 'EWC++',
1070
+ description: 'Elastic Weight Consolidation (anti-forgetting)',
1071
+ status: ruvector.ewcPlusPlus ? output.success('Enabled') : output.dim('Disabled')
1072
+ },
1073
+ {
1074
+ component: 'Hyperbolic Space',
1075
+ description: 'Poincaré ball for hierarchy preservation',
1076
+ status: config.hyperbolic ? output.success('Enabled') : output.dim('Disabled')
1077
+ },
1078
+ ],
1079
+ });
1080
+ output.writeln();
1081
+ if (!neuralConfig.enabled) {
1082
+ output.printInfo('Run with --init to enable neural substrate');
1083
+ }
1084
+ else {
1085
+ output.writeln(output.dim('Configuration: .claude-flow/embeddings.json'));
1086
+ output.writeln(output.dim('Next: Use "hooks pretrain" to train patterns'));
1087
+ }
1088
+ return { success: true, data: { config: neuralConfig, feature } };
1089
+ },
1225
1090
  };
1226
1091
  // Models subcommand
1227
- var modelsCommand = {
1092
+ const modelsCommand = {
1228
1093
  name: 'models',
1229
1094
  description: 'List and download embedding models',
1230
1095
  options: [
1231
1096
  { name: 'download', short: 'd', type: 'string', description: 'Model ID to download' },
1232
- { name: 'list', short: 'l', type: 'boolean', description: 'List available models', "default": 'true' },
1097
+ { name: 'list', short: 'l', type: 'boolean', description: 'List available models', default: 'true' },
1233
1098
  ],
1234
1099
  examples: [
1235
1100
  { command: 'claude-flow embeddings models', description: 'List models' },
1236
1101
  { command: 'claude-flow embeddings models -d all-MiniLM-L6-v2', description: 'Download model' },
1237
1102
  ],
1238
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
1239
- var download, embeddings, spinner_2, err_1, models, _a;
1240
- return __generator(this, function (_b) {
1241
- switch (_b.label) {
1242
- case 0:
1243
- download = ctx.flags.download;
1244
- return [4 /*yield*/, getEmbeddings()];
1245
- case 1:
1246
- embeddings = _b.sent();
1247
- output.writeln();
1248
- output.writeln(output.bold('Embedding Models'));
1249
- output.writeln(output.dim('─'.repeat(60)));
1250
- if (!download) return [3 /*break*/, 9];
1251
- spinner_2 = output.createSpinner({ text: "Downloading " + download + "...", spinner: 'dots' });
1252
- spinner_2.start();
1253
- if (!embeddings) return [3 /*break*/, 6];
1254
- _b.label = 2;
1255
- case 2:
1256
- _b.trys.push([2, 4, , 5]);
1257
- return [4 /*yield*/, embeddings.downloadEmbeddingModel(download, '.models', function (p) {
1258
- spinner_2.setText("Downloading " + download + "... " + p.percent.toFixed(1) + "%");
1259
- })];
1260
- case 3:
1261
- _b.sent();
1262
- spinner_2.succeed("Downloaded " + download);
1263
- return [3 /*break*/, 5];
1264
- case 4:
1265
- err_1 = _b.sent();
1266
- spinner_2.fail("Failed to download: " + err_1);
1267
- return [2 /*return*/, { success: false, exitCode: 1 }];
1268
- case 5: return [3 /*break*/, 8];
1269
- case 6: return [4 /*yield*/, new Promise(function (r) { return setTimeout(r, 500); })];
1270
- case 7:
1271
- _b.sent();
1272
- spinner_2.succeed("Download complete (simulated)");
1273
- _b.label = 8;
1274
- case 8: return [2 /*return*/, { success: true }];
1275
- case 9:
1276
- models = [
1277
- { id: 'all-MiniLM-L6-v2', dimension: 384, size: '23MB', quantized: false, downloaded: true },
1278
- { id: 'all-mpnet-base-v2', dimension: 768, size: '110MB', quantized: false, downloaded: false },
1279
- { id: 'paraphrase-MiniLM-L3-v2', dimension: 384, size: '17MB', quantized: false, downloaded: false },
1280
- ];
1281
- if (!embeddings) return [3 /*break*/, 13];
1282
- _b.label = 10;
1283
- case 10:
1284
- _b.trys.push([10, 12, , 13]);
1285
- return [4 /*yield*/, embeddings.listEmbeddingModels()];
1286
- case 11:
1287
- models = _b.sent();
1288
- return [3 /*break*/, 13];
1289
- case 12:
1290
- _a = _b.sent();
1291
- return [3 /*break*/, 13];
1292
- case 13:
1293
- output.printTable({
1294
- columns: [
1295
- { key: 'id', header: 'Model ID', width: 28 },
1296
- { key: 'dimension', header: 'Dims', width: 8 },
1297
- { key: 'size', header: 'Size', width: 10 },
1298
- { key: 'quantized', header: 'Quant', width: 8 },
1299
- { key: 'downloaded', header: 'Status', width: 12 },
1300
- ],
1301
- data: models.map(function (m) { return ({
1302
- id: m.id,
1303
- dimension: String(m.dimension),
1304
- size: m.size,
1305
- quantized: m.quantized ? 'Yes' : 'No',
1306
- downloaded: m.downloaded ? output.success('Downloaded') : output.dim('Available')
1307
- }); })
1103
+ action: async (ctx) => {
1104
+ const download = ctx.flags.download;
1105
+ const embeddings = await getEmbeddings();
1106
+ output.writeln();
1107
+ output.writeln(output.bold('Embedding Models'));
1108
+ output.writeln(output.dim('─'.repeat(60)));
1109
+ if (download) {
1110
+ const spinner = output.createSpinner({ text: `Downloading ${download}...`, spinner: 'dots' });
1111
+ spinner.start();
1112
+ if (embeddings) {
1113
+ try {
1114
+ await embeddings.downloadEmbeddingModel(download, '.models', (p) => {
1115
+ spinner.setText(`Downloading ${download}... ${p.percent.toFixed(1)}%`);
1308
1116
  });
1309
- return [2 /*return*/, { success: true }];
1117
+ spinner.succeed(`Downloaded ${download}`);
1118
+ }
1119
+ catch (err) {
1120
+ spinner.fail(`Failed to download: ${err}`);
1121
+ return { success: false, exitCode: 1 };
1122
+ }
1123
+ }
1124
+ else {
1125
+ await new Promise(r => setTimeout(r, 500));
1126
+ spinner.succeed(`Download complete (simulated)`);
1310
1127
  }
1128
+ return { success: true };
1129
+ }
1130
+ // List models
1131
+ let models = [
1132
+ { id: 'all-MiniLM-L6-v2', dimension: 384, size: '23MB', quantized: false, downloaded: true },
1133
+ { id: 'all-mpnet-base-v2', dimension: 768, size: '110MB', quantized: false, downloaded: false },
1134
+ { id: 'paraphrase-MiniLM-L3-v2', dimension: 384, size: '17MB', quantized: false, downloaded: false },
1135
+ ];
1136
+ if (embeddings) {
1137
+ try {
1138
+ models = await embeddings.listEmbeddingModels();
1139
+ }
1140
+ catch { /* use defaults */ }
1141
+ }
1142
+ output.printTable({
1143
+ columns: [
1144
+ { key: 'id', header: 'Model ID', width: 28 },
1145
+ { key: 'dimension', header: 'Dims', width: 8 },
1146
+ { key: 'size', header: 'Size', width: 10 },
1147
+ { key: 'quantized', header: 'Quant', width: 8 },
1148
+ { key: 'downloaded', header: 'Status', width: 12 },
1149
+ ],
1150
+ data: models.map(m => ({
1151
+ id: m.id,
1152
+ dimension: String(m.dimension),
1153
+ size: m.size,
1154
+ quantized: m.quantized ? 'Yes' : 'No',
1155
+ downloaded: m.downloaded ? output.success('Downloaded') : output.dim('Available'),
1156
+ })),
1311
1157
  });
1312
- }); }
1158
+ return { success: true };
1159
+ },
1313
1160
  };
1314
1161
  // Cache subcommand
1315
// Cache subcommand: report stats on (or clear) the embedding caches.
// Two caches are inspected: the persistent SQLite file on disk and the
// in-memory HNSW index (if the memory initializer reports it as active).
const cacheCommand = {
    name: 'cache',
    description: 'Manage embedding cache',
    options: [
        { name: 'action', short: 'a', type: 'string', description: 'Action: stats, clear, persist', default: 'stats' },
        { name: 'db-path', type: 'string', description: 'SQLite database path', default: '.cache/embeddings.db' },
    ],
    examples: [
        { command: 'claude-flow embeddings cache', description: 'Show cache stats' },
        { command: 'claude-flow embeddings cache -a clear', description: 'Clear cache' },
    ],
    action: async (ctx) => {
        const action = ctx.flags.action || 'stats';
        const dbPath = ctx.flags['db-path'] || '.cache/embeddings.db';
        output.writeln();
        output.writeln(output.bold('Embedding Cache'));
        // FIX: separator was ''.repeat(50) (always empty); restore the '─' rule
        // used by every other subcommand header in this file.
        output.writeln(output.dim('─'.repeat(50)));
        const fs = await import('fs');
        const path = await import('path');
        // Shared human-readable byte formatter (was duplicated inline twice).
        const formatSize = (bytes) => {
            if (bytes >= 1024 * 1024) {
                return `${(bytes / 1024 / 1024).toFixed(1)} MB`;
            }
            if (bytes >= 1024) {
                return `${(bytes / 1024).toFixed(1)} KB`;
            }
            return `${bytes} B`;
        };
        // Get real cache stats
        const resolvedDbPath = path.resolve(dbPath);
        let sqliteEntries = 0;
        let sqliteSize = '0 B';
        let sqliteExists = false;
        try {
            if (fs.existsSync(resolvedDbPath)) {
                sqliteExists = true;
                const stats = fs.statSync(resolvedDbPath);
                sqliteSize = formatSize(stats.size);
                // Try to count real entries via sql.js
                try {
                    const initSqlJs = (await import('sql.js')).default;
                    const SQL = await initSqlJs();
                    const fileBuffer = fs.readFileSync(resolvedDbPath);
                    const db = new SQL.Database(fileBuffer);
                    try {
                        const result = db.exec('SELECT COUNT(*) as count FROM embeddings');
                        if (result.length > 0 && result[0].values.length > 0) {
                            sqliteEntries = result[0].values[0][0];
                        }
                    }
                    finally {
                        // Always release the sql.js (wasm) database, even if exec throws.
                        db.close();
                    }
                }
                catch {
                    // Estimate entries from file size (~1600 bytes per entry for 384-dim embeddings)
                    sqliteEntries = Math.floor(stats.size / 1600);
                }
            }
        }
        catch { /* file access error */ }
        // Get in-memory HNSW stats if available
        let memoryEntries = 0;
        let memorySize = '0 B';
        try {
            const { getHNSWStatus } = await import('../memory/memory-initializer.js');
            const hnswStatus = getHNSWStatus();
            if (hnswStatus && hnswStatus.initialized) {
                memoryEntries = hnswStatus.entryCount || 0;
                // Float32 = 4 bytes per dimension
                memorySize = formatSize(memoryEntries * (hnswStatus.dimensions || 384) * 4);
            }
        }
        catch { /* HNSW not initialized */ }
        if (action === 'clear') {
            try {
                if (fs.existsSync(resolvedDbPath)) {
                    fs.unlinkSync(resolvedDbPath);
                    output.writeln(output.success('Cache cleared!'));
                }
                else {
                    output.writeln(output.dim('No cache to clear.'));
                }
                return { success: true };
            }
            catch (error) {
                output.printError(`Failed to clear cache: ${error}`);
                return { success: false };
            }
        }
        // Display real stats
        output.printTable({
            columns: [
                { key: 'cache', header: 'Cache Type', width: 18 },
                { key: 'entries', header: 'Entries', width: 12 },
                { key: 'status', header: 'Status', width: 12 },
                { key: 'size', header: 'Size', width: 12 },
            ],
            data: [
                {
                    cache: 'LRU (Memory)',
                    entries: String(memoryEntries),
                    status: memoryEntries > 0 ? output.success('Active') : output.dim('Empty'),
                    size: memorySize,
                },
                {
                    cache: 'SQLite (Disk)',
                    entries: String(sqliteEntries),
                    status: sqliteExists ? output.success('Active') : output.dim('Not Found'),
                    size: sqliteSize,
                },
            ],
        });
        output.writeln();
        output.writeln(output.dim(`Database: ${resolvedDbPath}`));
        if (sqliteExists) {
            output.writeln(output.dim('Persistent cache survives restarts'));
        }
        else {
            output.writeln(output.dim('Cache will be created on first embedding operation'));
        }
        return { success: true };
    },
};
1469
1291
  // Warmup subcommand - Preload model for faster first embed
1470
// Warmup subcommand: load the embedding model ahead of time and (optionally)
// run a few embeddings so the ONNX runtime is warm for subsequent calls.
const warmupCommand = {
    name: 'warmup',
    description: 'Preload embedding model for faster subsequent operations',
    options: [
        { name: 'background', short: 'b', type: 'boolean', description: 'Run warmup in background daemon', default: 'false' },
        { name: 'test', short: 't', type: 'boolean', description: 'Run test embedding after warmup', default: 'true' },
    ],
    examples: [
        { command: 'claude-flow embeddings warmup', description: 'Preload model with test' },
        { command: 'claude-flow embeddings warmup -b', description: 'Background warmup' },
    ],
    action: async (ctx) => {
        const runTest = ctx.flags.test !== false;
        // NOTE(review): background flag is read here but not acted on in this
        // action — TODO confirm whether daemon mode is handled elsewhere.
        const background = ctx.flags.background === true;
        output.writeln();
        output.writeln(output.bold('Embedding Model Warmup'));
        output.writeln(output.dim('─'.repeat(50)));
        const loadSpinner = output.createSpinner({ text: 'Loading embedding model...', spinner: 'dots' });
        loadSpinner.start();
        const overallStart = Date.now();
        try {
            const { loadEmbeddingModel, generateEmbedding } = await import('../memory/memory-initializer.js');

            // Phase 1: load the model and time it.
            const loadStart = Date.now();
            const modelInfo = await loadEmbeddingModel({ verbose: false });
            const loadTime = Date.now() - loadStart;
            loadSpinner.succeed(`Model loaded in ${loadTime}ms`);

            // Phase 2: optional warmup embeddings (first one pays runtime init cost).
            if (runTest) {
                const warmSpinner = output.createSpinner({ text: 'Running warmup embedding...', spinner: 'dots' });
                warmSpinner.start();
                const samples = [
                    'The quick brown fox jumps over the lazy dog',
                    'Machine learning embeddings enable semantic search',
                    'Vector databases use HNSW for fast similarity'
                ];
                const durations = [];
                for (const sample of samples) {
                    const t0 = Date.now();
                    await generateEmbedding(sample);
                    durations.push(Date.now() - t0);
                }
                // Average only the warm runs — skip the first (cold) embedding.
                const warmOnly = durations.slice(1);
                const avgWarmEmbed = warmOnly.reduce((sum, ms) => sum + ms, 0) / warmOnly.length;
                warmSpinner.succeed(`Warmup complete: ${avgWarmEmbed.toFixed(1)}ms avg (warm)`);
            }

            const totalTime = Date.now() - overallStart;
            output.writeln();
            output.printTable({
                columns: [
                    { key: 'metric', header: 'Metric', width: 22 },
                    { key: 'value', header: 'Value', width: 25 },
                ],
                data: [
                    { metric: 'Model', value: modelInfo.modelName },
                    { metric: 'Dimensions', value: String(modelInfo.dimensions) },
                    { metric: 'Initial Load', value: `${loadTime}ms` },
                    { metric: 'Warm Embed', value: runTest ? `~2-3ms` : 'Skipped' },
                    { metric: 'Total Warmup', value: `${totalTime}ms` },
                    { metric: 'Status', value: output.success('Ready') },
                ],
            });
            output.writeln();
            output.writeln(output.dim('Model is now cached for fast subsequent embeddings'));
            return { success: true, data: { loadTime, totalTime, dimensions: modelInfo.dimensions } };
        }
        catch (error) {
            loadSpinner.fail('Warmup failed');
            output.printError(error instanceof Error ? error.message : String(error));
            return { success: false, exitCode: 1 };
        }
    },
};
1563
1364
  // Benchmark subcommand - Performance testing
1564
- var benchmarkCommand = {
1365
+ const benchmarkCommand = {
1565
1366
  name: 'benchmark',
1566
1367
  description: 'Run embedding performance benchmarks',
1567
1368
  options: [
1568
- { name: 'iterations', short: 'n', type: 'number', description: 'Number of iterations', "default": '10' },
1569
- { name: 'batch-size', short: 'b', type: 'number', description: 'Batch size for batch test', "default": '5' },
1570
- { name: 'full', short: 'f', type: 'boolean', description: 'Run full benchmark suite', "default": 'false' },
1369
+ { name: 'iterations', short: 'n', type: 'number', description: 'Number of iterations', default: '10' },
1370
+ { name: 'batch-size', short: 'b', type: 'number', description: 'Batch size for batch test', default: '5' },
1371
+ { name: 'full', short: 'f', type: 'boolean', description: 'Run full benchmark suite', default: 'false' },
1571
1372
  ],
1572
1373
  examples: [
1573
1374
  { command: 'claude-flow embeddings benchmark', description: 'Quick benchmark' },
1574
1375
  { command: 'claude-flow embeddings benchmark -n 50 -f', description: 'Full benchmark' },
1575
1376
  ],
1576
- action: function (ctx) { return __awaiter(void 0, void 0, Promise, function () {
1577
- var iterations, batchSize, full, results, _a, loadEmbeddingModel, generateEmbedding_1, coldStart, modelInfo, coldTime, firstStart, firstTime, warmTimes, i, start, avgWarm, minWarm, maxWarm, batchTexts, seqStart, _i, batchTexts_1, text, seqTime, parallelTexts, parallelStart, parallelTime, speedup, cacheText, cacheTimes, i, start, avgCache, emb1, emb2, simTimes, i, start, avgSim, error_9;
1578
- return __generator(this, function (_b) {
1579
- switch (_b.label) {
1580
- case 0:
1581
- iterations = parseInt(ctx.flags.iterations || '10', 10);
1582
- batchSize = parseInt(ctx.flags['batch-size'] || '5', 10);
1583
- full = ctx.flags.full === true;
1584
- output.writeln();
1585
- output.writeln(output.bold('Embedding Performance Benchmark'));
1586
- output.writeln(output.dim('─'.repeat(60)));
1587
- results = [];
1588
- _b.label = 1;
1589
- case 1:
1590
- _b.trys.push([1, 22, , 23]);
1591
- return [4 /*yield*/, import('../memory/memory-initializer.js')];
1592
- case 2:
1593
- _a = _b.sent(), loadEmbeddingModel = _a.loadEmbeddingModel, generateEmbedding_1 = _a.generateEmbedding;
1594
- // Test 1: Cold start (model load)
1595
- output.writeln(output.dim('Testing cold start...'));
1596
- coldStart = Date.now();
1597
- return [4 /*yield*/, loadEmbeddingModel({ verbose: false })];
1598
- case 3:
1599
- modelInfo = _b.sent();
1600
- coldTime = Date.now() - coldStart;
1601
- results.push({
1602
- test: 'Cold Start (model load)',
1603
- time: coldTime + "ms",
1604
- opsPerSec: '-'
1605
- });
1606
- firstStart = Date.now();
1607
- return [4 /*yield*/, generateEmbedding_1('First embedding test')];
1608
- case 4:
1609
- _b.sent();
1610
- firstTime = Date.now() - firstStart;
1611
- results.push({
1612
- test: 'First Embed',
1613
- time: firstTime + "ms",
1614
- opsPerSec: "" + (1000 / firstTime).toFixed(1)
1615
- });
1616
- // Test 3: Warm embeds (multiple iterations)
1617
- output.writeln(output.dim("Testing " + iterations + " warm embeds..."));
1618
- warmTimes = [];
1619
- i = 0;
1620
- _b.label = 5;
1621
- case 5:
1622
- if (!(i < iterations)) return [3 /*break*/, 8];
1623
- start = Date.now();
1624
- return [4 /*yield*/, generateEmbedding_1("Warm embedding test " + i + " with some content")];
1625
- case 6:
1626
- _b.sent();
1627
- warmTimes.push(Date.now() - start);
1628
- _b.label = 7;
1629
- case 7:
1630
- i++;
1631
- return [3 /*break*/, 5];
1632
- case 8:
1633
- avgWarm = warmTimes.reduce(function (a, b) { return a + b; }, 0) / warmTimes.length;
1634
- minWarm = Math.min.apply(Math, warmTimes);
1635
- maxWarm = Math.max.apply(Math, warmTimes);
1636
- results.push({
1637
- test: "Warm Embed (n=" + iterations + ")",
1638
- time: avgWarm.toFixed(1) + "ms avg (" + minWarm + "-" + maxWarm + ")",
1639
- opsPerSec: "" + (1000 / avgWarm).toFixed(1)
1640
- });
1641
- // Test 4a: Sequential batch embed
1642
- output.writeln(output.dim("Testing sequential batch of " + batchSize + "..."));
1643
- batchTexts = Array.from({ length: batchSize }, function (_, i) { return "Batch text " + (i + 1) + " for testing"; });
1644
- seqStart = Date.now();
1645
- _i = 0, batchTexts_1 = batchTexts;
1646
- _b.label = 9;
1647
- case 9:
1648
- if (!(_i < batchTexts_1.length)) return [3 /*break*/, 12];
1649
- text = batchTexts_1[_i];
1650
- return [4 /*yield*/, generateEmbedding_1(text)];
1651
- case 10:
1652
- _b.sent();
1653
- _b.label = 11;
1654
- case 11:
1655
- _i++;
1656
- return [3 /*break*/, 9];
1657
- case 12:
1658
- seqTime = Date.now() - seqStart;
1659
- results.push({
1660
- test: "Sequential (n=" + batchSize + ")",
1661
- time: seqTime + "ms total (" + (seqTime / batchSize).toFixed(1) + "ms/item)",
1662
- opsPerSec: "" + (1000 * batchSize / seqTime).toFixed(1)
1663
- });
1664
- // Test 4b: Parallel batch embed
1665
- // Note: Local ONNX is CPU-bound so parallelism has limited benefit
1666
- // Parallelism gives 2-4x speedup for API-based providers (OpenAI, etc.)
1667
- output.writeln(output.dim("Testing parallel batch of " + batchSize + "..."));
1668
- parallelTexts = Array.from({ length: batchSize }, function (_, i) { return "Parallel batch text " + (i + 1); });
1669
- parallelStart = Date.now();
1670
- return [4 /*yield*/, Promise.all(parallelTexts.map(function (text) { return generateEmbedding_1(text); }))];
1671
- case 13:
1672
- _b.sent();
1673
- parallelTime = Date.now() - parallelStart;
1674
- speedup = seqTime / parallelTime;
1675
- results.push({
1676
- test: "Parallel (n=" + batchSize + ")",
1677
- time: parallelTime + "ms total (" + (parallelTime / batchSize).toFixed(1) + "ms/item)",
1678
- opsPerSec: (1000 * batchSize / parallelTime).toFixed(1) + " (" + speedup.toFixed(2) + "x vs seq)"
1679
- });
1680
- if (!full) return [3 /*break*/, 21];
1681
- output.writeln(output.dim('Testing cache hits...'));
1682
- cacheText = 'Cached embedding test text';
1683
- return [4 /*yield*/, generateEmbedding_1(cacheText)];
1684
- case 14:
1685
- _b.sent(); // Prime cache
1686
- cacheTimes = [];
1687
- i = 0;
1688
- _b.label = 15;
1689
- case 15:
1690
- if (!(i < 10)) return [3 /*break*/, 18];
1691
- start = Date.now();
1692
- return [4 /*yield*/, generateEmbedding_1(cacheText)];
1693
- case 16:
1694
- _b.sent();
1377
+ action: async (ctx) => {
1378
+ const iterations = parseInt(ctx.flags.iterations || '10', 10);
1379
+ const batchSize = parseInt(ctx.flags['batch-size'] || '5', 10);
1380
+ const full = ctx.flags.full === true;
1381
+ output.writeln();
1382
+ output.writeln(output.bold('Embedding Performance Benchmark'));
1383
+ output.writeln(output.dim(''.repeat(60)));
1384
+ const results = [];
1385
+ try {
1386
+ const { loadEmbeddingModel, generateEmbedding } = await import('../memory/memory-initializer.js');
1387
+ // Test 1: Cold start (model load)
1388
+ output.writeln(output.dim('Testing cold start...'));
1389
+ const coldStart = Date.now();
1390
+ const modelInfo = await loadEmbeddingModel({ verbose: false });
1391
+ const coldTime = Date.now() - coldStart;
1392
+ results.push({
1393
+ test: 'Cold Start (model load)',
1394
+ time: `${coldTime}ms`,
1395
+ opsPerSec: '-'
1396
+ });
1397
+ // Test 2: First embed
1398
+ const firstStart = Date.now();
1399
+ await generateEmbedding('First embedding test');
1400
+ const firstTime = Date.now() - firstStart;
1401
+ results.push({
1402
+ test: 'First Embed',
1403
+ time: `${firstTime}ms`,
1404
+ opsPerSec: `${(1000 / firstTime).toFixed(1)}`
1405
+ });
1406
+ // Test 3: Warm embeds (multiple iterations)
1407
+ output.writeln(output.dim(`Testing ${iterations} warm embeds...`));
1408
+ const warmTimes = [];
1409
+ for (let i = 0; i < iterations; i++) {
1410
+ const start = Date.now();
1411
+ await generateEmbedding(`Warm embedding test ${i} with some content`);
1412
+ warmTimes.push(Date.now() - start);
1413
+ }
1414
+ const avgWarm = warmTimes.reduce((a, b) => a + b, 0) / warmTimes.length;
1415
+ const minWarm = Math.min(...warmTimes);
1416
+ const maxWarm = Math.max(...warmTimes);
1417
+ results.push({
1418
+ test: `Warm Embed (n=${iterations})`,
1419
+ time: `${avgWarm.toFixed(1)}ms avg (${minWarm}-${maxWarm})`,
1420
+ opsPerSec: `${(1000 / avgWarm).toFixed(1)}`
1421
+ });
1422
+ // Test 4a: Sequential batch embed
1423
+ output.writeln(output.dim(`Testing sequential batch of ${batchSize}...`));
1424
+ const batchTexts = Array.from({ length: batchSize }, (_, i) => `Batch text ${i + 1} for testing`);
1425
+ const seqStart = Date.now();
1426
+ for (const text of batchTexts) {
1427
+ await generateEmbedding(text);
1428
+ }
1429
+ const seqTime = Date.now() - seqStart;
1430
+ results.push({
1431
+ test: `Sequential (n=${batchSize})`,
1432
+ time: `${seqTime}ms total (${(seqTime / batchSize).toFixed(1)}ms/item)`,
1433
+ opsPerSec: `${(1000 * batchSize / seqTime).toFixed(1)}`
1434
+ });
1435
+ // Test 4b: Parallel batch embed
1436
+ // Note: Local ONNX is CPU-bound so parallelism has limited benefit
1437
+ // Parallelism gives 2-4x speedup for API-based providers (OpenAI, etc.)
1438
+ output.writeln(output.dim(`Testing parallel batch of ${batchSize}...`));
1439
+ const parallelTexts = Array.from({ length: batchSize }, (_, i) => `Parallel batch text ${i + 1}`);
1440
+ const parallelStart = Date.now();
1441
+ await Promise.all(parallelTexts.map(text => generateEmbedding(text)));
1442
+ const parallelTime = Date.now() - parallelStart;
1443
+ const speedup = seqTime / parallelTime;
1444
+ results.push({
1445
+ test: `Parallel (n=${batchSize})`,
1446
+ time: `${parallelTime}ms total (${(parallelTime / batchSize).toFixed(1)}ms/item)`,
1447
+ opsPerSec: `${(1000 * batchSize / parallelTime).toFixed(1)} (${speedup.toFixed(2)}x vs seq)`
1448
+ });
1449
+ // Test 5: Cache hit (same text)
1450
+ if (full) {
1451
+ output.writeln(output.dim('Testing cache hits...'));
1452
+ const cacheText = 'Cached embedding test text';
1453
+ await generateEmbedding(cacheText); // Prime cache
1454
+ const cacheTimes = [];
1455
+ for (let i = 0; i < 10; i++) {
1456
+ const start = Date.now();
1457
+ await generateEmbedding(cacheText);
1695
1458
  cacheTimes.push(Date.now() - start);
1696
- _b.label = 17;
1697
- case 17:
1698
- i++;
1699
- return [3 /*break*/, 15];
1700
- case 18:
1701
- avgCache = cacheTimes.reduce(function (a, b) { return a + b; }, 0) / cacheTimes.length;
1702
- results.push({
1703
- test: 'Cache Hit',
1704
- time: avgCache.toFixed(2) + "ms avg",
1705
- opsPerSec: "" + (1000 / avgCache).toFixed(0)
1706
- });
1707
- // Test 6: Similarity computation
1708
- output.writeln(output.dim('Testing similarity...'));
1709
- return [4 /*yield*/, generateEmbedding_1('Hello world')];
1710
- case 19:
1711
- emb1 = (_b.sent()).embedding;
1712
- return [4 /*yield*/, generateEmbedding_1('Hi there')];
1713
- case 20:
1714
- emb2 = (_b.sent()).embedding;
1715
- simTimes = [];
1716
- for (i = 0; i < 1000; i++) {
1717
- start = performance.now();
1718
- cosineSimilarity(emb1, emb2);
1719
- simTimes.push(performance.now() - start);
1720
- }
1721
- avgSim = simTimes.reduce(function (a, b) { return a + b; }, 0) / simTimes.length;
1722
- results.push({
1723
- test: 'Cosine Similarity',
1724
- time: (avgSim * 1000).toFixed(2) + "\u03BCs",
1725
- opsPerSec: "" + (1000000 / (avgSim * 1000)).toFixed(0)
1726
- });
1727
- _b.label = 21;
1728
- case 21:
1729
- output.writeln();
1730
- output.printTable({
1731
- columns: [
1732
- { key: 'test', header: 'Test', width: 28 },
1733
- { key: 'time', header: 'Time', width: 32 },
1734
- { key: 'opsPerSec', header: 'Ops/sec', width: 12 },
1735
- ],
1736
- data: results
1737
- });
1738
- output.writeln();
1739
- output.writeln(output.bold('Summary:'));
1740
- output.writeln(" Model: " + modelInfo.modelName + " (" + modelInfo.dimensions + "-dim)");
1741
- output.writeln(" Cold start: " + coldTime + "ms");
1742
- output.writeln(" Warm embed: ~" + avgWarm.toFixed(1) + "ms");
1743
- output.writeln(" Throughput: ~" + (1000 / avgWarm).toFixed(0) + " embeds/sec");
1744
- return [2 /*return*/, { success: true, data: { results: results, avgWarm: avgWarm, coldTime: coldTime } }];
1745
- case 22:
1746
- error_9 = _b.sent();
1747
- output.printError(error_9 instanceof Error ? error_9.message : String(error_9));
1748
- return [2 /*return*/, { success: false, exitCode: 1 }];
1749
- case 23: return [2 /*return*/];
1459
+ }
1460
+ const avgCache = cacheTimes.reduce((a, b) => a + b, 0) / cacheTimes.length;
1461
+ results.push({
1462
+ test: 'Cache Hit',
1463
+ time: `${avgCache.toFixed(2)}ms avg`,
1464
+ opsPerSec: `${(1000 / avgCache).toFixed(0)}`
1465
+ });
1466
+ // Test 6: Similarity computation
1467
+ output.writeln(output.dim('Testing similarity...'));
1468
+ const emb1 = (await generateEmbedding('Hello world')).embedding;
1469
+ const emb2 = (await generateEmbedding('Hi there')).embedding;
1470
+ const simTimes = [];
1471
+ for (let i = 0; i < 1000; i++) {
1472
+ const start = performance.now();
1473
+ cosineSimilarity(emb1, emb2);
1474
+ simTimes.push(performance.now() - start);
1475
+ }
1476
+ const avgSim = simTimes.reduce((a, b) => a + b, 0) / simTimes.length;
1477
+ results.push({
1478
+ test: 'Cosine Similarity',
1479
+ time: `${(avgSim * 1000).toFixed(2)}μs`,
1480
+ opsPerSec: `${(1000000 / (avgSim * 1000)).toFixed(0)}`
1481
+ });
1750
1482
  }
1751
- });
1752
- }); }
1483
+ output.writeln();
1484
+ output.printTable({
1485
+ columns: [
1486
+ { key: 'test', header: 'Test', width: 28 },
1487
+ { key: 'time', header: 'Time', width: 32 },
1488
+ { key: 'opsPerSec', header: 'Ops/sec', width: 12 },
1489
+ ],
1490
+ data: results,
1491
+ });
1492
+ output.writeln();
1493
+ output.writeln(output.bold('Summary:'));
1494
+ output.writeln(` Model: ${modelInfo.modelName} (${modelInfo.dimensions}-dim)`);
1495
+ output.writeln(` Cold start: ${coldTime}ms`);
1496
+ output.writeln(` Warm embed: ~${avgWarm.toFixed(1)}ms`);
1497
+ output.writeln(` Throughput: ~${(1000 / avgWarm).toFixed(0)} embeds/sec`);
1498
+ return { success: true, data: { results, avgWarm, coldTime } };
1499
+ }
1500
+ catch (error) {
1501
+ output.printError(error instanceof Error ? error.message : String(error));
1502
+ return { success: false, exitCode: 1 };
1503
+ }
1504
+ },
1753
1505
  };
1754
1506
  // Main embeddings command
1755
- export var embeddingsCommand = {
1507
+ export const embeddingsCommand = {
1756
1508
  name: 'embeddings',
1757
1509
  description: 'Vector embeddings, semantic search, similarity operations',
1758
1510
  aliases: ['embed'],
@@ -1782,45 +1534,43 @@ export var embeddingsCommand = {
1782
1534
  { command: 'claude-flow embeddings hyperbolic -a convert', description: 'Hyperbolic space' },
1783
1535
  { command: 'claude-flow embed neural -f drift', description: 'Neural substrate' },
1784
1536
  ],
1785
- action: function () { return __awaiter(void 0, void 0, Promise, function () {
1786
- return __generator(this, function (_a) {
1787
- output.writeln();
1788
- output.writeln(output.bold('RuFlo Embeddings'));
1789
- output.writeln(output.dim('Vector embeddings and semantic search'));
1790
- output.writeln();
1791
- output.writeln('Core Commands:');
1792
- output.printList([
1793
- 'init - Initialize ONNX models and hyperbolic config',
1794
- 'generate - Generate embeddings for text',
1795
- 'search - Semantic similarity search',
1796
- 'compare - Compare similarity between texts',
1797
- 'collections - Manage embedding collections',
1798
- 'index - Manage HNSW indexes',
1799
- 'providers - List available providers',
1800
- ]);
1801
- output.writeln();
1802
- output.writeln('Advanced Features:');
1803
- output.printList([
1804
- 'chunk - Document chunking with overlap',
1805
- 'normalize - L2/L1/minmax/zscore normalization',
1806
- 'hyperbolic - Poincaré ball embeddings',
1807
- 'neural - Neural substrate (drift, memory, swarm)',
1808
- 'models - List/download ONNX models',
1809
- 'cache - Manage persistent SQLite cache',
1810
- ]);
1811
- output.writeln();
1812
- output.writeln('Performance:');
1813
- output.printList([
1814
- 'HNSW indexing: 150x-12,500x faster search',
1815
- 'Agentic Flow: 75x faster than Transformers.js (~3ms)',
1816
- 'Persistent cache: SQLite-backed, survives restarts',
1817
- 'Hyperbolic: Better hierarchical representation',
1818
- ]);
1819
- output.writeln();
1820
- output.writeln(output.dim('Created with ❤️ by ruv.io'));
1821
- return [2 /*return*/, { success: true }];
1822
- });
1823
- }); }
1537
+ action: async () => {
1538
+ output.writeln();
1539
+ output.writeln(output.bold('RuFlo Embeddings'));
1540
+ output.writeln(output.dim('Vector embeddings and semantic search'));
1541
+ output.writeln();
1542
+ output.writeln('Core Commands:');
1543
+ output.printList([
1544
+ 'init - Initialize ONNX models and hyperbolic config',
1545
+ 'generate - Generate embeddings for text',
1546
+ 'search - Semantic similarity search',
1547
+ 'compare - Compare similarity between texts',
1548
+ 'collections - Manage embedding collections',
1549
+ 'index - Manage HNSW indexes',
1550
+ 'providers - List available providers',
1551
+ ]);
1552
+ output.writeln();
1553
+ output.writeln('Advanced Features:');
1554
+ output.printList([
1555
+ 'chunk - Document chunking with overlap',
1556
+ 'normalize - L2/L1/minmax/zscore normalization',
1557
+ 'hyperbolic - Poincaré ball embeddings',
1558
+ 'neural - Neural substrate (drift, memory, swarm)',
1559
+ 'models - List/download ONNX models',
1560
+ 'cache - Manage persistent SQLite cache',
1561
+ ]);
1562
+ output.writeln();
1563
+ output.writeln('Performance:');
1564
+ output.printList([
1565
+ 'HNSW indexing: 150x-12,500x faster search',
1566
+ 'Agentic Flow: 75x faster than Transformers.js (~3ms)',
1567
+ 'Persistent cache: SQLite-backed, survives restarts',
1568
+ 'Hyperbolic: Better hierarchical representation',
1569
+ ]);
1570
+ output.writeln();
1571
+ output.writeln(output.dim('Created with ❤️ by ruv.io'));
1572
+ return { success: true };
1573
+ },
1824
1574
  };
1825
1575
  export default embeddingsCommand;
1826
1576
  //# sourceMappingURL=embeddings.js.map