agentic-flow 2.0.0-alpha → 2.0.1-alpha

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (81)
  1. package/README.md +320 -23
  2. package/agentic-flow/.claude/agents/base-template-generator.md +229 -3
  3. package/agentic-flow/.claude/agents/core/coder.md +212 -7
  4. package/agentic-flow/.claude/agents/core/planner.md +228 -7
  5. package/agentic-flow/.claude/agents/core/researcher.md +205 -10
  6. package/agentic-flow/.claude/agents/core/reviewer.md +216 -5
  7. package/agentic-flow/.claude/agents/core/tester.md +213 -3
  8. package/agentic-flow/.claude/agents/data/ml/data-ml-model.md +256 -5
  9. package/agentic-flow/.claude/agents/development/backend/dev-backend-api.md +209 -6
  10. package/agentic-flow/.claude/agents/documentation/api-docs/docs-api-openapi.md +185 -5
  11. package/agentic-flow/.claude/agents/github/code-review-swarm.md +307 -468
  12. package/agentic-flow/.claude/agents/github/issue-tracker.md +270 -13
  13. package/agentic-flow/.claude/agents/github/pr-manager.md +259 -12
  14. package/agentic-flow/.claude/agents/github/release-manager.md +253 -15
  15. package/agentic-flow/.claude/agents/github/workflow-automation.md +277 -9
  16. package/agentic-flow/.claude/agents/sona/sona-learning-optimizer.md +496 -0
  17. package/agentic-flow/.claude/agents/sparc/architecture.md +231 -4
  18. package/agentic-flow/.claude/agents/sparc/pseudocode.md +206 -4
  19. package/agentic-flow/.claude/agents/sparc/refinement.md +283 -6
  20. package/agentic-flow/.claude/agents/sparc/specification.md +205 -3
  21. package/agentic-flow/.claude/agents/swarm/adaptive-coordinator.md +731 -0
  22. package/agentic-flow/.claude/agents/swarm/hierarchical-coordinator.md +455 -1
  23. package/agentic-flow/.claude/agents/swarm/mesh-coordinator.md +571 -0
  24. package/agentic-flow/.claude/agents/templates/sparc-coordinator.md +336 -5
  25. package/agentic-flow/dist/cli/commands/sona-train.d.ts.map +1 -0
  26. package/agentic-flow/dist/cli/commands/sona-train.js +295 -0
  27. package/agentic-flow/dist/cli/commands/sona-train.js.map +1 -0
  28. package/agentic-flow/dist/cli/commands/sona.d.ts.map +1 -0
  29. package/agentic-flow/dist/cli/commands/sona.js +290 -0
  30. package/agentic-flow/dist/cli/commands/sona.js.map +1 -0
  31. package/agentic-flow/dist/core/agentdb-fast.d.ts.map +1 -0
  32. package/agentic-flow/dist/core/agentdb-fast.js +299 -0
  33. package/agentic-flow/dist/core/agentdb-fast.js.map +1 -0
  34. package/agentic-flow/dist/core/attention-fallbacks.d.ts.map +1 -0
  35. package/agentic-flow/dist/core/attention-fallbacks.js +321 -0
  36. package/agentic-flow/dist/core/attention-fallbacks.js.map +1 -0
  37. package/agentic-flow/dist/core/embedding-service.d.ts.map +1 -0
  38. package/agentic-flow/dist/core/embedding-service.js +370 -0
  39. package/agentic-flow/dist/core/embedding-service.js.map +1 -0
  40. package/agentic-flow/dist/core/gnn-wrapper.d.ts.map +1 -0
  41. package/agentic-flow/dist/core/gnn-wrapper.js +236 -0
  42. package/agentic-flow/dist/core/gnn-wrapper.js.map +1 -0
  43. package/agentic-flow/dist/core/index.d.ts.map +1 -1
  44. package/agentic-flow/dist/core/index.js +80 -3
  45. package/agentic-flow/dist/core/index.js.map +1 -1
  46. package/agentic-flow/dist/mcp/claudeFlowSdkServer.d.ts.map +1 -1
  47. package/agentic-flow/dist/mcp/claudeFlowSdkServer.js +109 -0
  48. package/agentic-flow/dist/mcp/claudeFlowSdkServer.js.map +1 -1
  49. package/agentic-flow/dist/mcp/tools/agent-booster-tools.d.ts.map +1 -0
  50. package/agentic-flow/dist/mcp/tools/agent-booster-tools.js +262 -0
  51. package/agentic-flow/dist/mcp/tools/agent-booster-tools.js.map +1 -0
  52. package/agentic-flow/dist/mcp/tools/sona-tools.d.ts.map +1 -0
  53. package/agentic-flow/dist/mcp/tools/sona-tools.js +560 -0
  54. package/agentic-flow/dist/mcp/tools/sona-tools.js.map +1 -0
  55. package/agentic-flow/dist/optimizations/agent-booster-migration.d.ts.map +1 -0
  56. package/agentic-flow/dist/optimizations/agent-booster-migration.js +323 -0
  57. package/agentic-flow/dist/optimizations/agent-booster-migration.js.map +1 -0
  58. package/agentic-flow/dist/optimizations/configuration-tuning.d.ts.map +1 -0
  59. package/agentic-flow/dist/optimizations/configuration-tuning.js +422 -0
  60. package/agentic-flow/dist/optimizations/configuration-tuning.js.map +1 -0
  61. package/agentic-flow/dist/optimizations/ruvector-backend.d.ts.map +1 -0
  62. package/agentic-flow/dist/optimizations/ruvector-backend.js +464 -0
  63. package/agentic-flow/dist/optimizations/ruvector-backend.js.map +1 -0
  64. package/agentic-flow/dist/services/embedding-service.d.ts.map +1 -0
  65. package/agentic-flow/dist/services/embedding-service.js +367 -0
  66. package/agentic-flow/dist/services/embedding-service.js.map +1 -0
  67. package/agentic-flow/dist/services/sona-agent-training.d.ts.map +1 -0
  68. package/agentic-flow/dist/services/sona-agent-training.js +382 -0
  69. package/agentic-flow/dist/services/sona-agent-training.js.map +1 -0
  70. package/agentic-flow/dist/services/sona-agentdb-integration.d.ts.map +1 -0
  71. package/agentic-flow/dist/services/sona-agentdb-integration.js +346 -0
  72. package/agentic-flow/dist/services/sona-agentdb-integration.js.map +1 -0
  73. package/agentic-flow/dist/services/sona-service.d.ts.map +1 -0
  74. package/agentic-flow/dist/services/sona-service.js +448 -0
  75. package/agentic-flow/dist/services/sona-service.js.map +1 -0
  76. package/agentic-flow/dist/services/sona-types.d.ts.map +1 -0
  77. package/agentic-flow/dist/services/sona-types.js +59 -0
  78. package/agentic-flow/dist/services/sona-types.js.map +1 -0
  79. package/docs/README.md +27 -2
  80. package/package.json +12 -2
  81. package/docs/AGENTIC_JUJUTSU_QUICKSTART.md +0 -491
@@ -0,0 +1 @@
+ {"version":3,"file":"attention-fallbacks.js","sourceRoot":"","sources":["../../src/core/attention-fallbacks.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AASH;;;GAGG;AACH,MAAM,UAAU,yBAAyB,CACvC,KAAe,EACf,GAAa,EACb,KAAe,EACf,IAAe;IAEf,MAAM,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC;IAExB,+CAA+C;IAC/C,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5B,KAAK,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;IAC7B,CAAC;IACD,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAEvB,yBAAyB;IACzB,IAAI,IAAI,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;QAC1B,KAAK,GAAG,CAAC,QAAQ,CAAC;IACpB,CAAC;IAED,iCAAiC;IACjC,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IACjC,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,iCAAiC;IAE1D,iBAAiB;IACjB,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC;IAE1C,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC;AACvC,CAAC;AAED;;;;GAIG;AACH,MAAM,OAAO,kBAAkB;IACrB,QAAQ,CAAS;IACjB,SAAS,CAAS;IAClB,OAAO,CAAS;IAChB,YAAY,CAAe;IAC3B,UAAU,CAAe;IACzB,YAAY,CAAe;IAC3B,aAAa,CAAa;IAElC,YAAY,MAAuB;QACjC,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC;QAClC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC;QAE1D,8BAA8B;QAC9B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QAC7C,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QAC3C,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;IACtD,CAAC;IAEO,iBAAiB;QACvB,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,MAAM,WAAW,GAAe,EAAE,CAAC;YACnC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,OAAO,EAAE,CAAC,EAAE,EAAE,CAAC;gBACtC,MAAM,GAAG,GAAa,EAAE,CAAC;gBACzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;oBACxC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;gBACxC,CAAC;gBACD,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,CAAC;YACD,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAC5B,CAAC;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAEO,uBAAuB;QAC7B,MAAM,OAAO,GAAe,EAAE,CAAC;QAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACxC,MAAM,GAAG,GAAa,EAAE,CAAC;YACzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;gBACxC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC;YACxC,CAAC;YACD,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACpB,CAAC;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,OAAO,CACL,KAAe,EACf,GAAa,EACb,KAAe,EACf,IAAe;QAEf,MAAM,WAAW,GAAe,EAAE,CAAC;QACnC,MAAM,UAAU,GAAe,EAAE,CAAC;QAElC,oBAAoB;QACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,4BAA4B;YAC5B,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;YACpD,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;YAEpD,0BAA0B;YAC1B,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,yBAAyB,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC;YAErE,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACzB,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC3B,CAAC;QAED,oBAAoB;QACpB,MAAM,YAAY,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;QAExC,oBAAoB;QACpB,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;QAE9D,OAAO,EAAE,MAAM,EAAE,gBAAgB,EAAE,UAAU,EAAE,CAAC;IAClD,CAAC;IAEO,OAAO,CAAC,KAAe,EAAE,OAAmB;QAClD,MAAM,MAAM,GAAa,EAAE,CAAC;QAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CA
AC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACxC,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACtC,GAAG,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YAClC,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACnB,CAAC;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,OAAO,cAAc;IACjB,SAAS,CAAS;IAClB,SAAS,CAAS;IAE1B,YAAY,MAAuB;QACjC,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC;QAClC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,YAAY;IAC7D,CAAC;IAED,OAAO,CACL,KAAiB,EACjB,GAAe,EACf,KAAiB,EACjB,WAAmB,CAAC;QAEpB,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;QAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAE1C,MAAM,MAAM,GAAe,EAAE,CAAC;QAC9B,MAAM,eAAe,GAAe,EAAE,CAAC;QAEvC,0CAA0C;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;YAChD,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;YAEtD,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,QAAQ,EAAE,EAAE,EAAE,EAAE,CAAC;gBACrC,MAAM,MAAM,GAAa,EAAE,CAAC;gBAC5B,IAAI,QAAQ,GAAG,CAAC,QAAQ,CAAC;gBAEzB,0CAA0C;gBAC1C,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,MAAM,EAAE,EAAE,EAAE,EAAE,CAAC;oBACnC,IAAI,KAAK,GAAG,CAAC,CAAC;oBACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;wBAC1C,KAAK,IAAI,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;oBACrC,CAAC;oBACD,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC5B,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;oBACnB,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;gBACvC,CAAC;gBAED,6BAA6B;gBAC7B,MAAM,SAAS,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC;gBAC1D,MAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;gBACpD,MAAM,OAAO,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC;gBAE/C,yBAAyB;gBACzB,MAAM,SAAS,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;gBACrD,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,MAAM,EAAE,EAAE,EAAE,EAAE,CAAC;oBACnC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;wBAC1C,SAAS,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC7C,CAAC;gBACH,CAAC;gBAED,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;gBACvB,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAChC,CAAC;QACH,CAAC;QAED,OAAO,EAAE,MAAM,EAAE,eAAe,EAAE,CAAC;IACrC,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,OAAO,eAAe;IAClB,SAAS,CAAS;IAClB,UAAU,CAAwB;IAE1C,YAAY,MAAuB;QACjC,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC;QAClC,kBAAkB;QAClB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IACjE,CAAC;IAED,OAAO,CACL,KAAiB,EACjB,GAAe,EACf,KAAiB;QAEjB,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;QAC5B,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;QAE5B,oBAAoB;QACpB,MAAM,WAAW,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC;QAC3D,MAAM,SAAS,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC;QAEvD,6CAA6C;QAC7C,MAAM,GAAG,GAAe,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,SAAS,EAAE,EAAE,GAAG,EAAE,CAClE,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CACnB,CAAC;QAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAChC,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,EAAE,CAAC;gBAC3C,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE
,EAAE,GAAG,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC;oBAChC,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;gBACjD,CAAC;YACH,CAAC;QACH,CAAC;QAED,oBAAoB;QACpB,MAAM,MAAM,GAAe,EAAE,CAAC;QAC9B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAChC,MAAM,GAAG,GAAa,EAAE,CAAC;YACzB,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC;gBAChC,IAAI,GAAG,GAAG,CAAC,CAAC;gBACZ,KAAK,IAAI,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,EAAE,CAAC;oBAC3C,GAAG,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC;gBAC1C,CAAC;gBACD,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAChB,CAAC;YAED,YAAY;YACZ,MAAM,OAAO,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;YAC1D,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;QAClD,CAAC;QAED,OAAO,EAAE,MAAM,EAAE,CAAC;IACpB,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,OAAO,mBAAmB;IACtB,SAAS,CAAS;IAClB,SAAS,CAAS;IAE1B,YAAY,MAAuB;QACjC,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC;QAClC,IAAI,CAAC,SAAS,GAAG,CAAC,GAAG,CAAC,CAAC,0BAA0B;IACnD,CAAC;IAED,OAAO,CACL,KAAe,EACf,GAAa,EACb,KAAe;QAEf,mCAAmC;QACnC,MAAM,QAAQ,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;QAErD,gDAAgD;QAChD,MAAM,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;QAEnC,iBAAiB;QACjB,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC;QAE1C,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC;IAC9B,CAAC;IAEO,kBAAkB,CAAC,CAAW,EAAE,CAAW;QACjD,kDAAkD;QAClD,IAAI,UAAU,GAAG,CAAC,CAAC;QACnB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAClC,MAAM,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YACzB,UAAU,IAAI,IAAI,GAAG,IAAI,CAAC;QAC5B,CAAC;QAED,MAAM,OAAO,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;QACrD,MAAM,OAAO,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;QAErD,MAAM,SAAS,GAAG,UAAU,CAAC;QAC7B,MAAM,WAAW,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;QAElD,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,WAAW,CAAC,CAAC;IACvD,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,OAAO,YAAY;IACf,OAAO,CAAuB;IAC9B,UAAU,CAAS;IACnB,aAAa,CAAa;IAElC,YAAY,MAAiD;QAC3D,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,IAAI,CAAC,CAAC;QACzC,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,CACvB,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,EAC3B,GAAG,EAAE,CAAC,IAAI,kBAAkB,CAAC,MAAM,CAAC,CACrC,CAAC;QAEF,oCAAoC;QACpC,IAAI,CAAC,aAAa,GAAG,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,EAAE,GAAG,EAAE,CAChE,KAAK,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,SAAS,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAC5E,CAAC;IACJ,CAAC;IAED,OAAO,CACL,KAAe,EACf,GAAa,EACb,KAAe,EACf,OAAe,CAAC;QAEhB,wBAAwB;QACxB,MAAM,YAAY,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE;YACpD,IAAI,KAAK,GAAG,CAAC,CAAC;YACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACtC,KAAK,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YACjC,CAAC;YACD,OAAO,KAAK,CAAC;QACf,CAAC,CAAC,CAAC;QAEH,6BAA6B;QAC7B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACrD,MAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;QACpD,MAAM,aAAa,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC;QAErD,oBAAoB;QACpB,MAAM,aAAa,G
AAG,aAAa;aAChC,GAAG,CAAC,CAAC,MAAM,EAAE,GAAG,EAAE,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC;aACvC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;aACnC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QAElB,yCAAyC;QACzC,MAAM,MAAM,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE/C,KAAK,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,aAAa,EAAE,CAAC;YAC5C,MAAM,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC,MAAM,CAAC;YACzE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBACvC,MAAM,CAAC,CAAC,CAAC,IAAI,MAAM,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;YACxC,CAAC;QACH,CAAC;QAED,OAAO,EAAE,MAAM,EAAE,aAAa,EAAE,CAAC;IACnC,CAAC;CACF;AAED;;GAEG;AACH,MAAM,UAAU,0BAA0B;IACxC,IAAI,CAAC;QACH,MAAM,SAAS,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;QACjD,yBAAyB;QACzB,MAAM,MAAM,GAAG,SAAS,CAAC,cAAc,CACrC,IAAI,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EACxB,IAAI,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EACxB,IAAI,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EACxB,CAAC,CACF,CAAC;QACF,OAAO,IAAI,CAAC;IACd,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,eAAe,CAC7B,IAA8D,EAC9D,MAAuB;IAEvB,QAAQ,IAAI,EAAE,CAAC;QACb,KAAK,YAAY;YACf,OAAO,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC;QACxC,KAAK,OAAO;YACV,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;QACpC,KAAK,QAAQ;YACX,OAAO,IAAI,eAAe,CAAC,MAAM,CAAC,CAAC;QACrC,KAAK,YAAY;YACf,OAAO,IAAI,mBAAmB,CAAC,MAAM,CAAC,CAAC;QACzC,KAAK,KAAK;YACR,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,CAAC;QAClC;YACE,OAAO,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC;IAC1C,CAAC;AACH,CAAC","sourcesContent":["/**\n * Attention Module Fallbacks\n *\n * Since @ruvector/attention is completely broken, provide JavaScript fallbacks\n * Performance will be slower but functionality will work\n */\n\nexport interface AttentionConfig {\n hiddenDim: number;\n numHeads?: number;\n dropoutRate?: number;\n useFlash?: boolean;\n}\n\n/**\n * Scaled Dot-Product Attention\n * The core attention mechanism\n */\nexport function scaledDotProductAttention(\n query: number[],\n key: number[],\n value: number[],\n mask?: number[]\n): { output: number[]; weights: number[] } {\n const dk = query.length;\n\n // Compute attention scores: Q · K^T / sqrt(dk)\n let score = 0;\n for (let i = 0; i < dk; i++) {\n score += query[i] * key[i];\n }\n score /= Math.sqrt(dk);\n\n // Apply mask if provided\n if (mask && mask[0] === 0) {\n score = -Infinity;\n }\n\n // Softmax (single score version)\n const expScore = Math.exp(score);\n const weight = expScore; // Simplified for single K,V pair\n\n // Weighted value\n const output = value.map(v => v * weight);\n\n return { output, weights: [weight] };\n}\n\n/**\n * Multi-Head Attention (JavaScript fallback)\n *\n * Replaces broken @ruvector/attention.multiHeadAttention\n */\nexport class MultiHeadAttention {\n private numHeads: number;\n private hiddenDim: number;\n private headDim: number;\n private queryWeights: number[][][];\n private keyWeights: number[][][];\n private valueWeights: number[][][];\n private outputWeights: number[][];\n\n constructor(config: AttentionConfig) {\n this.numHeads = config.numHeads || 8;\n this.hiddenDim = config.hiddenDim;\n this.headDim = Math.floor(this.hiddenDim / this.numHeads);\n\n // Initialize weights (random)\n this.queryWeights = this.initializeWeights();\n this.keyWeights = this.initializeWeights();\n this.valueWeights = this.initializeWeights();\n this.outputWeights = this.initializeOutputWeights();\n }\n\n private initializeWeights(): number[][][] {\n const weights: number[][][] = 
[];\n for (let h = 0; h < this.numHeads; h++) {\n const headWeights: number[][] = [];\n for (let i = 0; i < this.headDim; i++) {\n const row: number[] = [];\n for (let j = 0; j < this.hiddenDim; j++) {\n row.push((Math.random() - 0.5) * 0.1);\n }\n headWeights.push(row);\n }\n weights.push(headWeights);\n }\n return weights;\n }\n\n private initializeOutputWeights(): number[][] {\n const weights: number[][] = [];\n for (let i = 0; i < this.hiddenDim; i++) {\n const row: number[] = [];\n for (let j = 0; j < this.hiddenDim; j++) {\n row.push((Math.random() - 0.5) * 0.1);\n }\n weights.push(row);\n }\n return weights;\n }\n\n forward(\n query: number[],\n key: number[],\n value: number[],\n mask?: number[]\n ): { output: number[]; attentionWeights: number[][] } {\n const headOutputs: number[][] = [];\n const allWeights: number[][] = [];\n\n // Process each head\n for (let h = 0; h < this.numHeads; h++) {\n // Project to head dimension\n const q = this.project(query, this.queryWeights[h]);\n const k = this.project(key, this.keyWeights[h]);\n const v = this.project(value, this.valueWeights[h]);\n\n // Attention for this head\n const { output, weights } = scaledDotProductAttention(q, k, v, mask);\n\n headOutputs.push(output);\n allWeights.push(weights);\n }\n\n // Concatenate heads\n const concatenated = headOutputs.flat();\n\n // Output projection\n const output = this.project(concatenated, this.outputWeights);\n\n return { output, attentionWeights: allWeights };\n }\n\n private project(input: number[], weights: number[][]): number[] {\n const output: number[] = [];\n for (let i = 0; i < weights.length; i++) {\n let sum = 0;\n for (let j = 0; j < input.length; j++) {\n sum += input[j] * weights[i][j];\n }\n output.push(sum);\n }\n return output;\n }\n}\n\n/**\n * Flash Attention (optimized fallback)\n *\n * Replaces broken @ruvector/attention.flashAttention\n * Uses tiling/chunking for better memory efficiency\n */\nexport class FlashAttention {\n private hiddenDim: number;\n private blockSize: number;\n\n constructor(config: AttentionConfig) {\n this.hiddenDim = config.hiddenDim;\n this.blockSize = Math.min(64, this.hiddenDim); // Tile size\n }\n\n forward(\n query: number[][],\n key: number[][],\n value: number[][],\n numHeads: number = 8\n ): { output: number[][]; attentionScores: number[][] } {\n const seqLen = query.length;\n const headDim = this.hiddenDim / numHeads;\n\n const output: number[][] = [];\n const attentionScores: number[][] = [];\n\n // Process in blocks for memory efficiency\n for (let i = 0; i < seqLen; i += this.blockSize) {\n const blockEnd = Math.min(i + this.blockSize, seqLen);\n\n for (let qi = i; qi < blockEnd; qi++) {\n const scores: number[] = [];\n let maxScore = -Infinity;\n\n // Compute attention scores for this query\n for (let ki = 0; ki < seqLen; ki++) {\n let score = 0;\n for (let d = 0; d < query[qi].length; d++) {\n score += query[qi][d] * key[ki][d];\n }\n score /= Math.sqrt(headDim);\n scores.push(score);\n maxScore = Math.max(maxScore, score);\n }\n\n // Numerically stable softmax\n const expScores = scores.map(s => Math.exp(s - maxScore));\n const sumExp = expScores.reduce((a, b) => a + b, 0);\n const weights = expScores.map(e => e / sumExp);\n\n // Weighted sum of values\n const outputRow = new Array(value[0].length).fill(0);\n for (let vi = 0; vi < seqLen; vi++) {\n for (let d = 0; d < value[vi].length; d++) {\n outputRow[d] += weights[vi] * value[vi][d];\n }\n }\n\n output.push(outputRow);\n attentionScores.push(weights);\n }\n }\n\n return { output, 
attentionScores };\n }\n}\n\n/**\n * Linear Attention (fallback)\n *\n * O(n) complexity approximation of attention\n */\nexport class LinearAttention {\n private hiddenDim: number;\n private featureMap: (x: number) => number;\n\n constructor(config: AttentionConfig) {\n this.hiddenDim = config.hiddenDim;\n // ELU feature map\n this.featureMap = (x: number) => (x > 0 ? x : Math.exp(x) - 1);\n }\n\n forward(\n query: number[][],\n key: number[][],\n value: number[][]\n ): { output: number[][] } {\n const seqLen = query.length;\n const dim = value[0].length;\n\n // Apply feature map\n const queryMapped = query.map(q => q.map(this.featureMap));\n const keyMapped = key.map(k => k.map(this.featureMap));\n\n // Compute K^T V (dimension: [dim, valueDim])\n const ktv: number[][] = Array.from({ length: this.hiddenDim }, () =>\n Array(dim).fill(0)\n );\n\n for (let i = 0; i < seqLen; i++) {\n for (let d1 = 0; d1 < this.hiddenDim; d1++) {\n for (let d2 = 0; d2 < dim; d2++) {\n ktv[d1][d2] += keyMapped[i][d1] * value[i][d2];\n }\n }\n }\n\n // Compute Q (K^T V)\n const output: number[][] = [];\n for (let i = 0; i < seqLen; i++) {\n const row: number[] = [];\n for (let d2 = 0; d2 < dim; d2++) {\n let sum = 0;\n for (let d1 = 0; d1 < this.hiddenDim; d1++) {\n sum += queryMapped[i][d1] * ktv[d1][d2];\n }\n row.push(sum);\n }\n\n // Normalize\n const normSum = queryMapped[i].reduce((a, b) => a + b, 0);\n output.push(row.map(v => v / (normSum + 1e-8)));\n }\n\n return { output };\n }\n}\n\n/**\n * Hyperbolic Attention (simplified fallback)\n *\n * Approximation using hyperbolic geometry\n */\nexport class HyperbolicAttention {\n private hiddenDim: number;\n private curvature: number;\n\n constructor(config: AttentionConfig) {\n this.hiddenDim = config.hiddenDim;\n this.curvature = -1.0; // Poincaré ball curvature\n }\n\n forward(\n query: number[],\n key: number[],\n value: number[]\n ): { output: number[]; distance: number } {\n // Hyperbolic distance (simplified)\n const distance = this.hyperbolicDistance(query, key);\n\n // Attention weight based on hyperbolic distance\n const weight = Math.exp(-distance);\n\n // Weighted value\n const output = value.map(v => v * weight);\n\n return { output, distance };\n }\n\n private hyperbolicDistance(a: number[], b: number[]): number {\n // Simplified hyperbolic distance in Poincaré ball\n let normDiffSq = 0;\n for (let i = 0; i < a.length; i++) {\n const diff = a[i] - b[i];\n normDiffSq += diff * diff;\n }\n\n const normASq = a.reduce((sum, v) => sum + v * v, 0);\n const normBSq = b.reduce((sum, v) => sum + v * v, 0);\n\n const numerator = normDiffSq;\n const denominator = (1 - normASq) * (1 - normBSq);\n\n return Math.acosh(1 + (2 * numerator) / denominator);\n }\n}\n\n/**\n * MoE (Mixture of Experts) Attention (fallback)\n *\n * Routes to different expert attention modules\n */\nexport class MoEAttention {\n private experts: MultiHeadAttention[];\n private numExperts: number;\n private gatingWeights: number[][];\n\n constructor(config: AttentionConfig & { numExperts?: number }) {\n this.numExperts = config.numExperts || 4;\n this.experts = Array.from(\n { length: this.numExperts },\n () => new MultiHeadAttention(config)\n );\n\n // Initialize gating network weights\n this.gatingWeights = Array.from({ length: this.numExperts }, () =>\n Array.from({ length: config.hiddenDim }, () => (Math.random() - 0.5) * 0.1)\n );\n }\n\n forward(\n query: number[],\n key: number[],\n value: number[],\n topK: number = 2\n ): { output: number[]; expertWeights: number[] } {\n // 
Compute gating scores\n const gatingScores = this.gatingWeights.map(weights => {\n let score = 0;\n for (let i = 0; i < query.length; i++) {\n score += query[i] * weights[i];\n }\n return score;\n });\n\n // Softmax over top-K experts\n const expScores = gatingScores.map(s => Math.exp(s));\n const sumExp = expScores.reduce((a, b) => a + b, 0);\n const expertWeights = expScores.map(e => e / sumExp);\n\n // Get top-K experts\n const expertIndices = expertWeights\n .map((weight, idx) => ({ weight, idx }))\n .sort((a, b) => b.weight - a.weight)\n .slice(0, topK);\n\n // Weighted combination of expert outputs\n const output = new Array(query.length).fill(0);\n\n for (const { weight, idx } of expertIndices) {\n const expertOutput = this.experts[idx].forward(query, key, value).output;\n for (let i = 0; i < output.length; i++) {\n output[i] += weight * expertOutput[i];\n }\n }\n\n return { output, expertWeights };\n }\n}\n\n/**\n * Check if native attention is available\n */\nexport function isNativeAttentionAvailable(): boolean {\n try {\n const attention = require('@ruvector/attention');\n // Try a simple operation\n const result = attention.flashAttention(\n new Float32Array([1, 0]),\n new Float32Array([1, 0]),\n new Float32Array([1, 0]),\n 1\n );\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Factory function to create appropriate attention module\n */\nexport function createAttention(\n type: 'multi-head' | 'flash' | 'linear' | 'hyperbolic' | 'moe',\n config: AttentionConfig\n): MultiHeadAttention | FlashAttention | LinearAttention | HyperbolicAttention | MoEAttention {\n switch (type) {\n case 'multi-head':\n return new MultiHeadAttention(config);\n case 'flash':\n return new FlashAttention(config);\n case 'linear':\n return new LinearAttention(config);\n case 'hyperbolic':\n return new HyperbolicAttention(config);\n case 'moe':\n return new MoEAttention(config);\n default:\n return new MultiHeadAttention(config);\n }\n}\n"]}
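The sourcesContent embedded in the map above shows what the new attention-fallbacks module ships: pure-JavaScript replacements for the @ruvector/attention primitives (scaledDotProductAttention, MultiHeadAttention, FlashAttention, LinearAttention, HyperbolicAttention, MoEAttention), an isNativeAttentionAvailable() probe, and a createAttention() factory keyed on 'multi-head' | 'flash' | 'linear' | 'hyperbolic' | 'moe'. A minimal usage sketch based on those exports; the import specifier is an assumption, not something this diff shows:

```ts
// Sketch only: names and signatures mirror the sourcesContent above; the import path is assumed.
import {
  createAttention,
  isNativeAttentionAvailable,
  scaledDotProductAttention,
  MultiHeadAttention,
  type AttentionConfig,
} from 'agentic-flow/dist/core/attention-fallbacks.js'; // hypothetical specifier

const config: AttentionConfig = { hiddenDim: 64, numHeads: 8 };

// Probe whether the native @ruvector/attention addon loads at all; the factory below
// always returns one of the JS fallback classes regardless.
console.log('native attention available:', isNativeAttentionAvailable());
const fallback = createAttention('flash', config);
console.log(fallback.constructor.name); // FlashAttention

// Multi-head fallback over single query/key/value vectors of length hiddenDim.
const mha = new MultiHeadAttention(config);
const vec = Array.from({ length: config.hiddenDim }, () => Math.random());
const { output, attentionWeights } = mha.forward(vec, vec, vec);
console.log(output.length, attentionWeights.length); // 64 output dims, 8 per-head weight arrays

// The core kernel is exported directly as well.
const { output: single, weights } = scaledDotProductAttention([1, 0, 0], [1, 0, 0], [0.5, 0.25, 0.25]);
console.log(single, weights);
```

These fallbacks trade speed for portability, in line with the module's own comment that the native @ruvector/attention package is currently broken.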
@@ -0,0 +1 @@
+ {"version":3,"file":"embedding-service.d.ts","sourceRoot":"","sources":["../../src/core/embedding-service.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAEtC,MAAM,WAAW,eAAe;IAC9B,QAAQ,EAAE,QAAQ,GAAG,cAAc,GAAG,MAAM,GAAG,MAAM,CAAC;IACtD,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,KAAK,CAAC,EAAE;QACN,YAAY,EAAE,MAAM,CAAC;QACrB,WAAW,EAAE,MAAM,CAAC;KACrB,CAAC;IACF,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AACH,8BAAsB,gBAAiB,SAAQ,YAAY;IACzD,SAAS,CAAC,MAAM,EAAE,eAAe,CAAC;IAClC,SAAS,CAAC,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,CAAC,CAAa;gBAEvC,MAAM,EAAE,eAAe;IAQnC,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC;IACtD,QAAQ,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;IAEhE;;OAEG;IACH,SAAS,CAAC,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,IAAI;IAIlD;;OAEG;IACH,SAAS,CAAC,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,IAAI;IAY5D;;OAEG;IACH,UAAU,IAAI,IAAI;CAGnB;AAED;;;;;GAKG;AACH,qBAAa,sBAAuB,SAAQ,gBAAgB;IAC1D,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,OAAO,CAA0C;gBAE7C,MAAM,EAAE,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,GAAG;QAAE,MAAM,EAAE,MAAM,CAAA;KAAE;IAMpE,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC;IAkD7C,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;CAyC9D;AAED;;;;;GAKG;AACH,qBAAa,4BAA6B,SAAQ,gBAAgB;IAChE,OAAO,CAAC,QAAQ,CAAa;IAC7B,OAAO,CAAC,SAAS,CAAS;gBAEd,MAAM,EAAE,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC;IAK/C,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAc3B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC;IAoC7C,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;CAqC9D;AAED;;;;;GAKG;AACH,qBAAa,oBAAqB,SAAQ,gBAAgB;gBAC5C,MAAM,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC;IAQvC,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC;IA0B7C,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,eAAe,EAAE,CAAC;IAI7D,OAAO,CAAC,aAAa;CAsBtB;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,eAAe,GAAG,gBAAgB,CAkBhF;AAED;;GAEG;AACH,wBAAsB,YAAY,CAChC,IAAI,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GAChC,OAAO,CAAC,MAAM,EAAE,CAAC,CAQnB;AAED;;GAEG;AACH,wBAAsB,mBAAmB,CAAC,QAAQ,GAAE,MAAsB,GAAG,OAAO,CAAC;IACnF,IAAI,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAAC;IAC9C,YAAY,CAAC,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;IACvE,MAAM,CAAC,EAAE;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;CAClE,CAAC,CAgDD"}
@@ -0,0 +1,370 @@
+ /**
+ * Production Embedding Service
+ *
+ * Replaces mock embeddings with real implementations:
+ * 1. OpenAI Embeddings API (text-embedding-3-small/large)
+ * 2. Local Transformers.js (runs in Node.js/browser)
+ * 3. Custom ONNX models
+ * 4. Fallback hash-based embeddings (for development)
+ */
+ import { EventEmitter } from 'events';
+ /**
+ * Base embedding service interface
+ */
+ export class EmbeddingService extends EventEmitter {
+ config;
+ cache = new Map();
+ constructor(config) {
+ super();
+ this.config = {
+ cacheSize: 1000,
+ ...config
+ };
+ }
+ /**
+ * Get cached embedding if available
+ */
+ getCached(text) {
+ return this.cache.get(text) || null;
+ }
+ /**
+ * Cache embedding with LRU eviction
+ */
+ setCached(text, embedding) {
+ const cacheSize = this.config.cacheSize ?? 1000;
+ if (this.cache.size >= cacheSize) {
+ // Remove oldest entry (first in map)
+ const firstKey = this.cache.keys().next().value;
+ if (firstKey) {
+ this.cache.delete(firstKey);
+ }
+ }
+ this.cache.set(text, embedding);
+ }
+ /**
+ * Clear cache
+ */
+ clearCache() {
+ this.cache.clear();
+ }
+ }
+ /**
+ * OpenAI Embeddings Service
+ *
+ * Uses OpenAI's text-embedding-3-small (1536D) or text-embedding-3-large (3072D)
+ * https://platform.openai.com/docs/guides/embeddings
+ */
+ export class OpenAIEmbeddingService extends EmbeddingService {
+ apiKey;
+ model;
+ baseURL = 'https://api.openai.com/v1/embeddings';
+ constructor(config) {
+ super({ ...config, provider: 'openai' });
+ this.apiKey = config.apiKey;
+ this.model = config.model || 'text-embedding-3-small';
+ }
+ async embed(text) {
+ // Check cache
+ const cached = this.getCached(text);
+ if (cached) {
+ return {
+ embedding: cached,
+ latency: 0
+ };
+ }
+ const start = Date.now();
+ try {
+ const response = await fetch(this.baseURL, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${this.apiKey}`
+ },
+ body: JSON.stringify({
+ model: this.model,
+ input: text,
+ dimensions: this.config.dimensions || undefined
+ })
+ });
+ if (!response.ok) {
+ throw new Error(`OpenAI API error: ${response.statusText}`);
+ }
+ const data = await response.json();
+ const embedding = data.data[0].embedding;
+ // Cache it
+ this.setCached(text, embedding);
+ const latency = Date.now() - start;
+ this.emit('embed', { text, latency });
+ return {
+ embedding,
+ usage: data.usage,
+ latency
+ };
+ }
+ catch (error) {
+ throw new Error(`OpenAI embedding failed: ${error.message}`);
+ }
+ }
+ async embedBatch(texts) {
+ const start = Date.now();
+ try {
+ const response = await fetch(this.baseURL, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${this.apiKey}`
+ },
+ body: JSON.stringify({
+ model: this.model,
+ input: texts,
+ dimensions: this.config.dimensions || undefined
+ })
+ });
+ if (!response.ok) {
+ throw new Error(`OpenAI API error: ${response.statusText}`);
+ }
+ const data = await response.json();
+ const latency = Date.now() - start;
+ return data.data.map((item, index) => {
+ const embedding = item.embedding;
+ this.setCached(texts[index], embedding);
+ return {
+ embedding,
+ usage: {
+ promptTokens: Math.floor(data.usage.prompt_tokens / texts.length),
+ totalTokens: Math.floor(data.usage.total_tokens / texts.length)
+ },
+ latency: Math.floor(latency / texts.length)
+ };
+ });
+ }
+ catch (error) {
+ throw new Error(`OpenAI batch embedding failed: ${error.message}`);
+ }
+ }
+ }
+ /**
+ * Transformers.js Local Embedding Service
+ *
+ * Runs locally without API calls using ONNX runtime
+ * https://huggingface.co/docs/transformers.js
+ */
+ export class TransformersEmbeddingService extends EmbeddingService {
+ pipeline = null;
+ modelName;
+ constructor(config) {
+ super({ ...config, provider: 'transformers' });
+ this.modelName = config.model || 'Xenova/all-MiniLM-L6-v2';
+ }
+ async initialize() {
+ if (this.pipeline)
+ return;
+ try {
+ // Dynamically import transformers.js
+ const { pipeline } = await import('@xenova/transformers');
+ this.pipeline = await pipeline('feature-extraction', this.modelName);
+ this.emit('initialized', { model: this.modelName });
+ }
+ catch (error) {
+ throw new Error(`Failed to initialize transformers.js: ${error.message}`);
+ }
+ }
+ async embed(text) {
+ await this.initialize();
+ // Check cache
+ const cached = this.getCached(text);
+ if (cached) {
+ return {
+ embedding: cached,
+ latency: 0
+ };
+ }
+ const start = Date.now();
+ try {
+ const output = await this.pipeline(text, { pooling: 'mean', normalize: true });
+ // Convert to regular array
+ const embedding = Array.from(output.data);
+ // Cache it
+ this.setCached(text, embedding);
+ const latency = Date.now() - start;
+ this.emit('embed', { text, latency });
+ return {
+ embedding,
+ latency
+ };
+ }
+ catch (error) {
+ throw new Error(`Transformers.js embedding failed: ${error.message}`);
+ }
+ }
+ async embedBatch(texts) {
+ await this.initialize();
+ const start = Date.now();
+ try {
+ const results = [];
+ for (const text of texts) {
+ const cached = this.getCached(text);
+ if (cached) {
+ results.push({
+ embedding: cached,
+ latency: 0
+ });
+ }
+ else {
+ const output = await this.pipeline(text, {
+ pooling: 'mean',
+ normalize: true
+ });
+ const embedding = Array.from(output.data);
+ this.setCached(text, embedding);
+ results.push({
+ embedding,
+ latency: Math.floor((Date.now() - start) / texts.length)
+ });
+ }
+ }
+ return results;
+ }
+ catch (error) {
+ throw new Error(`Transformers.js batch embedding failed: ${error.message}`);
+ }
+ }
+ }
+ /**
+ * Mock Embedding Service (for development/testing)
+ *
+ * Generates deterministic hash-based embeddings
+ * Fast but not semantically meaningful
+ */
+ export class MockEmbeddingService extends EmbeddingService {
+ constructor(config) {
+ super({
+ provider: 'mock',
+ dimensions: 384,
+ ...config
+ });
+ }
+ async embed(text) {
+ // Check cache
+ const cached = this.getCached(text);
+ if (cached) {
+ return {
+ embedding: cached,
+ latency: 0
+ };
+ }
+ const start = Date.now();
+ // Generate hash-based embedding
+ const embedding = this.hashEmbedding(text);
+ // Cache it
+ this.setCached(text, embedding);
+ const latency = Date.now() - start;
+ return {
+ embedding,
+ latency
+ };
+ }
+ async embedBatch(texts) {
+ return Promise.all(texts.map(text => this.embed(text)));
+ }
+ hashEmbedding(text) {
+ const dimensions = this.config.dimensions || 384;
+ const embedding = new Array(dimensions);
+ // Seed with text hash
+ let hash = 0;
+ for (let i = 0; i < text.length; i++) {
+ hash = (hash << 5) - hash + text.charCodeAt(i);
+ hash = hash & hash;
+ }
+ // Generate pseudo-random embedding
+ for (let i = 0; i < dimensions; i++) {
+ const seed = hash + i * 2654435761;
+ const x = Math.sin(seed) * 10000;
+ embedding[i] = x - Math.floor(x);
+ }
+ // Normalize to unit vector
+ const norm = Math.sqrt(embedding.reduce((sum, v) => sum + v * v, 0));
+ return embedding.map(v => v / norm);
+ }
+ }
+ /**
+ * Factory function to create appropriate embedding service
+ */
+ export function createEmbeddingService(config) {
+ switch (config.provider) {
+ case 'openai':
+ if (!config.apiKey) {
+ throw new Error('OpenAI API key required');
+ }
+ return new OpenAIEmbeddingService(config);
+ case 'transformers':
+ return new TransformersEmbeddingService(config);
+ case 'mock':
+ return new MockEmbeddingService(config);
+ default:
+ console.warn(`Unknown provider: ${config.provider}, using mock`);
+ return new MockEmbeddingService(config);
+ }
+ }
+ /**
+ * Convenience function for quick embeddings
+ */
+ export async function getEmbedding(text, config) {
+ const service = createEmbeddingService({
+ provider: 'mock',
+ ...config
+ });
+ const result = await service.embed(text);
+ return result.embedding;
+ }
+ /**
+ * Benchmark different embedding providers
+ */
+ export async function benchmarkEmbeddings(testText = 'Hello world') {
+ const results = {};
+ // Test mock
+ const mockService = new MockEmbeddingService({ dimensions: 384 });
+ const mockResult = await mockService.embed(testText);
+ results.mock = {
+ latency: mockResult.latency,
+ dimensions: mockResult.embedding.length
+ };
+ // Test transformers (if available)
+ try {
+ const transformersService = new TransformersEmbeddingService({
+ model: 'Xenova/all-MiniLM-L6-v2'
+ });
+ const transformersResult = await transformersService.embed(testText);
+ results.transformers = {
+ latency: transformersResult.latency,
+ dimensions: transformersResult.embedding.length
+ };
+ }
+ catch (error) {
+ results.transformers = {
+ error: error.message
+ };
+ }
+ // Test OpenAI (if API key available)
+ const apiKey = process.env.OPENAI_API_KEY;
+ if (apiKey) {
+ try {
+ const openaiService = new OpenAIEmbeddingService({
+ apiKey,
+ model: 'text-embedding-3-small'
+ });
+ const openaiResult = await openaiService.embed(testText);
+ results.openai = {
+ latency: openaiResult.latency,
+ dimensions: openaiResult.embedding.length
+ };
+ }
+ catch (error) {
+ results.openai = {
+ error: error.message
+ };
+ }
+ }
+ return results;
+ }
+ //# sourceMappingURL=embedding-service.js.map
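Taken together, the compiled embedding-service module gives the package a provider-agnostic embedding API: createEmbeddingService() picks an implementation from config.provider ('openai', 'transformers', 'mock'; anything else, including the declared 'onnx' value, falls through to the mock with a warning), every service caches results keyed by input text, and each call reports its latency. A minimal usage sketch against the exports shown above; the import specifier is an assumption, not something the diff shows:

```ts
// Sketch only: exported names and result shapes come from the compiled code above;
// the import path is an assumption.
import {
  createEmbeddingService,
  getEmbedding,
  MockEmbeddingService,
} from 'agentic-flow/dist/core/embedding-service.js'; // hypothetical specifier

async function demo(): Promise<void> {
  // Deterministic hash-based embeddings: 384-D by default, cached, not semantically meaningful.
  const mock = new MockEmbeddingService({ dimensions: 384 });
  const { embedding, latency } = await mock.embed('hello world');
  console.log(embedding.length, latency); // 384, typically 0 ms

  // Provider selection via the factory; 'openai' throws unless apiKey is supplied.
  const apiKey = process.env.OPENAI_API_KEY;
  const service = createEmbeddingService(
    apiKey
      ? { provider: 'openai', apiKey, model: 'text-embedding-3-small' }
      : { provider: 'mock' }
  );
  const batch = await service.embedBatch(['first text', 'second text']);
  console.log(batch.map(r => r.embedding.length));

  // One-shot helper; defaults to the mock provider unless overridden.
  const vector = await getEmbedding('quick embedding');
  console.log(vector.length); // 384 with the mock provider
}

demo().catch(console.error);
```

benchmarkEmbeddings() in the same file runs this comparison across the mock, Transformers.js, and OpenAI providers (the latter two only when their dependency or OPENAI_API_KEY is available) and returns per-provider latency and dimensionality.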
@@ -0,0 +1 @@
+ {"version":3,"file":"embedding-service.js","sourceRoot":"","sources":["../../src/core/embedding-service.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAmBtC;;GAEG;AACH,MAAM,OAAgB,gBAAiB,SAAQ,YAAY;IAC/C,MAAM,CAAkB;IACxB,KAAK,GAA0B,IAAI,GAAG,EAAE,CAAC;IAEnD,YAAY,MAAuB;QACjC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,MAAM,GAAG;YACZ,SAAS,EAAE,IAAI;YACf,GAAG,MAAM;SACV,CAAC;IACJ,CAAC;IAKD;;OAEG;IACO,SAAS,CAAC,IAAY;QAC9B,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;IACtC,CAAC;IAED;;OAEG;IACO,SAAS,CAAC,IAAY,EAAE,SAAmB;QACnD,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC;QAChD,IAAI,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,SAAS,EAAE,CAAC;YACjC,qCAAqC;YACrC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,KAA2B,CAAC;YACtE,IAAI,QAAQ,EAAE,CAAC;gBACb,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;YAC9B,CAAC;QACH,CAAC;QACD,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;IAClC,CAAC;IAED;;OAEG;IACH,UAAU;QACR,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;IACrB,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,OAAO,sBAAuB,SAAQ,gBAAgB;IAClD,MAAM,CAAS;IACf,KAAK,CAAS;IACd,OAAO,GAAG,sCAAsC,CAAC;IAEzD,YAAY,MAA8D;QACxE,KAAK,CAAC,EAAE,GAAG,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,CAAC,CAAC;QACzC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC5B,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,wBAAwB,CAAC;IACxD,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,IAAY;QACtB,cAAc;QACd,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,MAAM,EAAE,CAAC;YACX,OAAO;gBACL,SAAS,EAAE,MAAM;gBACjB,OAAO,EAAE,CAAC;aACX,CAAC;QACJ,CAAC;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEzB,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE;gBACzC,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACP,cAAc,EAAE,kBAAkB;oBAClC,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;iBACvC;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACnB,KAAK,EAAE,IAAI,CAAC,KAAK;oBACjB,KAAK,EAAE,IAAI;oBACX,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,SAAS;iBAChD,CAAC;aACH,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACjB,MAAM,IAAI,KAAK,CAAC,qBAAqB,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC;YAC9D,CAAC;YAED,MAAM,IAAI,GAAQ,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YACxC,MAAM,SAAS,GAAa,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;YAEnD,WAAW;YACX,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;YAEhC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC;YAEnC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;YAEtC,OAAO;gBACL,SAAS;gBACT,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,OAAO;aACR,CAAC;QACJ,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QAC/D,CAAC;IACH,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,KAAe;QAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEzB,IAAI,CAAC;YACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE;gBACzC,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACP,cAAc,EAAE,kBAAkB;oBAClC,aAAa,EAAE,UAAU,IAAI,CAAC,MAAM,EAAE;iBACvC;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACnB,KAAK,EAAE,IAAI,CAAC,KAAK;oBACjB,KAAK,EAAE,KAAK;oBACZ,UAAU,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,SAAS;iBAChD,CAAC;aACH,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACjB,MAAM,IAAI,KAAK,CAAC,qBAAqB,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC;YAC9D,CAAC;YAED,MAAM,IAAI,GAAQ,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YACxC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC;YAEnC,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,IAAS,EAAE,KAAa,EAAE,EAAE;gBAChD,MAAM,SAAS,GAAa,IAAI,CAAC,SAAS,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,EAAE,SAAS,CAAC,CAAC;gBAExC,OAAO;oBACL,SAAS;oBACT,KAAK,EAAE;wBACL,YAAY,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,aAAa,GAAG,KAAK,CAAC,MAAM,CAAC;wBACjE,WAAW,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAK,CAAC,MAAM
,CAAC;qBAChE;oBACD,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,MAAM,CAAC;iBAC5C,CAAC;YACJ,CAAC,CAAC,CAAC;QACL,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,MAAM,IAAI,KAAK,CAAC,kCAAkC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QACrE,CAAC;IACH,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,OAAO,4BAA6B,SAAQ,gBAAgB;IACxD,QAAQ,GAAQ,IAAI,CAAC;IACrB,SAAS,CAAS;IAE1B,YAAY,MAAyC;QACnD,KAAK,CAAC,EAAE,GAAG,MAAM,EAAE,QAAQ,EAAE,cAAc,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,IAAI,yBAAyB,CAAC;IAC7D,CAAC;IAED,KAAK,CAAC,UAAU;QACd,IAAI,IAAI,CAAC,QAAQ;YAAE,OAAO;QAE1B,IAAI,CAAC;YACH,qCAAqC;YACrC,MAAM,EAAE,QAAQ,EAAE,GAAG,MAAM,MAAM,CAAC,sBAAsB,CAAC,CAAC;YAE1D,IAAI,CAAC,QAAQ,GAAG,MAAM,QAAQ,CAAC,oBAAoB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;YACrE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC;QACtD,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,MAAM,IAAI,KAAK,CAAC,yCAAyC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QAC5E,CAAC;IACH,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,IAAY;QACtB,MAAM,IAAI,CAAC,UAAU,EAAE,CAAC;QAExB,cAAc;QACd,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,MAAM,EAAE,CAAC;YACX,OAAO;gBACL,SAAS,EAAE,MAAM;gBACjB,OAAO,EAAE,CAAC;aACX,CAAC;QACJ,CAAC;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEzB,IAAI,CAAC;YACH,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAE/E,2BAA2B;YAC3B,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAa,CAAC;YAEtD,WAAW;YACX,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;YAEhC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC;YAEnC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;YAEtC,OAAO;gBACL,SAAS;gBACT,OAAO;aACR,CAAC;QACJ,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,MAAM,IAAI,KAAK,CAAC,qCAAqC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QACxE,CAAC;IACH,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,KAAe;QAC9B,MAAM,IAAI,CAAC,UAAU,EAAE,CAAC;QAExB,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEzB,IAAI,CAAC;YACH,MAAM,OAAO,GAAsB,EAAE,CAAC;YAEtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;gBAEpC,IAAI,MAAM,EAAE,CAAC;oBACX,OAAO,CAAC,IAAI,CAAC;wBACX,SAAS,EAAE,MAAM;wBACjB,OAAO,EAAE,CAAC;qBACX,CAAC,CAAC;gBACL,CAAC;qBAAM,CAAC;oBACN,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE;wBACvC,OAAO,EAAE,MAAM;wBACf,SAAS,EAAE,IAAI;qBAChB,CAAC,CAAC;oBACH,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAa,CAAC;oBAEtD,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;oBAEhC,OAAO,CAAC,IAAI,CAAC;wBACX,SAAS;wBACT,OAAO,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC;qBACzD,CAAC,CAAC;gBACL,CAAC;YACH,CAAC;YAED,OAAO,OAAO,CAAC;QACjB,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,MAAM,IAAI,KAAK,CAAC,2CAA2C,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QAC9E,CAAC;IACH,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,OAAO,oBAAqB,SAAQ,gBAAgB;IACxD,YAAY,MAAiC;QAC3C,KAAK,CAAC;YACJ,QAAQ,EAAE,MAAM;YAChB,UAAU,EAAE,GAAG;YACf,GAAG,MAAM;SACV,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,IAAY;QACtB,cAAc;QACd,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,MAAM,EAAE,CAAC;YACX,OAAO;gBACL,SAAS,EAAE,MAAM;gBACjB,OAAO,EAAE,CAAC;aACX,CAAC;QACJ,CAAC;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEzB,gCAAgC;QAChC,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;QAE3C,WAAW;QACX,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;QAEhC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC;QAEnC,OAAO;YACL,SAAS;YACT,OAAO;SACR,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,UAAU,CAAC,KAAe;QAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAC1D,CAAC;IAEO,aAAa,CAAC,IAAY;QAChC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,GAAG,CAA
C;QACjD,MAAM,SAAS,GAAG,IAAI,KAAK,CAAC,UAAU,CAAC,CAAC;QAExC,sBAAsB;QACtB,IAAI,IAAI,GAAG,CAAC,CAAC;QACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACrC,IAAI,GAAG,CAAC,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YAC/C,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;QACrB,CAAC;QAED,mCAAmC;QACnC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;YACpC,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,GAAG,UAAU,CAAC;YACnC,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;YACjC,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACnC,CAAC;QAED,2BAA2B;QAC3B,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QACrE,OAAO,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IACtC,CAAC;CACF;AAED;;GAEG;AACH,MAAM,UAAU,sBAAsB,CAAC,MAAuB;IAC5D,QAAQ,MAAM,CAAC,QAAQ,EAAE,CAAC;QACxB,KAAK,QAAQ;YACX,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;gBACnB,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC7C,CAAC;YACD,OAAO,IAAI,sBAAsB,CAAC,MAAa,CAAC,CAAC;QAEnD,KAAK,cAAc;YACjB,OAAO,IAAI,4BAA4B,CAAC,MAAM,CAAC,CAAC;QAElD,KAAK,MAAM;YACT,OAAO,IAAI,oBAAoB,CAAC,MAAM,CAAC,CAAC;QAE1C;YACE,OAAO,CAAC,IAAI,CAAC,qBAAqB,MAAM,CAAC,QAAQ,cAAc,CAAC,CAAC;YACjE,OAAO,IAAI,oBAAoB,CAAC,MAAM,CAAC,CAAC;IAC5C,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAChC,IAAY,EACZ,MAAiC;IAEjC,MAAM,OAAO,GAAG,sBAAsB,CAAC;QACrC,QAAQ,EAAE,MAAM;QAChB,GAAG,MAAM;KACS,CAAC,CAAC;IAEtB,MAAM,MAAM,GAAG,MAAM,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IACzC,OAAO,MAAM,CAAC,SAAS,CAAC;AAC1B,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,mBAAmB,CAAC,WAAmB,aAAa;IAKxE,MAAM,OAAO,GAAQ,EAAE,CAAC;IAExB,YAAY;IACZ,MAAM,WAAW,GAAG,IAAI,oBAAoB,CAAC,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC;IAClE,MAAM,UAAU,GAAG,MAAM,WAAW,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IACrD,OAAO,CAAC,IAAI,GAAG;QACb,OAAO,EAAE,UAAU,CAAC,OAAO;QAC3B,UAAU,EAAE,UAAU,CAAC,SAAS,CAAC,MAAM;KACxC,CAAC;IAEF,mCAAmC;IACnC,IAAI,CAAC;QACH,MAAM,mBAAmB,GAAG,IAAI,4BAA4B,CAAC;YAC3D,KAAK,EAAE,yBAAyB;SACjC,CAAC,CAAC;QACH,MAAM,kBAAkB,GAAG,MAAM,mBAAmB,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QACrE,OAAO,CAAC,YAAY,GAAG;YACrB,OAAO,EAAE,kBAAkB,CAAC,OAAO;YACnC,UAAU,EAAE,kBAAkB,CAAC,SAAS,CAAC,MAAM;SAChD,CAAC;IACJ,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QACpB,OAAO,CAAC,YAAY,GAAG;YACrB,KAAK,EAAE,KAAK,CAAC,OAAO;SACrB,CAAC;IACJ,CAAC;IAED,qCAAqC;IACrC,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IAC1C,IAAI,MAAM,EAAE,CAAC;QACX,IAAI,CAAC;YACH,MAAM,aAAa,GAAG,IAAI,sBAAsB,CAAC;gBAC/C,MAAM;gBACN,KAAK,EAAE,wBAAwB;aAChC,CAAC,CAAC;YACH,MAAM,YAAY,GAAG,MAAM,aAAa,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;YACzD,OAAO,CAAC,MAAM,GAAG;gBACf,OAAO,EAAE,YAAY,CAAC,OAAO;gBAC7B,UAAU,EAAE,YAAY,CAAC,SAAS,CAAC,MAAM;aAC1C,CAAC;QACJ,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,OAAO,CAAC,MAAM,GAAG;gBACf,KAAK,EAAE,KAAK,CAAC,OAAO;aACrB,CAAC;QACJ,CAAC;IACH,CAAC;IAED,OAAO,OAAO,CAAC;AACjB,CAAC","sourcesContent":["/**\n * Production Embedding Service\n *\n * Replaces mock embeddings with real implementations:\n * 1. OpenAI Embeddings API (text-embedding-3-small/large)\n * 2. Local Transformers.js (runs in Node.js/browser)\n * 3. Custom ONNX models\n * 4. 
Fallback hash-based embeddings (for development)\n */\n\nimport { EventEmitter } from 'events';\n\nexport interface EmbeddingConfig {\n provider: 'openai' | 'transformers' | 'onnx' | 'mock';\n model?: string;\n dimensions?: number;\n apiKey?: string;\n cacheSize?: number;\n}\n\nexport interface EmbeddingResult {\n embedding: number[];\n usage?: {\n promptTokens: number;\n totalTokens: number;\n };\n latency: number;\n}\n\n/**\n * Base embedding service interface\n */\nexport abstract class EmbeddingService extends EventEmitter {\n protected config: EmbeddingConfig;\n protected cache: Map<string, number[]> = new Map();\n\n constructor(config: EmbeddingConfig) {\n super();\n this.config = {\n cacheSize: 1000,\n ...config\n };\n }\n\n abstract embed(text: string): Promise<EmbeddingResult>;\n abstract embedBatch(texts: string[]): Promise<EmbeddingResult[]>;\n\n /**\n * Get cached embedding if available\n */\n protected getCached(text: string): number[] | null {\n return this.cache.get(text) || null;\n }\n\n /**\n * Cache embedding with LRU eviction\n */\n protected setCached(text: string, embedding: number[]): void {\n const cacheSize = this.config.cacheSize ?? 1000;\n if (this.cache.size >= cacheSize) {\n // Remove oldest entry (first in map)\n const firstKey = this.cache.keys().next().value as string | undefined;\n if (firstKey) {\n this.cache.delete(firstKey);\n }\n }\n this.cache.set(text, embedding);\n }\n\n /**\n * Clear cache\n */\n clearCache(): void {\n this.cache.clear();\n }\n}\n\n/**\n * OpenAI Embeddings Service\n *\n * Uses OpenAI's text-embedding-3-small (1536D) or text-embedding-3-large (3072D)\n * https://platform.openai.com/docs/guides/embeddings\n */\nexport class OpenAIEmbeddingService extends EmbeddingService {\n private apiKey: string;\n private model: string;\n private baseURL = 'https://api.openai.com/v1/embeddings';\n\n constructor(config: Omit<EmbeddingConfig, 'provider'> & { apiKey: string }) {\n super({ ...config, provider: 'openai' });\n this.apiKey = config.apiKey;\n this.model = config.model || 'text-embedding-3-small';\n }\n\n async embed(text: string): Promise<EmbeddingResult> {\n // Check cache\n const cached = this.getCached(text);\n if (cached) {\n return {\n embedding: cached,\n latency: 0\n };\n }\n\n const start = Date.now();\n\n try {\n const response = await fetch(this.baseURL, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${this.apiKey}`\n },\n body: JSON.stringify({\n model: this.model,\n input: text,\n dimensions: this.config.dimensions || undefined\n })\n });\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.statusText}`);\n }\n\n const data: any = await response.json();\n const embedding: number[] = data.data[0].embedding;\n\n // Cache it\n this.setCached(text, embedding);\n\n const latency = Date.now() - start;\n\n this.emit('embed', { text, latency });\n\n return {\n embedding,\n usage: data.usage,\n latency\n };\n } catch (error: any) {\n throw new Error(`OpenAI embedding failed: ${error.message}`);\n }\n }\n\n async embedBatch(texts: string[]): Promise<EmbeddingResult[]> {\n const start = Date.now();\n\n try {\n const response = await fetch(this.baseURL, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${this.apiKey}`\n },\n body: JSON.stringify({\n model: this.model,\n input: texts,\n dimensions: this.config.dimensions || undefined\n })\n });\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: 
${response.statusText}`);\n }\n\n const data: any = await response.json();\n const latency = Date.now() - start;\n\n return data.data.map((item: any, index: number) => {\n const embedding: number[] = item.embedding;\n this.setCached(texts[index], embedding);\n\n return {\n embedding,\n usage: {\n promptTokens: Math.floor(data.usage.prompt_tokens / texts.length),\n totalTokens: Math.floor(data.usage.total_tokens / texts.length)\n },\n latency: Math.floor(latency / texts.length)\n };\n });\n } catch (error: any) {\n throw new Error(`OpenAI batch embedding failed: ${error.message}`);\n }\n }\n}\n\n/**\n * Transformers.js Local Embedding Service\n *\n * Runs locally without API calls using ONNX runtime\n * https://huggingface.co/docs/transformers.js\n */\nexport class TransformersEmbeddingService extends EmbeddingService {\n private pipeline: any = null;\n private modelName: string;\n\n constructor(config: Omit<EmbeddingConfig, 'provider'>) {\n super({ ...config, provider: 'transformers' });\n this.modelName = config.model || 'Xenova/all-MiniLM-L6-v2';\n }\n\n async initialize(): Promise<void> {\n if (this.pipeline) return;\n\n try {\n // Dynamically import transformers.js\n const { pipeline } = await import('@xenova/transformers');\n\n this.pipeline = await pipeline('feature-extraction', this.modelName);\n this.emit('initialized', { model: this.modelName });\n } catch (error: any) {\n throw new Error(`Failed to initialize transformers.js: ${error.message}`);\n }\n }\n\n async embed(text: string): Promise<EmbeddingResult> {\n await this.initialize();\n\n // Check cache\n const cached = this.getCached(text);\n if (cached) {\n return {\n embedding: cached,\n latency: 0\n };\n }\n\n const start = Date.now();\n\n try {\n const output = await this.pipeline(text, { pooling: 'mean', normalize: true });\n\n // Convert to regular array\n const embedding = Array.from(output.data) as number[];\n\n // Cache it\n this.setCached(text, embedding);\n\n const latency = Date.now() - start;\n\n this.emit('embed', { text, latency });\n\n return {\n embedding,\n latency\n };\n } catch (error: any) {\n throw new Error(`Transformers.js embedding failed: ${error.message}`);\n }\n }\n\n async embedBatch(texts: string[]): Promise<EmbeddingResult[]> {\n await this.initialize();\n\n const start = Date.now();\n\n try {\n const results: EmbeddingResult[] = [];\n\n for (const text of texts) {\n const cached = this.getCached(text);\n\n if (cached) {\n results.push({\n embedding: cached,\n latency: 0\n });\n } else {\n const output = await this.pipeline(text, {\n pooling: 'mean',\n normalize: true\n });\n const embedding = Array.from(output.data) as number[];\n\n this.setCached(text, embedding);\n\n results.push({\n embedding,\n latency: Math.floor((Date.now() - start) / texts.length)\n });\n }\n }\n\n return results;\n } catch (error: any) {\n throw new Error(`Transformers.js batch embedding failed: ${error.message}`);\n }\n }\n}\n\n/**\n * Mock Embedding Service (for development/testing)\n *\n * Generates deterministic hash-based embeddings\n * Fast but not semantically meaningful\n */\nexport class MockEmbeddingService extends EmbeddingService {\n constructor(config?: Partial<EmbeddingConfig>) {\n super({\n provider: 'mock',\n dimensions: 384,\n ...config\n });\n }\n\n async embed(text: string): Promise<EmbeddingResult> {\n // Check cache\n const cached = this.getCached(text);\n if (cached) {\n return {\n embedding: cached,\n latency: 0\n };\n }\n\n const start = Date.now();\n\n // Generate hash-based embedding\n const 
embedding = this.hashEmbedding(text);\n\n // Cache it\n this.setCached(text, embedding);\n\n const latency = Date.now() - start;\n\n return {\n embedding,\n latency\n };\n }\n\n async embedBatch(texts: string[]): Promise<EmbeddingResult[]> {\n return Promise.all(texts.map(text => this.embed(text)));\n }\n\n private hashEmbedding(text: string): number[] {\n const dimensions = this.config.dimensions || 384;\n const embedding = new Array(dimensions);\n\n // Seed with text hash\n let hash = 0;\n for (let i = 0; i < text.length; i++) {\n hash = (hash << 5) - hash + text.charCodeAt(i);\n hash = hash & hash;\n }\n\n // Generate pseudo-random embedding\n for (let i = 0; i < dimensions; i++) {\n const seed = hash + i * 2654435761;\n const x = Math.sin(seed) * 10000;\n embedding[i] = x - Math.floor(x);\n }\n\n // Normalize to unit vector\n const norm = Math.sqrt(embedding.reduce((sum, v) => sum + v * v, 0));\n return embedding.map(v => v / norm);\n }\n}\n\n/**\n * Factory function to create appropriate embedding service\n */\nexport function createEmbeddingService(config: EmbeddingConfig): EmbeddingService {\n switch (config.provider) {\n case 'openai':\n if (!config.apiKey) {\n throw new Error('OpenAI API key required');\n }\n return new OpenAIEmbeddingService(config as any);\n\n case 'transformers':\n return new TransformersEmbeddingService(config);\n\n case 'mock':\n return new MockEmbeddingService(config);\n\n default:\n console.warn(`Unknown provider: ${config.provider}, using mock`);\n return new MockEmbeddingService(config);\n }\n}\n\n/**\n * Convenience function for quick embeddings\n */\nexport async function getEmbedding(\n text: string,\n config?: Partial<EmbeddingConfig>\n): Promise<number[]> {\n const service = createEmbeddingService({\n provider: 'mock',\n ...config\n } as EmbeddingConfig);\n\n const result = await service.embed(text);\n return result.embedding;\n}\n\n/**\n * Benchmark different embedding providers\n */\nexport async function benchmarkEmbeddings(testText: string = 'Hello world'): Promise<{\n mock: { latency: number; dimensions: number };\n transformers?: { latency: number; dimensions: number; error?: string };\n openai?: { latency: number; dimensions: number; error?: string };\n}> {\n const results: any = {};\n\n // Test mock\n const mockService = new MockEmbeddingService({ dimensions: 384 });\n const mockResult = await mockService.embed(testText);\n results.mock = {\n latency: mockResult.latency,\n dimensions: mockResult.embedding.length\n };\n\n // Test transformers (if available)\n try {\n const transformersService = new TransformersEmbeddingService({\n model: 'Xenova/all-MiniLM-L6-v2'\n });\n const transformersResult = await transformersService.embed(testText);\n results.transformers = {\n latency: transformersResult.latency,\n dimensions: transformersResult.embedding.length\n };\n } catch (error: any) {\n results.transformers = {\n error: error.message\n };\n }\n\n // Test OpenAI (if API key available)\n const apiKey = process.env.OPENAI_API_KEY;\n if (apiKey) {\n try {\n const openaiService = new OpenAIEmbeddingService({\n apiKey,\n model: 'text-embedding-3-small'\n });\n const openaiResult = await openaiService.embed(testText);\n results.openai = {\n latency: openaiResult.latency,\n dimensions: openaiResult.embedding.length\n };\n } catch (error: any) {\n results.openai = {\n error: error.message\n };\n }\n }\n\n return results;\n}\n"]}
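For orientation, the embedding-service module bundled above exposes a small provider-agnostic API: `createEmbeddingService`, `getEmbedding`, `benchmarkEmbeddings`, and the `OpenAIEmbeddingService` / `TransformersEmbeddingService` / `MockEmbeddingService` classes. The sketch below shows how that API could be exercised. It is a minimal example, not authoritative usage guidance: the import path is an assumption based on the dist layout shown in this diff (`agentic-flow/dist/core/embedding-service.js`), and the package may instead re-export these symbols from its core index.

```typescript
// Minimal sketch, assuming the module is importable from the dist path in this diff.
import {
  createEmbeddingService,
  getEmbedding,
  benchmarkEmbeddings
} from 'agentic-flow/dist/core/embedding-service.js';

async function demo(): Promise<void> {
  // Mock provider: deterministic hash-based vectors, no API key or network needed.
  const mock = createEmbeddingService({ provider: 'mock', dimensions: 384 });
  const { embedding, latency } = await mock.embed('hello world');
  console.log(embedding.length, latency); // 384, near-zero latency

  // OpenAI provider: requires an API key; defaults to text-embedding-3-small.
  const apiKey = process.env.OPENAI_API_KEY;
  if (apiKey) {
    const openai = createEmbeddingService({
      provider: 'openai',
      apiKey,
      model: 'text-embedding-3-small'
    });
    const result = await openai.embed('hello world');
    console.log(result.embedding.length, result.usage);
  }

  // One-off convenience helper (falls back to the mock provider by default).
  const vec = await getEmbedding('quick test');
  console.log(vec.length);

  // Compare whichever providers are available in this environment.
  const bench = await benchmarkEmbeddings('Hello world');
  console.log(bench);
}

demo().catch(console.error);
```

The mock provider is convenient for tests because its hash-based vectors are deterministic and require no network access, while the OpenAI and Transformers.js providers share the same `embed`/`embedBatch` interface and in-memory cache, so callers can swap providers without changing call sites.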
@@ -0,0 +1 @@
+ {"version":3,"file":"gnn-wrapper.d.ts","sourceRoot":"","sources":["../../src/core/gnn-wrapper.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAGH,QAAA,MAAM,GAAG,EAAE,GAOP,CAAC;AAEL,MAAM,WAAW,YAAY;IAC3B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;CACnB;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,QAAQ,CAAC;IACtD,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;;GAGG;AACH,wBAAgB,oBAAoB,CAClC,KAAK,EAAE,MAAM,EAAE,EACf,mBAAmB,EAAE,MAAM,EAAE,EAAE,EAC/B,CAAC,EAAE,MAAM,EACT,WAAW,GAAE,MAAY,GACxB,YAAY,CAiBd;AAED;;;GAGG;AACH,wBAAgB,mBAAmB,CACjC,KAAK,EAAE,MAAM,EAAE,EACf,OAAO,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,EAC9B,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,GAChB,MAAM,EAAE,CAeV;AAmCD;;GAEG;AACH,qBAAa,aAAa;IACxB,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,OAAO,CAAa;IAC5B,OAAO,CAAC,UAAU,CAAuC;gBAGvD,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,UAAU,GAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,MAAe;IAY3D,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE;IAqBlC,OAAO,CAAC,eAAe;IAevB,UAAU,IAAI,MAAM,EAAE,EAAE;IAIxB,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,GAAG,IAAI;CAMtC;AAED;;GAEG;AACH,qBAAa,cAAc;IACzB,OAAO,CAAC,MAAM,CAAoB;gBAEtB,MAAM,EAAE,MAAM,GAAG,iBAAiB;IAQ9C,QAAQ,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE;IAiCpC,UAAU,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE;IAK1C,mBAAmB,IAAI,MAAM;CAgB9B;AAED;;;GAGG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,GAAG,iBAAiB,CAgBpE;AAED;;GAEG;AACH,wBAAgB,cAAc,IAAI,OAAO,CASxC;AAED;;GAEG;AACH,wBAAgB,OAAO,IAAI,IAAI,CAI9B;AAGD,OAAO,EAAE,GAAG,IAAI,MAAM,EAAE,CAAC;AAGzB,YAAY,EAAE,YAAY,IAAI,eAAe,EAAE,CAAC"}