ruvector 0.2.21 → 0.2.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. package/README.md +2 -2
  2. package/bin/cli.js +160 -0
  3. package/package.json +9 -5
  4. package/src/decompiler/api-prober.js +302 -0
  5. package/src/decompiler/model-decompiler.js +423 -0
  6. package/dist/analysis/complexity.d.ts +0 -52
  7. package/dist/analysis/complexity.d.ts.map +0 -1
  8. package/dist/analysis/complexity.js +0 -146
  9. package/dist/analysis/index.d.ts +0 -15
  10. package/dist/analysis/index.d.ts.map +0 -1
  11. package/dist/analysis/index.js +0 -38
  12. package/dist/analysis/patterns.d.ts +0 -71
  13. package/dist/analysis/patterns.d.ts.map +0 -1
  14. package/dist/analysis/patterns.js +0 -243
  15. package/dist/analysis/security.d.ts +0 -51
  16. package/dist/analysis/security.d.ts.map +0 -1
  17. package/dist/analysis/security.js +0 -139
  18. package/dist/core/adaptive-embedder.d.ts +0 -156
  19. package/dist/core/adaptive-embedder.d.ts.map +0 -1
  20. package/dist/core/adaptive-embedder.js +0 -838
  21. package/dist/core/agentdb-fast.d.ts +0 -149
  22. package/dist/core/agentdb-fast.d.ts.map +0 -1
  23. package/dist/core/agentdb-fast.js +0 -301
  24. package/dist/core/ast-parser.d.ts +0 -108
  25. package/dist/core/ast-parser.d.ts.map +0 -1
  26. package/dist/core/ast-parser.js +0 -602
  27. package/dist/core/attention-fallbacks.d.ts +0 -321
  28. package/dist/core/attention-fallbacks.d.ts.map +0 -1
  29. package/dist/core/attention-fallbacks.js +0 -552
  30. package/dist/core/cluster-wrapper.d.ts +0 -148
  31. package/dist/core/cluster-wrapper.d.ts.map +0 -1
  32. package/dist/core/cluster-wrapper.js +0 -271
  33. package/dist/core/coverage-router.d.ts +0 -88
  34. package/dist/core/coverage-router.d.ts.map +0 -1
  35. package/dist/core/coverage-router.js +0 -315
  36. package/dist/core/diff-embeddings.d.ts +0 -93
  37. package/dist/core/diff-embeddings.d.ts.map +0 -1
  38. package/dist/core/diff-embeddings.js +0 -334
  39. package/dist/core/gnn-wrapper.d.ts +0 -143
  40. package/dist/core/gnn-wrapper.d.ts.map +0 -1
  41. package/dist/core/gnn-wrapper.js +0 -213
  42. package/dist/core/graph-algorithms.d.ts +0 -83
  43. package/dist/core/graph-algorithms.d.ts.map +0 -1
  44. package/dist/core/graph-algorithms.js +0 -514
  45. package/dist/core/graph-wrapper.d.ts +0 -147
  46. package/dist/core/graph-wrapper.d.ts.map +0 -1
  47. package/dist/core/graph-wrapper.js +0 -299
  48. package/dist/core/index.d.ts +0 -48
  49. package/dist/core/index.d.ts.map +0 -1
  50. package/dist/core/index.js +0 -89
  51. package/dist/core/intelligence-engine.d.ts +0 -258
  52. package/dist/core/intelligence-engine.d.ts.map +0 -1
  53. package/dist/core/intelligence-engine.js +0 -1030
  54. package/dist/core/learning-engine.d.ts +0 -160
  55. package/dist/core/learning-engine.d.ts.map +0 -1
  56. package/dist/core/learning-engine.js +0 -589
  57. package/dist/core/neural-embeddings.d.ts +0 -393
  58. package/dist/core/neural-embeddings.d.ts.map +0 -1
  59. package/dist/core/neural-embeddings.js +0 -1091
  60. package/dist/core/neural-perf.d.ts +0 -331
  61. package/dist/core/neural-perf.d.ts.map +0 -1
  62. package/dist/core/neural-perf.js +0 -704
  63. package/dist/core/onnx/loader.js +0 -348
  64. package/dist/core/onnx/pkg/LICENSE +0 -21
  65. package/dist/core/onnx/pkg/loader.js +0 -348
  66. package/dist/core/onnx/pkg/package.json +0 -3
  67. package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm.d.ts +0 -112
  68. package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm.js +0 -5
  69. package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_bg.js +0 -638
  70. package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_bg.wasm +0 -0
  71. package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_bg.wasm.d.ts +0 -29
  72. package/dist/core/onnx/pkg/ruvector_onnx_embeddings_wasm_cjs.js +0 -127
  73. package/dist/core/onnx-embedder.d.ts +0 -105
  74. package/dist/core/onnx-embedder.d.ts.map +0 -1
  75. package/dist/core/onnx-embedder.js +0 -410
  76. package/dist/core/onnx-llm.d.ts +0 -206
  77. package/dist/core/onnx-llm.d.ts.map +0 -1
  78. package/dist/core/onnx-llm.js +0 -430
  79. package/dist/core/onnx-optimized.d.ts +0 -109
  80. package/dist/core/onnx-optimized.d.ts.map +0 -1
  81. package/dist/core/onnx-optimized.js +0 -419
  82. package/dist/core/parallel-intelligence.d.ts +0 -109
  83. package/dist/core/parallel-intelligence.d.ts.map +0 -1
  84. package/dist/core/parallel-intelligence.js +0 -340
  85. package/dist/core/parallel-workers.d.ts +0 -177
  86. package/dist/core/parallel-workers.d.ts.map +0 -1
  87. package/dist/core/parallel-workers.js +0 -671
  88. package/dist/core/router-wrapper.d.ts +0 -62
  89. package/dist/core/router-wrapper.d.ts.map +0 -1
  90. package/dist/core/router-wrapper.js +0 -209
  91. package/dist/core/rvf-wrapper.d.ts +0 -86
  92. package/dist/core/rvf-wrapper.d.ts.map +0 -1
  93. package/dist/core/rvf-wrapper.js +0 -102
  94. package/dist/core/sona-wrapper.d.ts +0 -226
  95. package/dist/core/sona-wrapper.d.ts.map +0 -1
  96. package/dist/core/sona-wrapper.js +0 -282
  97. package/dist/core/tensor-compress.d.ts +0 -134
  98. package/dist/core/tensor-compress.d.ts.map +0 -1
  99. package/dist/core/tensor-compress.js +0 -432
  100. package/dist/index.d.ts +0 -105
  101. package/dist/index.d.ts.map +0 -1
  102. package/dist/index.js +0 -221
  103. package/dist/services/embedding-service.d.ts +0 -136
  104. package/dist/services/embedding-service.d.ts.map +0 -1
  105. package/dist/services/embedding-service.js +0 -294
  106. package/dist/services/index.d.ts +0 -6
  107. package/dist/services/index.d.ts.map +0 -1
  108. package/dist/services/index.js +0 -26
  109. package/dist/types.d.ts +0 -145
  110. package/dist/types.d.ts.map +0 -1
  111. package/dist/types.js +0 -2
  112. package/dist/workers/benchmark.d.ts +0 -44
  113. package/dist/workers/benchmark.d.ts.map +0 -1
  114. package/dist/workers/benchmark.js +0 -230
  115. package/dist/workers/index.d.ts +0 -10
  116. package/dist/workers/index.d.ts.map +0 -1
  117. package/dist/workers/index.js +0 -25
  118. package/dist/workers/native-worker.d.ts +0 -76
  119. package/dist/workers/native-worker.d.ts.map +0 -1
  120. package/dist/workers/native-worker.js +0 -490
  121. package/dist/workers/types.d.ts +0 -69
  122. package/dist/workers/types.d.ts.map +0 -1
  123. package/dist/workers/types.js +0 -7
package/dist/core/attention-fallbacks.js
@@ -1,552 +0,0 @@
- "use strict";
- /**
-  * Attention Fallbacks - Safe wrapper around @ruvector/attention with automatic array conversion
-  *
-  * This wrapper handles the array type conversion automatically, allowing users
-  * to pass either regular arrays or Float32Arrays.
-  *
-  * @ruvector/attention requires Float32Array inputs.
-  * This wrapper handles the conversion automatically.
-  */
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.AdamOptimizer = exports.DotProductAttention = exports.DualSpaceAttention = exports.EdgeFeaturedAttention = exports.GraphRoPeAttention = exports.MoEAttention = exports.LocalGlobalAttention = exports.LinearAttention = exports.HyperbolicAttention = exports.FlashAttention = exports.MultiHeadAttention = void 0;
- exports.projectToPoincareBall = projectToPoincareBall;
- exports.poincareDistance = poincareDistance;
- exports.mobiusAddition = mobiusAddition;
- exports.expMap = expMap;
- exports.logMap = logMap;
- exports.isAttentionAvailable = isAttentionAvailable;
- exports.getAttentionVersion = getAttentionVersion;
- exports.parallelAttentionCompute = parallelAttentionCompute;
- exports.batchAttentionCompute = batchAttentionCompute;
- exports.computeFlashAttentionAsync = computeFlashAttentionAsync;
- exports.computeHyperbolicAttentionAsync = computeHyperbolicAttentionAsync;
- exports.infoNceLoss = infoNceLoss;
- exports.mineHardNegatives = mineHardNegatives;
- exports.benchmarkAttention = benchmarkAttention;
- // Lazy load to avoid import errors if not installed
- let attentionModule = null;
- let loadError = null;
- function getAttentionModule() {
-     if (attentionModule)
-         return attentionModule;
-     if (loadError)
-         throw loadError;
-     try {
-         attentionModule = require('@ruvector/attention');
-         return attentionModule;
-     }
-     catch (e) {
-         loadError = new Error(`@ruvector/attention is not installed or failed to load: ${e.message}\n` +
-             `Install with: npm install @ruvector/attention`);
-         throw loadError;
-     }
- }
- /**
-  * Convert any array-like input to Float32Array
-  */
- function toFloat32Array(input) {
-     if (input instanceof Float32Array) {
-         return input;
-     }
-     return new Float32Array(input);
- }
- /**
-  * Convert nested arrays to Float32Arrays
-  */
- function toFloat32Arrays(inputs) {
-     return inputs.map(arr => toFloat32Array(arr));
- }
- /**
-  * Convert Float32Array result back to regular array if needed
-  */
- function fromFloat32Array(input) {
-     return Array.from(input);
- }
- /**
-  * Multi-head attention mechanism
-  *
-  * This wrapper automatically converts array inputs to Float32Array.
-  */
- class MultiHeadAttention {
-     /**
-      * Create a new multi-head attention instance
-      *
-      * @param dim - Embedding dimension (must be divisible by numHeads)
-      * @param numHeads - Number of attention heads
-      */
-     constructor(dim, numHeads) {
-         const attention = getAttentionModule();
-         this.inner = new attention.MultiHeadAttention(dim, numHeads);
-         this.dim = dim;
-         this.numHeads = numHeads;
-     }
-     /**
-      * Compute multi-head attention
-      *
-      * @param query - Query vector
-      * @param keys - Array of key vectors
-      * @param values - Array of value vectors
-      * @returns Attention output
-      *
-      * @example
-      * ```typescript
-      * const mha = new MultiHeadAttention(64, 4);
-      *
-      * // Works with regular arrays
-      * const result1 = mha.compute([...64 values], [[...64], [...64]], [[...64], [...64]]);
-      *
-      * // Also works with Float32Array
-      * const q = new Float32Array(64);
-      * const k = [new Float32Array(64)];
-      * const v = [new Float32Array(64)];
-      * const result2 = mha.compute(q, k, v);
-      * ```
-      */
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return {
-             values: fromFloat32Array(raw),
-             raw
-         };
-     }
-     /**
-      * Compute and return raw Float32Array (faster, no conversion)
-      */
-     computeRaw(query, keys, values) {
-         return this.inner.compute(query, keys, values);
-     }
-     get headDim() {
-         return this.dim / this.numHeads;
-     }
- }
- exports.MultiHeadAttention = MultiHeadAttention;
- /**
-  * Flash attention with tiled computation
-  */
- class FlashAttention {
-     /**
-      * Create a new flash attention instance
-      *
-      * @param dim - Embedding dimension
-      * @param blockSize - Block size for tiled computation (default: 512)
-      */
-     constructor(dim, blockSize = 512) {
-         const attention = getAttentionModule();
-         this.inner = new attention.FlashAttention(dim, blockSize);
-         this.dim = dim;
-         this.blockSize = blockSize;
-     }
-     /**
-      * Compute flash attention
-      */
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return {
-             values: fromFloat32Array(raw),
-             raw
-         };
-     }
-     computeRaw(query, keys, values) {
-         return this.inner.compute(query, keys, values);
-     }
- }
- exports.FlashAttention = FlashAttention;
- /**
-  * Hyperbolic attention in Poincare ball model
-  */
- class HyperbolicAttention {
-     /**
-      * Create a new hyperbolic attention instance
-      *
-      * @param dim - Embedding dimension
-      * @param curvature - Hyperbolic curvature (typically 1.0)
-      */
-     constructor(dim, curvature = 1.0) {
-         const attention = getAttentionModule();
-         this.inner = new attention.HyperbolicAttention(dim, curvature);
-         this.dim = dim;
-         this.curvature = curvature;
-     }
-     /**
-      * Compute hyperbolic attention
-      */
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return {
-             values: fromFloat32Array(raw),
-             raw
-         };
-     }
-     computeRaw(query, keys, values) {
-         return this.inner.compute(query, keys, values);
-     }
- }
- exports.HyperbolicAttention = HyperbolicAttention;
- /**
-  * Linear attention (Performer-style) with O(n) complexity
-  */
- class LinearAttention {
-     /**
-      * Create a new linear attention instance
-      *
-      * @param dim - Embedding dimension
-      * @param numFeatures - Number of random features
-      */
-     constructor(dim, numFeatures) {
-         const attention = getAttentionModule();
-         this.inner = new attention.LinearAttention(dim, numFeatures);
-         this.dim = dim;
-         this.numFeatures = numFeatures;
-     }
-     /**
-      * Compute linear attention
-      */
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return {
-             values: fromFloat32Array(raw),
-             raw
-         };
-     }
-     computeRaw(query, keys, values) {
-         return this.inner.compute(query, keys, values);
-     }
- }
- exports.LinearAttention = LinearAttention;
- /**
-  * Local-global attention (Longformer-style)
-  */
- class LocalGlobalAttention {
-     /**
-      * Create a new local-global attention instance
-      *
-      * @param dim - Embedding dimension
-      * @param localWindow - Size of local attention window
-      * @param globalTokens - Number of global attention tokens
-      */
-     constructor(dim, localWindow, globalTokens) {
-         const attention = getAttentionModule();
-         this.inner = new attention.LocalGlobalAttention(dim, localWindow, globalTokens);
-         this.dim = dim;
-         this.localWindow = localWindow;
-         this.globalTokens = globalTokens;
-     }
-     /**
-      * Compute local-global attention
-      */
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return {
-             values: fromFloat32Array(raw),
-             raw
-         };
-     }
-     computeRaw(query, keys, values) {
-         return this.inner.compute(query, keys, values);
-     }
- }
- exports.LocalGlobalAttention = LocalGlobalAttention;
- /**
-  * Mixture of Experts attention
-  */
- class MoEAttention {
-     /**
-      * Create a new MoE attention instance
-      *
-      * @param config - MoE configuration
-      */
-     constructor(config) {
-         const attention = getAttentionModule();
-         this.inner = new attention.MoEAttention({
-             dim: config.dim,
-             num_experts: config.numExperts,
-             top_k: config.topK,
-             expert_capacity: config.expertCapacity ?? 1.25,
-         });
-         this.config = config;
-     }
-     /**
-      * Create with simple parameters
-      */
-     static simple(dim, numExperts, topK) {
-         return new MoEAttention({ dim, numExperts, topK });
-     }
-     /**
-      * Compute MoE attention
-      */
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return {
-             values: fromFloat32Array(raw),
-             raw
-         };
-     }
-     computeRaw(query, keys, values) {
-         return this.inner.compute(query, keys, values);
-     }
- }
- exports.MoEAttention = MoEAttention;
- // Hyperbolic math utilities
- /**
-  * Project a vector into the Poincare ball
-  */
- function projectToPoincareBall(vector, curvature = 1.0) {
-     const attention = getAttentionModule();
-     const result = attention.projectToPoincareBall(toFloat32Array(vector), curvature);
-     return fromFloat32Array(result);
- }
- /**
-  * Compute hyperbolic (Poincare) distance between two points
-  */
- function poincareDistance(a, b, curvature = 1.0) {
-     const attention = getAttentionModule();
-     return attention.poincareDistance(toFloat32Array(a), toFloat32Array(b), curvature);
- }
- /**
-  * Mobius addition in hyperbolic space
-  */
- function mobiusAddition(a, b, curvature = 1.0) {
-     const attention = getAttentionModule();
-     const result = attention.mobiusAddition(toFloat32Array(a), toFloat32Array(b), curvature);
-     return fromFloat32Array(result);
- }
- /**
-  * Exponential map from tangent space to hyperbolic space
-  */
- function expMap(base, tangent, curvature = 1.0) {
-     const attention = getAttentionModule();
-     const result = attention.expMap(toFloat32Array(base), toFloat32Array(tangent), curvature);
-     return fromFloat32Array(result);
- }
- /**
-  * Logarithmic map from hyperbolic space to tangent space
-  */
- function logMap(base, point, curvature = 1.0) {
-     const attention = getAttentionModule();
-     const result = attention.logMap(toFloat32Array(base), toFloat32Array(point), curvature);
-     return fromFloat32Array(result);
- }
- /**
-  * Check if attention module is available
-  */
- function isAttentionAvailable() {
-     try {
-         getAttentionModule();
-         return true;
-     }
-     catch {
-         return false;
-     }
- }
- /**
-  * Get attention module version
-  */
- function getAttentionVersion() {
-     try {
-         const attention = getAttentionModule();
-         return attention.version?.() ?? null;
-     }
-     catch {
-         return null;
-     }
- }
- // ============================================================================
- // Graph-based Attention (for code structure)
- // ============================================================================
- /**
-  * Graph attention with Rotary Position Embeddings
-  * Excellent for code AST and dependency graphs
-  */
- class GraphRoPeAttention {
-     constructor(dim, numHeads = 4, maxSeqLen = 4096) {
-         const attention = getAttentionModule();
-         this.inner = new attention.GraphRoPeAttention(dim, numHeads, maxSeqLen);
-         this.dim = dim;
-         this.numHeads = numHeads;
-         this.maxSeqLen = maxSeqLen;
-     }
-     compute(query, keys, values, positions) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), positions ? new Int32Array(positions) : undefined);
-         return { values: fromFloat32Array(raw), raw };
-     }
- }
- exports.GraphRoPeAttention = GraphRoPeAttention;
- /**
-  * Edge-featured attention for graphs with edge attributes
-  * Useful for weighted dependency graphs
-  */
- class EdgeFeaturedAttention {
-     constructor(dim, edgeDim = 16) {
-         const attention = getAttentionModule();
-         this.inner = new attention.EdgeFeaturedAttention(dim, edgeDim);
-         this.dim = dim;
-         this.edgeDim = edgeDim;
-     }
-     compute(query, keys, values, edgeFeatures) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), edgeFeatures ? toFloat32Arrays(edgeFeatures) : undefined);
-         return { values: fromFloat32Array(raw), raw };
-     }
- }
- exports.EdgeFeaturedAttention = EdgeFeaturedAttention;
- /**
-  * Dual-space attention (Euclidean + Hyperbolic)
-  * Best of both worlds for hierarchical + semantic similarity
-  */
- class DualSpaceAttention {
-     constructor(dim, curvature = 1.0, alpha = 0.5) {
-         const attention = getAttentionModule();
-         this.inner = new attention.DualSpaceAttention(dim, curvature, alpha);
-         this.dim = dim;
-         this.curvature = curvature;
-         this.alpha = alpha;
-     }
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return { values: fromFloat32Array(raw), raw };
-     }
- }
- exports.DualSpaceAttention = DualSpaceAttention;
- /**
-  * Basic dot-product attention
-  */
- class DotProductAttention {
-     constructor(dim) {
-         const attention = getAttentionModule();
-         this.inner = new attention.DotProductAttention(dim);
-         this.dim = dim;
-     }
-     compute(query, keys, values) {
-         const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
-         return { values: fromFloat32Array(raw), raw };
-     }
- }
- exports.DotProductAttention = DotProductAttention;
- // ============================================================================
- // Parallel/Batch Attention Compute
- // ============================================================================
- /**
-  * Compute attention in parallel across multiple queries
-  */
- async function parallelAttentionCompute(queries, keys, values, attentionType = 'multi-head') {
-     const attention = getAttentionModule();
-     const results = await attention.parallelAttentionCompute(toFloat32Arrays(queries), toFloat32Arrays(keys), toFloat32Arrays(values), attentionType);
-     return results.map((r) => fromFloat32Array(r));
- }
- /**
-  * Batch attention compute for multiple query-key-value sets
-  */
- async function batchAttentionCompute(batches, attentionType = 'multi-head') {
-     const attention = getAttentionModule();
-     const nativeBatches = batches.map(b => ({
-         query: toFloat32Array(b.query),
-         keys: toFloat32Arrays(b.keys),
-         values: toFloat32Arrays(b.values),
-     }));
-     const results = await attention.batchAttentionCompute(nativeBatches, attentionType);
-     return results.map((r) => fromFloat32Array(r));
- }
- /**
-  * Async flash attention with callback
-  */
- function computeFlashAttentionAsync(query, keys, values) {
-     const attention = getAttentionModule();
-     return new Promise((resolve, reject) => {
-         attention.computeFlashAttentionAsync(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), (err, result) => {
-             if (err)
-                 reject(err);
-             else
-                 resolve(fromFloat32Array(result));
-         });
-     });
- }
- /**
-  * Async hyperbolic attention
-  */
- function computeHyperbolicAttentionAsync(query, keys, values, curvature = 1.0) {
-     const attention = getAttentionModule();
-     return new Promise((resolve, reject) => {
-         attention.computeHyperbolicAttentionAsync(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), curvature, (err, result) => {
-             if (err)
-                 reject(err);
-             else
-                 resolve(fromFloat32Array(result));
-         });
-     });
- }
- // ============================================================================
- // Training Utilities (for SONA integration)
- // ============================================================================
- /**
-  * Adam optimizer for attention training
-  */
- class AdamOptimizer {
-     constructor(learningRate = 0.001, beta1 = 0.9, beta2 = 0.999) {
-         const attention = getAttentionModule();
-         this.inner = new attention.AdamOptimizer(learningRate, beta1, beta2);
-     }
-     step(gradients, params) {
-         const result = this.inner.step(toFloat32Array(gradients), toFloat32Array(params));
-         return fromFloat32Array(result);
-     }
- }
- exports.AdamOptimizer = AdamOptimizer;
- /**
-  * InfoNCE contrastive loss
-  */
- function infoNceLoss(anchor, positive, negatives, temperature = 0.07) {
-     const attention = getAttentionModule();
-     return attention.InfoNceLoss.compute(toFloat32Array(anchor), toFloat32Array(positive), toFloat32Arrays(negatives), temperature);
- }
- /**
-  * Hard negative mining for contrastive learning
-  */
- function mineHardNegatives(anchor, candidates, topK = 5) {
-     const attention = getAttentionModule();
-     const miner = new attention.HardNegativeMiner(topK);
-     const results = miner.mine(toFloat32Array(anchor), toFloat32Arrays(candidates));
-     return results.map((r) => fromFloat32Array(r));
- }
- // ============================================================================
- // Benchmarking
- // ============================================================================
- /**
-  * Benchmark attention implementations
-  */
- async function benchmarkAttention(dim, seqLen, iterations = 100) {
-     const attention = getAttentionModule();
-     return attention.benchmarkAttention(dim, seqLen, iterations);
- }
- exports.default = {
-     // Core attention types
-     DotProductAttention,
-     MultiHeadAttention,
-     FlashAttention,
-     HyperbolicAttention,
-     LinearAttention,
-     LocalGlobalAttention,
-     MoEAttention,
-     // Graph attention types
-     GraphRoPeAttention,
-     EdgeFeaturedAttention,
-     DualSpaceAttention,
-     // Parallel/batch compute
-     parallelAttentionCompute,
-     batchAttentionCompute,
-     computeFlashAttentionAsync,
-     computeHyperbolicAttentionAsync,
-     // Training utilities
-     AdamOptimizer,
-     infoNceLoss,
-     mineHardNegatives,
-     // Hyperbolic math
-     projectToPoincareBall,
-     poincareDistance,
-     mobiusAddition,
-     expMap,
-     logMap,
-     // Utilities
-     isAttentionAvailable,
-     getAttentionVersion,
-     benchmarkAttention,
- };
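For readers pinned to 0.2.21, the deleted file above was a thin conversion layer over @ruvector/attention: it accepted plain number arrays, converted them to Float32Array before calling the native module, and converted results back. A minimal consumer sketch of that API follows; the deep import path, dimensions, and vector contents are illustrative assumptions, and it only runs where @ruvector/attention is installed.

```typescript
// Hypothetical 0.2.21 consumer of the removed wrapper. The import path is an
// assumption (the file shipped as dist/core/attention-fallbacks.js).
import {
  MultiHeadAttention,
  isAttentionAvailable,
  poincareDistance,
} from 'ruvector/dist/core/attention-fallbacks';

if (isAttentionAvailable()) {
  const dim = 64;
  const mha = new MultiHeadAttention(dim, 4); // dim must be divisible by numHeads

  // Plain number[] inputs were accepted; the wrapper converted them to
  // Float32Array before calling into @ruvector/attention.
  const query: number[] = new Array(dim).fill(0.1);
  const keys = [new Array(dim).fill(0.2), new Array(dim).fill(0.3)];
  const vals = keys;

  // compute() returned { values: number[], raw: Float32Array }.
  const { values, raw } = mha.compute(query, keys, vals);
  console.log(values.length, raw instanceof Float32Array);

  // The hyperbolic helpers also took plain arrays.
  console.log('poincare distance:', poincareDistance(query, keys[0], 1.0));
} else {
  console.warn('@ruvector/attention not installed; the wrapper throws on use');
}
```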
package/dist/core/cluster-wrapper.d.ts
@@ -1,148 +0,0 @@
- /**
-  * Cluster Wrapper - Distributed coordination for multi-agent systems
-  *
-  * Wraps @ruvector/cluster for Raft consensus, auto-sharding,
-  * and distributed memory across agents.
-  */
- export declare function isClusterAvailable(): boolean;
- export interface ClusterNode {
-     id: string;
-     address: string;
-     role: 'leader' | 'follower' | 'candidate';
-     status: 'healthy' | 'unhealthy' | 'unknown';
-     lastHeartbeat: number;
- }
- export interface ShardInfo {
-     id: number;
-     range: [number, number];
-     node: string;
-     size: number;
-     status: 'active' | 'migrating' | 'offline';
- }
- export interface ClusterConfig {
-     nodeId: string;
-     address: string;
-     peers?: string[];
-     shards?: number;
-     replicationFactor?: number;
- }
- /**
-  * Distributed cluster for multi-agent coordination
-  */
- export declare class RuvectorCluster {
-     private inner;
-     private nodeId;
-     private isLeader;
-     constructor(config: ClusterConfig);
-     /**
-      * Start the cluster node
-      */
-     start(): Promise<void>;
-     /**
-      * Stop the cluster node gracefully
-      */
-     stop(): Promise<void>;
-     /**
-      * Join an existing cluster
-      */
-     join(peerAddress: string): Promise<boolean>;
-     /**
-      * Leave the cluster
-      */
-     leave(): Promise<void>;
-     /**
-      * Get current node info
-      */
-     getNodeInfo(): ClusterNode;
-     /**
-      * Get all cluster nodes
-      */
-     getNodes(): ClusterNode[];
-     /**
-      * Check if this node is the leader
-      */
-     isClusterLeader(): boolean;
-     /**
-      * Get the current leader
-      */
-     getLeader(): ClusterNode | null;
-     /**
-      * Put a value in distributed storage
-      */
-     put(key: string, value: any): Promise<boolean>;
-     /**
-      * Get a value from distributed storage
-      */
-     get(key: string): Promise<any | null>;
-     /**
-      * Delete a value from distributed storage
-      */
-     delete(key: string): Promise<boolean>;
-     /**
-      * Atomic compare-and-swap
-      */
-     compareAndSwap(key: string, expected: any, newValue: any): Promise<boolean>;
-     /**
-      * Get shard information
-      */
-     getShards(): ShardInfo[];
-     /**
-      * Get the shard for a key
-      */
-     getShardForKey(key: string): ShardInfo;
-     /**
-      * Trigger shard rebalancing
-      */
-     rebalance(): Promise<void>;
-     /**
-      * Acquire a distributed lock
-      */
-     lock(name: string, timeout?: number): Promise<string | null>;
-     /**
-      * Release a distributed lock
-      */
-     unlock(name: string, token: string): Promise<boolean>;
-     /**
-      * Extend a lock's TTL
-      */
-     extendLock(name: string, token: string, extension?: number): Promise<boolean>;
-     /**
-      * Subscribe to a channel
-      */
-     subscribe(channel: string, callback: (message: any) => void): () => void;
-     /**
-      * Publish to a channel
-      */
-     publish(channel: string, message: any): Promise<number>;
-     /**
-      * Register an agent with the cluster
-      */
-     registerAgent(agentId: string, capabilities: string[]): Promise<boolean>;
-     /**
-      * Find agents with a capability
-      */
-     findAgents(capability: string): Promise<string[]>;
-     /**
-      * Assign a task to an agent
-      */
-     assignTask(taskId: string, agentId: string, task: any): Promise<boolean>;
-     /**
-      * Complete a task
-      */
-     completeTask(taskId: string, result: any): Promise<boolean>;
-     /**
-      * Get cluster statistics
-      */
-     stats(): {
-         nodes: number;
-         shards: number;
-         leader: string | null;
-         healthy: boolean;
-     };
- }
- /**
-  * Create a cluster node for agent coordination
-  */
- export declare function createCluster(config: ClusterConfig): RuvectorCluster;
- export default RuvectorCluster;
- //# sourceMappingURL=cluster-wrapper.d.ts.map
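The deleted declaration file documents the coordination surface that shipped in 0.2.21: lifecycle, replicated KV with compare-and-swap, sharding, advisory locks, pub/sub, and an agent registry. A usage sketch written against those declarations follows; the deep import path, node IDs, addresses, and key names are illustrative assumptions, and real behavior depends on @ruvector/cluster being installed.

```typescript
// Hypothetical usage of the removed RuvectorCluster API; all config values
// here are placeholders, and the import path is an assumption.
import { createCluster } from 'ruvector/dist/core/cluster-wrapper';

async function main(): Promise<void> {
  const cluster = createCluster({
    nodeId: 'node-1',
    address: '127.0.0.1:7000',
    peers: ['127.0.0.1:7001'],
    replicationFactor: 2,
  });

  await cluster.start();

  // Distributed KV write guarded by the advisory-lock API.
  const token = await cluster.lock('roster:init', 5000);
  if (token) {
    await cluster.put('roster', ['coder', 'reviewer']);
    await cluster.unlock('roster:init', token);
  }

  // Agent-registry helpers declared on the same class.
  await cluster.registerAgent('agent-42', ['embedding', 'search']);
  console.log(await cluster.findAgents('embedding'));

  console.log(cluster.stats()); // { nodes, shards, leader, healthy }
  await cluster.stop();
}

main().catch(console.error);
```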
package/dist/core/cluster-wrapper.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"cluster-wrapper.d.ts","sourceRoot":"","sources":["../../src/core/cluster-wrapper.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAqBH,wBAAgB,kBAAkB,IAAI,OAAO,CAO5C;AAED,MAAM,WAAW,WAAW;IAC1B,EAAE,EAAE,MAAM,CAAC;IACX,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,QAAQ,GAAG,UAAU,GAAG,WAAW,CAAC;IAC1C,MAAM,EAAE,SAAS,GAAG,WAAW,GAAG,SAAS,CAAC;IAC5C,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,SAAS;IACxB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,QAAQ,GAAG,WAAW,GAAG,SAAS,CAAC;CAC5C;AAED,MAAM,WAAW,aAAa;IAC5B,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B;AAED;;GAEG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,KAAK,CAAM;IACnB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,QAAQ,CAAkB;gBAEtB,MAAM,EAAE,aAAa;IAgBjC;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAI3B;;OAEG;IACG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAIjD;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ5B;;OAEG;IACH,WAAW,IAAI,WAAW;IAI1B;;OAEG;IACH,QAAQ,IAAI,WAAW,EAAE;IAIzB;;OAEG;IACH,eAAe,IAAI,OAAO;IAK1B;;OAEG;IACH,SAAS,IAAI,WAAW,GAAG,IAAI;IAQ/B;;OAEG;IACG,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC;IAIpD;;OAEG;IACG,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,GAAG,IAAI,CAAC;IAK3C;;OAEG;IACG,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAI3C;;OAEG;IACG,cAAc,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,QAAQ,EAAE,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC;IAYjF;;OAEG;IACH,SAAS,IAAI,SAAS,EAAE;IAIxB;;OAEG;IACH,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS;IAItC;;OAEG;IACG,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IAQhC;;OAEG;IACG,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,GAAE,MAAc,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IAIzE;;OAEG;IACG,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAI3D;;OAEG;IACG,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,SAAS,GAAE,MAAc,GAAG,OAAO,CAAC,OAAO,CAAC;IAQ1F;;OAEG;IACH,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,GAAG,MAAM,IAAI;IAMxE;;OAEG;IACG,OAAO,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,GAAG,OAAO,CAAC,MAAM,CAAC;IAQ7D;;OAEG;IACG,aAAa,CAAC,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC;IAS9E;;OAEG;IACG,UAAU,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAcvD;;OAEG;IACG,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC;IAgB9E;;OAEG;IACG,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC;IAgBjE;;OAEG;IACH,KAAK,IAAI;QACP,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QACtB,OAAO,EAAE,OAAO,CAAC;KAClB;CAGF;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,MAAM,EAAE,aAAa,GAAG,eAAe,CAEpE;AAED,eAAe,eAAe,CAAC"}