ruvector 0.1.62 → 0.1.64

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -202,13 +202,112 @@ export declare function isAttentionAvailable(): boolean;
  * Get attention module version
  */
  export declare function getAttentionVersion(): string | null;
+ /**
+ * Graph attention with Rotary Position Embeddings
+ * Excellent for code AST and dependency graphs
+ */
+ export declare class GraphRoPeAttention {
+ private inner;
+ readonly dim: number;
+ readonly numHeads: number;
+ readonly maxSeqLen: number;
+ constructor(dim: number, numHeads?: number, maxSeqLen?: number);
+ compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], positions?: number[]): AttentionOutput;
+ }
+ /**
+ * Edge-featured attention for graphs with edge attributes
+ * Useful for weighted dependency graphs
+ */
+ export declare class EdgeFeaturedAttention {
+ private inner;
+ readonly dim: number;
+ readonly edgeDim: number;
+ constructor(dim: number, edgeDim?: number);
+ compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], edgeFeatures?: (number[] | Float32Array)[]): AttentionOutput;
+ }
+ /**
+ * Dual-space attention (Euclidean + Hyperbolic)
+ * Best of both worlds for hierarchical + semantic similarity
+ */
+ export declare class DualSpaceAttention {
+ private inner;
+ readonly dim: number;
+ readonly curvature: number;
+ readonly alpha: number;
+ constructor(dim: number, curvature?: number, alpha?: number);
+ compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
+ }
+ /**
+ * Basic dot-product attention
+ */
+ export declare class DotProductAttention {
+ private inner;
+ readonly dim: number;
+ constructor(dim: number);
+ compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
+ }
+ /**
+ * Compute attention in parallel across multiple queries
+ */
+ export declare function parallelAttentionCompute(queries: (number[] | Float32Array)[], keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], attentionType?: 'dot' | 'multi-head' | 'flash' | 'hyperbolic' | 'linear'): Promise<number[][]>;
+ /**
+ * Batch attention compute for multiple query-key-value sets
+ */
+ export declare function batchAttentionCompute(batches: Array<{
+ query: number[] | Float32Array;
+ keys: (number[] | Float32Array)[];
+ values: (number[] | Float32Array)[];
+ }>, attentionType?: 'dot' | 'multi-head' | 'flash' | 'hyperbolic' | 'linear'): Promise<number[][]>;
+ /**
+ * Async flash attention with callback
+ */
+ export declare function computeFlashAttentionAsync(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): Promise<number[]>;
+ /**
+ * Async hyperbolic attention
+ */
+ export declare function computeHyperbolicAttentionAsync(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], curvature?: number): Promise<number[]>;
+ /**
+ * Adam optimizer for attention training
+ */
+ export declare class AdamOptimizer {
+ private inner;
+ constructor(learningRate?: number, beta1?: number, beta2?: number);
+ step(gradients: number[] | Float32Array, params: number[] | Float32Array): number[];
+ }
+ /**
+ * InfoNCE contrastive loss
+ */
+ export declare function infoNceLoss(anchor: number[] | Float32Array, positive: number[] | Float32Array, negatives: (number[] | Float32Array)[], temperature?: number): number;
+ /**
+ * Hard negative mining for contrastive learning
+ */
+ export declare function mineHardNegatives(anchor: number[] | Float32Array, candidates: (number[] | Float32Array)[], topK?: number): number[][];
+ /**
+ * Benchmark attention implementations
+ */
+ export declare function benchmarkAttention(dim: number, seqLen: number, iterations?: number): Promise<Record<string, {
+ avgMs: number;
+ minMs: number;
+ maxMs: number;
+ }>>;
  declare const _default: {
+ DotProductAttention: typeof DotProductAttention;
  MultiHeadAttention: typeof MultiHeadAttention;
  FlashAttention: typeof FlashAttention;
  HyperbolicAttention: typeof HyperbolicAttention;
  LinearAttention: typeof LinearAttention;
  LocalGlobalAttention: typeof LocalGlobalAttention;
  MoEAttention: typeof MoEAttention;
+ GraphRoPeAttention: typeof GraphRoPeAttention;
+ EdgeFeaturedAttention: typeof EdgeFeaturedAttention;
+ DualSpaceAttention: typeof DualSpaceAttention;
+ parallelAttentionCompute: typeof parallelAttentionCompute;
+ batchAttentionCompute: typeof batchAttentionCompute;
+ computeFlashAttentionAsync: typeof computeFlashAttentionAsync;
+ computeHyperbolicAttentionAsync: typeof computeHyperbolicAttentionAsync;
+ AdamOptimizer: typeof AdamOptimizer;
+ infoNceLoss: typeof infoNceLoss;
+ mineHardNegatives: typeof mineHardNegatives;
  projectToPoincareBall: typeof projectToPoincareBall;
  poincareDistance: typeof poincareDistance;
  mobiusAddition: typeof mobiusAddition;
@@ -216,6 +315,7 @@ declare const _default: {
  logMap: typeof logMap;
  isAttentionAvailable: typeof isAttentionAvailable;
  getAttentionVersion: typeof getAttentionVersion;
+ benchmarkAttention: typeof benchmarkAttention;
  };
  export default _default;
  //# sourceMappingURL=attention-fallbacks.d.ts.map
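The new declarations above all follow the calling convention of the existing wrappers: plain number[] or Float32Array inputs, and an AttentionOutput carrying both values (a plain array) and raw (a Float32Array). A minimal sketch of the graph-attention surface, assuming these classes are re-exported from the ruvector root entry point; only the signatures come from the declarations above, the data and import path are illustrative:

// Sketch: exercises GraphRoPeAttention and DualSpaceAttention as declared above.
// Assumption: the root 'ruvector' entry re-exports these classes.
import { GraphRoPeAttention, DualSpaceAttention, isAttentionAvailable } from 'ruvector';

if (isAttentionAvailable()) {
  const dim = 64;

  // RoPE graph attention; numHeads and maxSeqLen are optional (the compiled
  // output further below shows defaults of 4 and 4096).
  const rope = new GraphRoPeAttention(dim, 4, 4096);
  const query = new Float32Array(dim).fill(0.1);
  const keys = [new Float32Array(dim).fill(0.2), new Float32Array(dim).fill(0.3)];
  const values = keys;

  // Optional positions align each key with a node/sequence position.
  const out = rope.compute(query, keys, values, [0, 1]);
  console.log(out.values.length); // one attended dim-sized vector

  // Dual-space attention: curvature for the hyperbolic half, alpha to blend
  // the Euclidean and hyperbolic scores.
  const dual = new DualSpaceAttention(dim, 1.0, 0.5);
  const blended = dual.compute(query, keys, values);
  console.log(blended.values.length);
}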
@@ -9,7 +9,7 @@
  * This wrapper handles the conversion automatically.
  */
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.MoEAttention = exports.LocalGlobalAttention = exports.LinearAttention = exports.HyperbolicAttention = exports.FlashAttention = exports.MultiHeadAttention = void 0;
+ exports.AdamOptimizer = exports.DotProductAttention = exports.DualSpaceAttention = exports.EdgeFeaturedAttention = exports.GraphRoPeAttention = exports.MoEAttention = exports.LocalGlobalAttention = exports.LinearAttention = exports.HyperbolicAttention = exports.FlashAttention = exports.MultiHeadAttention = void 0;
  exports.projectToPoincareBall = projectToPoincareBall;
  exports.poincareDistance = poincareDistance;
  exports.mobiusAddition = mobiusAddition;
@@ -17,6 +17,13 @@ exports.expMap = expMap;
  exports.logMap = logMap;
  exports.isAttentionAvailable = isAttentionAvailable;
  exports.getAttentionVersion = getAttentionVersion;
+ exports.parallelAttentionCompute = parallelAttentionCompute;
+ exports.batchAttentionCompute = batchAttentionCompute;
+ exports.computeFlashAttentionAsync = computeFlashAttentionAsync;
+ exports.computeHyperbolicAttentionAsync = computeHyperbolicAttentionAsync;
+ exports.infoNceLoss = infoNceLoss;
+ exports.mineHardNegatives = mineHardNegatives;
+ exports.benchmarkAttention = benchmarkAttention;
  // Lazy load to avoid import errors if not installed
  let attentionModule = null;
  let loadError = null;
@@ -344,18 +351,202 @@ function getAttentionVersion() {
  return null;
  }
  }
+ // ============================================================================
+ // Graph-based Attention (for code structure)
+ // ============================================================================
+ /**
+ * Graph attention with Rotary Position Embeddings
+ * Excellent for code AST and dependency graphs
+ */
+ class GraphRoPeAttention {
+ constructor(dim, numHeads = 4, maxSeqLen = 4096) {
+ const attention = getAttentionModule();
+ this.inner = new attention.GraphRoPeAttention(dim, numHeads, maxSeqLen);
+ this.dim = dim;
+ this.numHeads = numHeads;
+ this.maxSeqLen = maxSeqLen;
+ }
+ compute(query, keys, values, positions) {
+ const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), positions ? new Int32Array(positions) : undefined);
+ return { values: fromFloat32Array(raw), raw };
+ }
+ }
+ exports.GraphRoPeAttention = GraphRoPeAttention;
+ /**
+ * Edge-featured attention for graphs with edge attributes
+ * Useful for weighted dependency graphs
+ */
+ class EdgeFeaturedAttention {
+ constructor(dim, edgeDim = 16) {
+ const attention = getAttentionModule();
+ this.inner = new attention.EdgeFeaturedAttention(dim, edgeDim);
+ this.dim = dim;
+ this.edgeDim = edgeDim;
+ }
+ compute(query, keys, values, edgeFeatures) {
+ const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), edgeFeatures ? toFloat32Arrays(edgeFeatures) : undefined);
+ return { values: fromFloat32Array(raw), raw };
+ }
+ }
+ exports.EdgeFeaturedAttention = EdgeFeaturedAttention;
+ /**
+ * Dual-space attention (Euclidean + Hyperbolic)
+ * Best of both worlds for hierarchical + semantic similarity
+ */
+ class DualSpaceAttention {
+ constructor(dim, curvature = 1.0, alpha = 0.5) {
+ const attention = getAttentionModule();
+ this.inner = new attention.DualSpaceAttention(dim, curvature, alpha);
+ this.dim = dim;
+ this.curvature = curvature;
+ this.alpha = alpha;
+ }
+ compute(query, keys, values) {
+ const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
+ return { values: fromFloat32Array(raw), raw };
+ }
+ }
+ exports.DualSpaceAttention = DualSpaceAttention;
+ /**
+ * Basic dot-product attention
+ */
+ class DotProductAttention {
+ constructor(dim) {
+ const attention = getAttentionModule();
+ this.inner = new attention.DotProductAttention(dim);
+ this.dim = dim;
+ }
+ compute(query, keys, values) {
+ const raw = this.inner.compute(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values));
+ return { values: fromFloat32Array(raw), raw };
+ }
+ }
+ exports.DotProductAttention = DotProductAttention;
+ // ============================================================================
+ // Parallel/Batch Attention Compute
+ // ============================================================================
+ /**
+ * Compute attention in parallel across multiple queries
+ */
+ async function parallelAttentionCompute(queries, keys, values, attentionType = 'multi-head') {
+ const attention = getAttentionModule();
+ const results = await attention.parallelAttentionCompute(toFloat32Arrays(queries), toFloat32Arrays(keys), toFloat32Arrays(values), attentionType);
+ return results.map((r) => fromFloat32Array(r));
+ }
+ /**
+ * Batch attention compute for multiple query-key-value sets
+ */
+ async function batchAttentionCompute(batches, attentionType = 'multi-head') {
+ const attention = getAttentionModule();
+ const nativeBatches = batches.map(b => ({
+ query: toFloat32Array(b.query),
+ keys: toFloat32Arrays(b.keys),
+ values: toFloat32Arrays(b.values),
+ }));
+ const results = await attention.batchAttentionCompute(nativeBatches, attentionType);
+ return results.map((r) => fromFloat32Array(r));
+ }
+ /**
+ * Async flash attention with callback
+ */
+ function computeFlashAttentionAsync(query, keys, values) {
+ const attention = getAttentionModule();
+ return new Promise((resolve, reject) => {
+ attention.computeFlashAttentionAsync(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), (err, result) => {
+ if (err)
+ reject(err);
+ else
+ resolve(fromFloat32Array(result));
+ });
+ });
+ }
+ /**
+ * Async hyperbolic attention
+ */
+ function computeHyperbolicAttentionAsync(query, keys, values, curvature = 1.0) {
+ const attention = getAttentionModule();
+ return new Promise((resolve, reject) => {
+ attention.computeHyperbolicAttentionAsync(toFloat32Array(query), toFloat32Arrays(keys), toFloat32Arrays(values), curvature, (err, result) => {
+ if (err)
+ reject(err);
+ else
+ resolve(fromFloat32Array(result));
+ });
+ });
+ }
+ // ============================================================================
+ // Training Utilities (for SONA integration)
+ // ============================================================================
+ /**
+ * Adam optimizer for attention training
+ */
+ class AdamOptimizer {
+ constructor(learningRate = 0.001, beta1 = 0.9, beta2 = 0.999) {
+ const attention = getAttentionModule();
+ this.inner = new attention.AdamOptimizer(learningRate, beta1, beta2);
+ }
+ step(gradients, params) {
+ const result = this.inner.step(toFloat32Array(gradients), toFloat32Array(params));
+ return fromFloat32Array(result);
+ }
+ }
+ exports.AdamOptimizer = AdamOptimizer;
+ /**
+ * InfoNCE contrastive loss
+ */
+ function infoNceLoss(anchor, positive, negatives, temperature = 0.07) {
+ const attention = getAttentionModule();
+ return attention.InfoNceLoss.compute(toFloat32Array(anchor), toFloat32Array(positive), toFloat32Arrays(negatives), temperature);
+ }
+ /**
+ * Hard negative mining for contrastive learning
+ */
+ function mineHardNegatives(anchor, candidates, topK = 5) {
+ const attention = getAttentionModule();
+ const miner = new attention.HardNegativeMiner(topK);
+ const results = miner.mine(toFloat32Array(anchor), toFloat32Arrays(candidates));
+ return results.map((r) => fromFloat32Array(r));
+ }
+ // ============================================================================
+ // Benchmarking
+ // ============================================================================
+ /**
+ * Benchmark attention implementations
+ */
+ async function benchmarkAttention(dim, seqLen, iterations = 100) {
+ const attention = getAttentionModule();
+ return attention.benchmarkAttention(dim, seqLen, iterations);
+ }
  exports.default = {
+ // Core attention types
+ DotProductAttention,
  MultiHeadAttention,
  FlashAttention,
  HyperbolicAttention,
  LinearAttention,
  LocalGlobalAttention,
  MoEAttention,
+ // Graph attention types
+ GraphRoPeAttention,
+ EdgeFeaturedAttention,
+ DualSpaceAttention,
+ // Parallel/batch compute
+ parallelAttentionCompute,
+ batchAttentionCompute,
+ computeFlashAttentionAsync,
+ computeHyperbolicAttentionAsync,
+ // Training utilities
+ AdamOptimizer,
+ infoNceLoss,
+ mineHardNegatives,
+ // Hyperbolic math
  projectToPoincareBall,
  poincareDistance,
  mobiusAddition,
  expMap,
  logMap,
+ // Utilities
  isAttentionAvailable,
  getAttentionVersion,
+ benchmarkAttention,
  };
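The compiled wrapper above pins down the defaults (numHeads = 4, maxSeqLen = 4096, temperature = 0.07, Adam at 0.001/0.9/0.999, topK = 5) and shows that the async variants adapt callback-style native calls into Promises. A hedged sketch of how the new training helpers compose, again assuming root re-exports; the gradient below is a placeholder, not something the package computes for you:

// Sketch: one contrastive step with the helpers added in 0.1.64.
// Assumptions: the root 'ruvector' entry re-exports these; the gradient is
// fake, purely to show the AdamOptimizer.step() call shape.
import { AdamOptimizer, infoNceLoss, mineHardNegatives } from 'ruvector';

const dim = 8;
const anchor = new Float32Array(dim).fill(0.5);
const positive = new Float32Array(dim).fill(0.45);
const candidates = Array.from({ length: 20 }, () =>
  Float32Array.from({ length: dim }, () => Math.random()),
);

// Keep only the topK hardest negatives for the loss (default topK is 5).
const negatives = mineHardNegatives(anchor, candidates, 5);

// InfoNCE loss at the default temperature.
const loss = infoNceLoss(anchor, positive, negatives, 0.07);

// Adam with the defaults shown above; step() returns the updated parameters.
const adam = new AdamOptimizer(0.001, 0.9, 0.999);
const params = new Float32Array(dim);
const fakeGrads = new Float32Array(dim).fill(loss * 0.01); // placeholder gradient
const updated = adam.step(fakeGrads, params);
console.log({ loss, updatedLength: updated.length });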
@@ -0,0 +1,148 @@
+ /**
+ * Cluster Wrapper - Distributed coordination for multi-agent systems
+ *
+ * Wraps @ruvector/cluster for Raft consensus, auto-sharding,
+ * and distributed memory across agents.
+ */
+ export declare function isClusterAvailable(): boolean;
+ export interface ClusterNode {
+ id: string;
+ address: string;
+ role: 'leader' | 'follower' | 'candidate';
+ status: 'healthy' | 'unhealthy' | 'unknown';
+ lastHeartbeat: number;
+ }
+ export interface ShardInfo {
+ id: number;
+ range: [number, number];
+ node: string;
+ size: number;
+ status: 'active' | 'migrating' | 'offline';
+ }
+ export interface ClusterConfig {
+ nodeId: string;
+ address: string;
+ peers?: string[];
+ shards?: number;
+ replicationFactor?: number;
+ }
+ /**
+ * Distributed cluster for multi-agent coordination
+ */
+ export declare class RuvectorCluster {
+ private inner;
+ private nodeId;
+ private isLeader;
+ constructor(config: ClusterConfig);
+ /**
+ * Start the cluster node
+ */
+ start(): Promise<void>;
+ /**
+ * Stop the cluster node gracefully
+ */
+ stop(): Promise<void>;
+ /**
+ * Join an existing cluster
+ */
+ join(peerAddress: string): Promise<boolean>;
+ /**
+ * Leave the cluster
+ */
+ leave(): Promise<void>;
+ /**
+ * Get current node info
+ */
+ getNodeInfo(): ClusterNode;
+ /**
+ * Get all cluster nodes
+ */
+ getNodes(): ClusterNode[];
+ /**
+ * Check if this node is the leader
+ */
+ isClusterLeader(): boolean;
+ /**
+ * Get the current leader
+ */
+ getLeader(): ClusterNode | null;
+ /**
+ * Put a value in distributed storage
+ */
+ put(key: string, value: any): Promise<boolean>;
+ /**
+ * Get a value from distributed storage
+ */
+ get(key: string): Promise<any | null>;
+ /**
+ * Delete a value from distributed storage
+ */
+ delete(key: string): Promise<boolean>;
+ /**
+ * Atomic compare-and-swap
+ */
+ compareAndSwap(key: string, expected: any, newValue: any): Promise<boolean>;
+ /**
+ * Get shard information
+ */
+ getShards(): ShardInfo[];
+ /**
+ * Get the shard for a key
+ */
+ getShardForKey(key: string): ShardInfo;
+ /**
+ * Trigger shard rebalancing
+ */
+ rebalance(): Promise<void>;
+ /**
+ * Acquire a distributed lock
+ */
+ lock(name: string, timeout?: number): Promise<string | null>;
+ /**
+ * Release a distributed lock
+ */
+ unlock(name: string, token: string): Promise<boolean>;
+ /**
+ * Extend a lock's TTL
+ */
+ extendLock(name: string, token: string, extension?: number): Promise<boolean>;
+ /**
+ * Subscribe to a channel
+ */
+ subscribe(channel: string, callback: (message: any) => void): () => void;
+ /**
+ * Publish to a channel
+ */
+ publish(channel: string, message: any): Promise<number>;
+ /**
+ * Register an agent with the cluster
+ */
+ registerAgent(agentId: string, capabilities: string[]): Promise<boolean>;
+ /**
+ * Find agents with a capability
+ */
+ findAgents(capability: string): Promise<string[]>;
+ /**
+ * Assign a task to an agent
+ */
+ assignTask(taskId: string, agentId: string, task: any): Promise<boolean>;
+ /**
+ * Complete a task
+ */
+ completeTask(taskId: string, result: any): Promise<boolean>;
+ /**
+ * Get cluster statistics
+ */
+ stats(): {
+ nodes: number;
+ shards: number;
+ leader: string | null;
+ healthy: boolean;
+ };
+ }
+ /**
+ * Create a cluster node for agent coordination
+ */
+ export declare function createCluster(config: ClusterConfig): RuvectorCluster;
+ export default RuvectorCluster;
+ //# sourceMappingURL=cluster-wrapper.d.ts.map
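The new cluster-wrapper.d.ts declares a full coordination surface: Raft-backed KV with compare-and-swap, auto-sharding, distributed locks, pub/sub, and an agent registry. A minimal sketch against those declarations, assuming createCluster is reachable from the package root; addresses, IDs, and channel names are illustrative:

// Sketch: drives the RuvectorCluster API exactly as declared above.
// Assumption: 'ruvector' re-exports createCluster; all concrete values are made up.
import { createCluster } from 'ruvector';

async function main(): Promise<void> {
  const cluster = createCluster({
    nodeId: 'agent-1',
    address: '127.0.0.1:7001',
    peers: ['127.0.0.1:7002'],
    shards: 16,
    replicationFactor: 2,
  });

  await cluster.start();

  // Distributed lock around a KV critical section; lock() resolves to a token or null.
  const token = await cluster.lock('task-queue', 5000);
  if (token) {
    await cluster.put('tasks/next', { id: 42 });
    await cluster.unlock('task-queue', token);
  }

  // Agent registry and pub/sub, per the declarations above.
  await cluster.registerAgent('agent-1', ['embedding', 'search']);
  const unsubscribe = cluster.subscribe('events', (msg) => console.log('event:', msg));
  await cluster.publish('events', { type: 'ready', from: cluster.getNodeInfo().id });

  unsubscribe();
  await cluster.stop();
}

main().catch(console.error);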