ruvector 0.2.22 → 0.2.23

This diff reflects the changes between package versions as they appear in their respective public registries. It covers only publicly available versions released to a supported registry and is provided for informational purposes only.
Files changed (108)
  1. package/README.md +2 -2
  2. package/package.json +9 -5
  3. package/dist/analysis/complexity.d.ts +0 -52
  4. package/dist/analysis/complexity.d.ts.map +0 -1
  5. package/dist/analysis/complexity.js +0 -146
  6. package/dist/analysis/index.d.ts +0 -15
  7. package/dist/analysis/index.d.ts.map +0 -1
  8. package/dist/analysis/index.js +0 -38
  9. package/dist/analysis/patterns.d.ts +0 -71
  10. package/dist/analysis/patterns.d.ts.map +0 -1
  11. package/dist/analysis/patterns.js +0 -243
  12. package/dist/analysis/security.d.ts +0 -51
  13. package/dist/analysis/security.d.ts.map +0 -1
  14. package/dist/analysis/security.js +0 -139
  15. package/dist/core/adaptive-embedder.d.ts +0 -156
  16. package/dist/core/adaptive-embedder.d.ts.map +0 -1
  17. package/dist/core/adaptive-embedder.js +0 -838
  18. package/dist/core/agentdb-fast.d.ts +0 -149
  19. package/dist/core/agentdb-fast.d.ts.map +0 -1
  20. package/dist/core/agentdb-fast.js +0 -301
  21. package/dist/core/ast-parser.d.ts +0 -108
  22. package/dist/core/ast-parser.d.ts.map +0 -1
  23. package/dist/core/ast-parser.js +0 -602
  24. package/dist/core/attention-fallbacks.d.ts +0 -321
  25. package/dist/core/attention-fallbacks.d.ts.map +0 -1
  26. package/dist/core/attention-fallbacks.js +0 -552
  27. package/dist/core/cluster-wrapper.d.ts +0 -148
  28. package/dist/core/cluster-wrapper.d.ts.map +0 -1
  29. package/dist/core/cluster-wrapper.js +0 -271
  30. package/dist/core/coverage-router.d.ts +0 -88
  31. package/dist/core/coverage-router.d.ts.map +0 -1
  32. package/dist/core/coverage-router.js +0 -315
  33. package/dist/core/diff-embeddings.d.ts +0 -93
  34. package/dist/core/diff-embeddings.d.ts.map +0 -1
  35. package/dist/core/diff-embeddings.js +0 -334
  36. package/dist/core/gnn-wrapper.d.ts +0 -143
  37. package/dist/core/gnn-wrapper.d.ts.map +0 -1
  38. package/dist/core/gnn-wrapper.js +0 -213
  39. package/dist/core/graph-algorithms.d.ts +0 -83
  40. package/dist/core/graph-algorithms.d.ts.map +0 -1
  41. package/dist/core/graph-algorithms.js +0 -514
  42. package/dist/core/graph-wrapper.d.ts +0 -147
  43. package/dist/core/graph-wrapper.d.ts.map +0 -1
  44. package/dist/core/graph-wrapper.js +0 -299
  45. package/dist/core/index.d.ts +0 -48
  46. package/dist/core/index.d.ts.map +0 -1
  47. package/dist/core/index.js +0 -89
  48. package/dist/core/intelligence-engine.d.ts +0 -258
  49. package/dist/core/intelligence-engine.d.ts.map +0 -1
  50. package/dist/core/intelligence-engine.js +0 -1030
  51. package/dist/core/learning-engine.d.ts +0 -160
  52. package/dist/core/learning-engine.d.ts.map +0 -1
  53. package/dist/core/learning-engine.js +0 -589
  54. package/dist/core/neural-embeddings.d.ts +0 -393
  55. package/dist/core/neural-embeddings.d.ts.map +0 -1
  56. package/dist/core/neural-embeddings.js +0 -1091
  57. package/dist/core/neural-perf.d.ts +0 -331
  58. package/dist/core/neural-perf.d.ts.map +0 -1
  59. package/dist/core/neural-perf.js +0 -704
  60. package/dist/core/onnx/pkg/package.json +0 -3
  61. package/dist/core/onnx-embedder.d.ts +0 -105
  62. package/dist/core/onnx-embedder.d.ts.map +0 -1
  63. package/dist/core/onnx-embedder.js +0 -410
  64. package/dist/core/onnx-optimized.d.ts +0 -109
  65. package/dist/core/onnx-optimized.d.ts.map +0 -1
  66. package/dist/core/onnx-optimized.js +0 -419
  67. package/dist/core/parallel-intelligence.d.ts +0 -109
  68. package/dist/core/parallel-intelligence.d.ts.map +0 -1
  69. package/dist/core/parallel-intelligence.js +0 -340
  70. package/dist/core/parallel-workers.d.ts +0 -177
  71. package/dist/core/parallel-workers.d.ts.map +0 -1
  72. package/dist/core/parallel-workers.js +0 -671
  73. package/dist/core/router-wrapper.d.ts +0 -75
  74. package/dist/core/router-wrapper.d.ts.map +0 -1
  75. package/dist/core/router-wrapper.js +0 -243
  76. package/dist/core/rvf-wrapper.d.ts +0 -86
  77. package/dist/core/rvf-wrapper.d.ts.map +0 -1
  78. package/dist/core/rvf-wrapper.js +0 -102
  79. package/dist/core/sona-wrapper.d.ts +0 -226
  80. package/dist/core/sona-wrapper.d.ts.map +0 -1
  81. package/dist/core/sona-wrapper.js +0 -282
  82. package/dist/core/tensor-compress.d.ts +0 -134
  83. package/dist/core/tensor-compress.d.ts.map +0 -1
  84. package/dist/core/tensor-compress.js +0 -432
  85. package/dist/index.d.ts +0 -105
  86. package/dist/index.d.ts.map +0 -1
  87. package/dist/index.js +0 -221
  88. package/dist/services/embedding-service.d.ts +0 -136
  89. package/dist/services/embedding-service.d.ts.map +0 -1
  90. package/dist/services/embedding-service.js +0 -294
  91. package/dist/services/index.d.ts +0 -6
  92. package/dist/services/index.d.ts.map +0 -1
  93. package/dist/services/index.js +0 -26
  94. package/dist/types.d.ts +0 -145
  95. package/dist/types.d.ts.map +0 -1
  96. package/dist/types.js +0 -2
  97. package/dist/workers/benchmark.d.ts +0 -44
  98. package/dist/workers/benchmark.d.ts.map +0 -1
  99. package/dist/workers/benchmark.js +0 -230
  100. package/dist/workers/index.d.ts +0 -10
  101. package/dist/workers/index.d.ts.map +0 -1
  102. package/dist/workers/index.js +0 -25
  103. package/dist/workers/native-worker.d.ts +0 -76
  104. package/dist/workers/native-worker.d.ts.map +0 -1
  105. package/dist/workers/native-worker.js +0 -490
  106. package/dist/workers/types.d.ts +0 -69
  107. package/dist/workers/types.d.ts.map +0 -1
  108. package/dist/workers/types.js +0 -7
@@ -1,321 +0,0 @@
- /**
- * Attention Fallbacks - Safe wrapper around @ruvector/attention with automatic array conversion
- *
- * This wrapper handles the array type conversion automatically, allowing users
- * to pass either regular arrays or Float32Arrays.
- *
- * @ruvector/attention requires Float32Array inputs.
- * This wrapper handles the conversion automatically.
- */
- /**
- * Attention output interface
- */
- export interface AttentionOutput {
- /** Output vector as regular array */
- values: number[];
- /** Output as Float32Array for performance-critical code */
- raw: Float32Array;
- }
- /**
- * Multi-head attention mechanism
- *
- * This wrapper automatically converts array inputs to Float32Array.
- */
- export declare class MultiHeadAttention {
- private inner;
- readonly dim: number;
- readonly numHeads: number;
- /**
- * Create a new multi-head attention instance
- *
- * @param dim - Embedding dimension (must be divisible by numHeads)
- * @param numHeads - Number of attention heads
- */
- constructor(dim: number, numHeads: number);
- /**
- * Compute multi-head attention
- *
- * @param query - Query vector
- * @param keys - Array of key vectors
- * @param values - Array of value vectors
- * @returns Attention output
- *
- * @example
- * ```typescript
- * const mha = new MultiHeadAttention(64, 4);
- *
- * // Works with regular arrays
- * const result1 = mha.compute([...64 values], [[...64], [...64]], [[...64], [...64]]);
- *
- * // Also works with Float32Array
- * const q = new Float32Array(64);
- * const k = [new Float32Array(64)];
- * const v = [new Float32Array(64)];
- * const result2 = mha.compute(q, k, v);
- * ```
- */
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- /**
- * Compute and return raw Float32Array (faster, no conversion)
- */
- computeRaw(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
- get headDim(): number;
- }
- /**
- * Flash attention with tiled computation
- */
- export declare class FlashAttention {
- private inner;
- readonly dim: number;
- readonly blockSize: number;
- /**
- * Create a new flash attention instance
- *
- * @param dim - Embedding dimension
- * @param blockSize - Block size for tiled computation (default: 512)
- */
- constructor(dim: number, blockSize?: number);
- /**
- * Compute flash attention
- */
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- computeRaw(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
- }
- /**
- * Hyperbolic attention in Poincare ball model
- */
- export declare class HyperbolicAttention {
- private inner;
- readonly dim: number;
- readonly curvature: number;
- /**
- * Create a new hyperbolic attention instance
- *
- * @param dim - Embedding dimension
- * @param curvature - Hyperbolic curvature (typically 1.0)
- */
- constructor(dim: number, curvature?: number);
- /**
- * Compute hyperbolic attention
- */
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- computeRaw(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
- }
- /**
- * Linear attention (Performer-style) with O(n) complexity
- */
- export declare class LinearAttention {
- private inner;
- readonly dim: number;
- readonly numFeatures: number;
- /**
- * Create a new linear attention instance
- *
- * @param dim - Embedding dimension
- * @param numFeatures - Number of random features
- */
- constructor(dim: number, numFeatures: number);
- /**
- * Compute linear attention
- */
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- computeRaw(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
- }
- /**
- * Local-global attention (Longformer-style)
- */
- export declare class LocalGlobalAttention {
- private inner;
- readonly dim: number;
- readonly localWindow: number;
- readonly globalTokens: number;
- /**
- * Create a new local-global attention instance
- *
- * @param dim - Embedding dimension
- * @param localWindow - Size of local attention window
- * @param globalTokens - Number of global attention tokens
- */
- constructor(dim: number, localWindow: number, globalTokens: number);
- /**
- * Compute local-global attention
- */
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- computeRaw(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
- }
- /**
- * MoE configuration
- */
- export interface MoEConfig {
- dim: number;
- numExperts: number;
- topK: number;
- expertCapacity?: number;
- }
- /**
- * Mixture of Experts attention
- */
- export declare class MoEAttention {
- private inner;
- readonly config: MoEConfig;
- /**
- * Create a new MoE attention instance
- *
- * @param config - MoE configuration
- */
- constructor(config: MoEConfig);
- /**
- * Create with simple parameters
- */
- static simple(dim: number, numExperts: number, topK: number): MoEAttention;
- /**
- * Compute MoE attention
- */
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- computeRaw(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
- }
- /**
- * Project a vector into the Poincare ball
- */
- export declare function projectToPoincareBall(vector: number[] | Float32Array, curvature?: number): number[];
- /**
- * Compute hyperbolic (Poincare) distance between two points
- */
- export declare function poincareDistance(a: number[] | Float32Array, b: number[] | Float32Array, curvature?: number): number;
- /**
- * Mobius addition in hyperbolic space
- */
- export declare function mobiusAddition(a: number[] | Float32Array, b: number[] | Float32Array, curvature?: number): number[];
- /**
- * Exponential map from tangent space to hyperbolic space
- */
- export declare function expMap(base: number[] | Float32Array, tangent: number[] | Float32Array, curvature?: number): number[];
- /**
- * Logarithmic map from hyperbolic space to tangent space
- */
- export declare function logMap(base: number[] | Float32Array, point: number[] | Float32Array, curvature?: number): number[];
- /**
- * Check if attention module is available
- */
- export declare function isAttentionAvailable(): boolean;
- /**
- * Get attention module version
- */
- export declare function getAttentionVersion(): string | null;
- /**
- * Graph attention with Rotary Position Embeddings
- * Excellent for code AST and dependency graphs
- */
- export declare class GraphRoPeAttention {
- private inner;
- readonly dim: number;
- readonly numHeads: number;
- readonly maxSeqLen: number;
- constructor(dim: number, numHeads?: number, maxSeqLen?: number);
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], positions?: number[]): AttentionOutput;
- }
- /**
- * Edge-featured attention for graphs with edge attributes
- * Useful for weighted dependency graphs
- */
- export declare class EdgeFeaturedAttention {
- private inner;
- readonly dim: number;
- readonly edgeDim: number;
- constructor(dim: number, edgeDim?: number);
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], edgeFeatures?: (number[] | Float32Array)[]): AttentionOutput;
- }
- /**
- * Dual-space attention (Euclidean + Hyperbolic)
- * Best of both worlds for hierarchical + semantic similarity
- */
- export declare class DualSpaceAttention {
- private inner;
- readonly dim: number;
- readonly curvature: number;
- readonly alpha: number;
- constructor(dim: number, curvature?: number, alpha?: number);
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- }
- /**
- * Basic dot-product attention
- */
- export declare class DotProductAttention {
- private inner;
- readonly dim: number;
- constructor(dim: number);
- compute(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): AttentionOutput;
- }
- /**
- * Compute attention in parallel across multiple queries
- */
- export declare function parallelAttentionCompute(queries: (number[] | Float32Array)[], keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], attentionType?: 'dot' | 'multi-head' | 'flash' | 'hyperbolic' | 'linear'): Promise<number[][]>;
- /**
- * Batch attention compute for multiple query-key-value sets
- */
- export declare function batchAttentionCompute(batches: Array<{
- query: number[] | Float32Array;
- keys: (number[] | Float32Array)[];
- values: (number[] | Float32Array)[];
- }>, attentionType?: 'dot' | 'multi-head' | 'flash' | 'hyperbolic' | 'linear'): Promise<number[][]>;
- /**
- * Async flash attention with callback
- */
- export declare function computeFlashAttentionAsync(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[]): Promise<number[]>;
- /**
- * Async hyperbolic attention
- */
- export declare function computeHyperbolicAttentionAsync(query: number[] | Float32Array, keys: (number[] | Float32Array)[], values: (number[] | Float32Array)[], curvature?: number): Promise<number[]>;
- /**
- * Adam optimizer for attention training
- */
- export declare class AdamOptimizer {
- private inner;
- constructor(learningRate?: number, beta1?: number, beta2?: number);
- step(gradients: number[] | Float32Array, params: number[] | Float32Array): number[];
- }
- /**
- * InfoNCE contrastive loss
- */
- export declare function infoNceLoss(anchor: number[] | Float32Array, positive: number[] | Float32Array, negatives: (number[] | Float32Array)[], temperature?: number): number;
- /**
- * Hard negative mining for contrastive learning
- */
- export declare function mineHardNegatives(anchor: number[] | Float32Array, candidates: (number[] | Float32Array)[], topK?: number): number[][];
- /**
- * Benchmark attention implementations
- */
- export declare function benchmarkAttention(dim: number, seqLen: number, iterations?: number): Promise<Record<string, {
- avgMs: number;
- minMs: number;
- maxMs: number;
- }>>;
- declare const _default: {
- DotProductAttention: typeof DotProductAttention;
- MultiHeadAttention: typeof MultiHeadAttention;
- FlashAttention: typeof FlashAttention;
- HyperbolicAttention: typeof HyperbolicAttention;
- LinearAttention: typeof LinearAttention;
- LocalGlobalAttention: typeof LocalGlobalAttention;
- MoEAttention: typeof MoEAttention;
- GraphRoPeAttention: typeof GraphRoPeAttention;
- EdgeFeaturedAttention: typeof EdgeFeaturedAttention;
- DualSpaceAttention: typeof DualSpaceAttention;
- parallelAttentionCompute: typeof parallelAttentionCompute;
- batchAttentionCompute: typeof batchAttentionCompute;
- computeFlashAttentionAsync: typeof computeFlashAttentionAsync;
- computeHyperbolicAttentionAsync: typeof computeHyperbolicAttentionAsync;
- AdamOptimizer: typeof AdamOptimizer;
- infoNceLoss: typeof infoNceLoss;
- mineHardNegatives: typeof mineHardNegatives;
- projectToPoincareBall: typeof projectToPoincareBall;
- poincareDistance: typeof poincareDistance;
- mobiusAddition: typeof mobiusAddition;
- expMap: typeof expMap;
- logMap: typeof logMap;
- isAttentionAvailable: typeof isAttentionAvailable;
- getAttentionVersion: typeof getAttentionVersion;
- benchmarkAttention: typeof benchmarkAttention;
- };
- export default _default;
- //# sourceMappingURL=attention-fallbacks.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"attention-fallbacks.d.ts","sourceRoot":"","sources":["../../src/core/attention-fallbacks.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AA8CH;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,qCAAqC;IACrC,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,2DAA2D;IAC3D,GAAG,EAAE,YAAY,CAAC;CACnB;AAED;;;;GAIG;AACH,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,QAAQ,EAAE,MAAM,CAAC;IAEjC;;;;;OAKG;gBACS,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;IAOzC;;;;;;;;;;;;;;;;;;;;;OAqBG;IACH,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;IAYlB;;OAEG;IACH,UAAU,CACR,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY;IAIf,IAAI,OAAO,IAAI,MAAM,CAEpB;CACF;AAED;;GAEG;AACH,qBAAa,cAAc;IACzB,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,SAAS,EAAE,MAAM,CAAC;IAElC;;;;;OAKG;gBACS,GAAG,EAAE,MAAM,EAAE,SAAS,GAAE,MAAY;IAOhD;;OAEG;IACH,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;IAYlB,UAAU,CACR,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY;CAGhB;AAED;;GAEG;AACH,qBAAa,mBAAmB;IAC9B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,SAAS,EAAE,MAAM,CAAC;IAElC;;;;;OAKG;gBACS,GAAG,EAAE,MAAM,EAAE,SAAS,GAAE,MAAY;IAOhD;;OAEG;IACH,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;IAYlB,UAAU,CACR,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY;CAGhB;AAED;;GAEG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,WAAW,EAAE,MAAM,CAAC;IAEpC;;;;;OAKG;gBACS,GAAG,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM;IAO5C;;OAEG;IACH,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;IAYlB,UAAU,CACR,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY;CAGhB;AAED;;GAEG;AACH,qBAAa,oBAAoB;IAC/B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,WAAW,EAAE,MAAM,CAAC;IACpC,SAAgB,YAAY,EAAE,MAAM,CAAC;IAErC;;;;;;OAMG;gBACS,GAAG,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM;IAQlE;;OAEG;IACH,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;IAYlB,UAAU,CACR,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY;CAGhB;AAED;;GAEG;AACH,MAAM,WAAW,SAAS;IACxB,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,EAAE,MAAM,CAAC;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED;;GAEG;AACH,qBAAa,YAAY;IACvB,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,MAAM,EAAE,SAAS,CAAC;IAElC;;;;OAIG;gBACS,MAAM,EAAE,SAAS;IAW7B;;OAEG;IACH,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,YAAY;IAI1E;;OAEG;IACH,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;IAYlB,UAAU,CACR,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY;CAGhB;AAID;;GAEG;AACH,wBAAgB,qBAAqB,CACnC,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAC/B,SAAS,GAAE,MAAY,GACtB,MAAM,EAAE,CAIV;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,CAAC,EAAE,MAAM,EAAE,GAAG,YAAY,EAC1B,CAAC,EAAE,MAAM,EAAE,GAAG,YAAY,EAC1B,SAAS,GAAE,MAAY,GACtB,MAAM,CAGR;AAED;;GAEG;AACH,wBAAgB,cAAc,CAC5B,CAAC,EAAE,MAAM,EAAE,GAA
G,YAAY,EAC1B,CAAC,EAAE,MAAM,EAAE,GAAG,YAAY,EAC1B,SAAS,GAAE,MAAY,GACtB,MAAM,EAAE,CAIV;AAED;;GAEG;AACH,wBAAgB,MAAM,CACpB,IAAI,EAAE,MAAM,EAAE,GAAG,YAAY,EAC7B,OAAO,EAAE,MAAM,EAAE,GAAG,YAAY,EAChC,SAAS,GAAE,MAAY,GACtB,MAAM,EAAE,CAIV;AAED;;GAEG;AACH,wBAAgB,MAAM,CACpB,IAAI,EAAE,MAAM,EAAE,GAAG,YAAY,EAC7B,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,SAAS,GAAE,MAAY,GACtB,MAAM,EAAE,CAIV;AAED;;GAEG;AACH,wBAAgB,oBAAoB,IAAI,OAAO,CAO9C;AAED;;GAEG;AACH,wBAAgB,mBAAmB,IAAI,MAAM,GAAG,IAAI,CAOnD;AAMD;;;GAGG;AACH,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,QAAQ,EAAE,MAAM,CAAC;IACjC,SAAgB,SAAS,EAAE,MAAM,CAAC;gBAEtB,GAAG,EAAE,MAAM,EAAE,QAAQ,GAAE,MAAU,EAAE,SAAS,GAAE,MAAa;IAQvE,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACnC,SAAS,CAAC,EAAE,MAAM,EAAE,GACnB,eAAe;CASnB;AAED;;;GAGG;AACH,qBAAa,qBAAqB;IAChC,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,OAAO,EAAE,MAAM,CAAC;gBAEpB,GAAG,EAAE,MAAM,EAAE,OAAO,GAAE,MAAW;IAO7C,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACnC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GACzC,eAAe;CASnB;AAED;;;GAGG;AACH,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,SAAgB,SAAS,EAAE,MAAM,CAAC;IAClC,SAAgB,KAAK,EAAE,MAAM,CAAC;gBAElB,GAAG,EAAE,MAAM,EAAE,SAAS,GAAE,MAAY,EAAE,KAAK,GAAE,MAAY;IAQrE,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;CAQnB;AAED;;GAEG;AACH,qBAAa,mBAAmB;IAC9B,OAAO,CAAC,KAAK,CAAM;IACnB,SAAgB,GAAG,EAAE,MAAM,CAAC;gBAEhB,GAAG,EAAE,MAAM;IAMvB,OAAO,CACL,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,eAAe;CAQnB;AAMD;;GAEG;AACH,wBAAsB,wBAAwB,CAC5C,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACpC,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACnC,aAAa,GAAE,KAAK,GAAG,YAAY,GAAG,OAAO,GAAG,YAAY,GAAG,QAAuB,GACrF,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CASrB;AAED;;GAEG;AACH,wBAAsB,qBAAqB,CACzC,OAAO,EAAE,KAAK,CAAC;IACb,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,CAAC;IAC/B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,CAAC;IAClC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,CAAC;CACrC,CAAC,EACF,aAAa,GAAE,KAAK,GAAG,YAAY,GAAG,OAAO,GAAG,YAAY,GAAG,QAAuB,GACrF,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CASrB;AAED;;GAEG;AACH,wBAAgB,0BAA0B,CACxC,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,GAClC,OAAO,CAAC,MAAM,EAAE,CAAC,CAanB;AAED;;GAEG;AACH,wBAAgB,+BAA+B,CAC7C,KAAK,EAAE,MAAM,EAAE,GAAG,YAAY,EAC9B,IAAI,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACjC,MAAM,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACnC,SAAS,GAAE,MAAY,GACtB,OAAO,CAAC,MAAM,EAAE,CAAC,CAcnB;AAMD;;GAEG;AACH,qBAAa,aAAa;IACxB,OAAO,CAAC,KAAK,CAAM;gBAEP,YAAY,GAAE,MAAc,EAAE,KAAK,GAAE,MAAY,EAAE,KAAK,GAAE,MAAc;IAKpF,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,YAAY,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,GAAG,MAAM,EAAE;CAIpF;AAED;;GAEG;AACH,wBAAgB,WAAW,CACzB,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAC/B,QAAQ,EAAE,MAAM,EAAE,GAAG,YAAY,EACjC,SAAS,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACtC,WAAW,GAAE,MAAa,GACzB,MAAM,CAQR;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAC/B,UAAU,EAAE,CAAC,MAAM,EAAE,GAAG,YAAY,CAAC,EAAE,EACvC,IAAI,GAAE,MAAU,GACf,MAAM,EAAE,EAAE,CAKZ;AAMD;;GAEG;AACH,wBAAsB,kBAAkB,C
ACtC,GAAG,EAAE,MAAM,EACX,MAAM,EAAE,MAAM,EACd,UAAU,GAAE,MAAY,GACvB,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,MAAM,CAAA;CAAE,CAAC,CAAC,CAG1E;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAED,wBAqCE"}
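For reference, the hunk above removes the attention fallback declarations that accepted either number[] or Float32Array inputs. A minimal usage sketch of that removed API, based only on the declarations shown; the import path assumes the 0.2.22 dist layout, and the vector contents are illustrative:

```typescript
// Hypothetical call site for the removed wrapper API (path and values are assumptions).
import { MultiHeadAttention } from 'ruvector/dist/core/attention-fallbacks.js';

const dim = 64;
const mha = new MultiHeadAttention(dim, 4); // dim must be divisible by numHeads

// Plain arrays are accepted; per the doc comments, the wrapper converts them
// to Float32Array before calling into @ruvector/attention.
const query = new Array(dim).fill(0.1);
const keys = [new Array(dim).fill(0.2), new Array(dim).fill(0.3)];
const values = [new Array(dim).fill(0.4), new Array(dim).fill(0.5)];

const out = mha.compute(query, keys, values);
console.log(out.values.length, out.raw instanceof Float32Array);
```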