@claude-flow/cli 3.0.0-alpha.175 → 3.0.0-alpha.176

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,357 @@
1
+ /**
2
+ * RuVector Training Service
3
+ * Real WASM-accelerated neural training using @ruvector packages
4
+ *
5
+ * Features:
6
+ * - MicroLoRA: <100µs adaptation with rank-2 LoRA
7
+ * - Flash Attention: 2.49x-7.47x speedup
8
+ * - Trajectory Buffer: Learning from success/failure
9
+ * - Contrastive Learning: InfoNCE loss
10
+ *
11
+ * Created with ❤️ by ruv.io
12
+ */
13
// Lazy-loaded WASM modules — all stay null until initializeTraining() succeeds.
let microLoRA = null; // learningWasm.WasmMicroLoRA instance (shared adapter)
let scopedLoRA = null; // learningWasm.WasmScopedLoRA instance (per-operator adapters)
let trajectoryBuffer = null; // learningWasm.WasmTrajectoryBuffer (success/failure records)
let flashAttention = null; // attention.FlashAttention (optional, on by default)
let moeAttention = null; // attention.MoEAttention (opt-in via config.useMoE)
let hyperbolicAttention = null; // attention.HyperbolicAttention (opt-in via config.useHyperbolic)
let optimizer = null; // attention.AdamWOptimizer
let contrastiveLoss = null; // attention.InfoNceLoss
let curriculum = null; // attention.CurriculumScheduler (only when config.totalSteps is set)
let hardMiner = null; // attention.HardNegativeMiner (best-effort; may stay null)
// Training state
let initialized = false; // set true only after initializeTraining() completes
let totalAdaptations = 0; // incremented by trainPattern()/adaptWithReward()
let totalForwards = 0; // incremented by forward()
let lastBenchmark = null; // cached result of benchmarkTraining()
29
/**
 * Initialize the RuVector training system.
 *
 * Loads the @ruvector/learning-wasm module directly from disk (Node.js has no
 * fetch-based WASM loading for local packages), then constructs the LoRA
 * adapters, trajectory buffer, attention mechanisms, optimizer and loss.
 *
 * @param {object} [config] - Optional settings:
 *   dim (capped at 256 for WASM), learningRate, alpha, trajectoryCapacity,
 *   useFlashAttention (default on), useMoE, useHyperbolic, totalSteps,
 *   warmupSteps.
 * @returns {Promise<{success: boolean, features: string[], error?: string}>}
 *   Never throws; failures are reported via `success: false` plus `error`.
 */
export async function initializeTraining(config = {}) {
  const features = [];
  const dim = Math.min(config.dim || 256, 256); // Max 256 for WASM
  // FIX: use ?? instead of || so an explicit 0 is not silently replaced
  // by the default (|| would coerce 0 to the fallback value).
  const lr = config.learningRate ?? 0.01;
  const alpha = config.alpha ?? 0.1;
  try {
    // Initialize MicroLoRA with direct WASM loading (Node.js compatible)
    const fs = await import('fs');
    const { createRequire } = await import('module');
    const require = createRequire(import.meta.url);
    // Load WASM file directly instead of using fetch
    const wasmPath = require.resolve('@ruvector/learning-wasm/ruvector_learning_wasm_bg.wasm');
    const wasmBuffer = fs.readFileSync(wasmPath);
    const learningWasm = await import('@ruvector/learning-wasm');
    learningWasm.initSync({ module: wasmBuffer });
    microLoRA = new learningWasm.WasmMicroLoRA(dim, alpha, lr);
    features.push(`MicroLoRA (${dim}-dim, <1μs adaptation)`);
    // Initialize ScopedLoRA for per-operator learning
    scopedLoRA = new learningWasm.WasmScopedLoRA(dim, alpha, lr);
    scopedLoRA.set_category_fallback(true);
    features.push('ScopedLoRA (17 operators)');
    // Initialize trajectory buffer
    trajectoryBuffer = new learningWasm.WasmTrajectoryBuffer(config.trajectoryCapacity || 10000, dim);
    features.push('TrajectoryBuffer');
    // Initialize attention mechanisms
    const attention = await import('@ruvector/attention');
    if (config.useFlashAttention !== false) {
      flashAttention = new attention.FlashAttention(dim, 64);
      features.push('FlashAttention');
    }
    if (config.useMoE) {
      moeAttention = attention.MoEAttention.simple(dim, 8, 2);
      features.push('MoE (8 experts, top-2)');
    }
    if (config.useHyperbolic) {
      hyperbolicAttention = new attention.HyperbolicAttention(dim, 1.0);
      features.push('HyperbolicAttention');
    }
    // Initialize optimizer and loss
    optimizer = new attention.AdamWOptimizer(lr, 0.9, 0.999, 1e-8, 0.01);
    features.push('AdamW Optimizer');
    contrastiveLoss = new attention.InfoNceLoss(0.07);
    features.push('InfoNCE Loss');
    // Curriculum scheduler (only when a step budget is known)
    if (config.totalSteps) {
      curriculum = new attention.CurriculumScheduler(config.totalSteps, config.warmupSteps || Math.floor(config.totalSteps * 0.1));
      features.push('Curriculum Learning');
    }
    // Hard negative mining - use string for MiningStrategy enum due to NAPI binding quirk
    try {
      // @ts-expect-error - MiningStrategy enum binding expects string not enum value
      hardMiner = new attention.HardNegativeMiner(5, 'semi_hard');
      features.push('Hard Negative Mining');
    }
    catch {
      // Mining not available, continue without it
    }
    initialized = true;
    return { success: true, features };
  }
  catch (error) {
    return {
      success: false,
      features,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}
100
/**
 * Operator types for scoped LoRA (0-16).
 * Frozen so the shared enum cannot be mutated at runtime by consumers.
 */
export const OperatorType = Object.freeze({
  GENERAL: 0,
  ATTENTION: 1,
  MLP: 2,
  EMBEDDING: 3,
  NORMALIZATION: 4,
  PROJECTION: 5,
  POOLING: 6,
  CONVOLUTION: 7,
  RECURRENT: 8,
  ROUTING: 9,
  MEMORY: 10,
  REASONING: 11,
  COORDINATION: 12,
  OPTIMIZATION: 13,
  SECURITY: 14,
  TESTING: 15,
  DEBUGGING: 16,
});
122
/**
 * Train a pattern with MicroLoRA.
 *
 * When `operatorType` is provided and ScopedLoRA is available, the gradient
 * is applied to that operator's scoped adapter; otherwise the shared
 * MicroLoRA adapter is adapted.
 *
 * NOTE(review): `embedding` is currently unused by both paths; it is kept
 * for API compatibility with existing callers — confirm before removing.
 *
 * @param {Float32Array|number[]} embedding - Pattern embedding (unused).
 * @param {Float32Array|number[]} gradient - Gradient vector to apply.
 * @param {number} [operatorType] - Optional OperatorType index (0-16).
 * @returns {Promise<{deltaNorm: number, adaptCount: number}>}
 * @throws {Error} If the training system is not initialized.
 */
export async function trainPattern(embedding, gradient, operatorType) {
  if (!initialized || !microLoRA) {
    throw new Error('Training system not initialized');
  }
  // Use scoped LoRA if operator type specified
  if (operatorType !== undefined && scopedLoRA) {
    scopedLoRA.adapt_array(operatorType, gradient);
    // FIX: count scoped adaptations too — previously only the MicroLoRA
    // path incremented totalAdaptations, skewing getTrainingStats().
    totalAdaptations++;
    return {
      deltaNorm: scopedLoRA.delta_norm(operatorType),
      adaptCount: scopedLoRA.adapt_count(operatorType),
    };
  }
  // Standard MicroLoRA adaptation
  microLoRA.adapt_array(gradient);
  totalAdaptations++;
  return {
    deltaNorm: microLoRA.delta_norm(),
    adaptCount: microLoRA.adapt_count(),
  };
}
145
/**
 * Forward pass through LoRA.
 *
 * Routes through the per-operator ScopedLoRA when an operator type is
 * supplied, otherwise through the shared MicroLoRA adapter.
 *
 * @param {Float32Array|number[]} input - Input vector.
 * @param {number} [operatorType] - Optional OperatorType index (0-16).
 * @returns Transformed output vector.
 * @throws {Error} If the training system is not initialized.
 */
export function forward(input, operatorType) {
  if (!initialized || !microLoRA) {
    throw new Error('Training system not initialized');
  }
  totalForwards++;
  const useScoped = operatorType !== undefined && scopedLoRA;
  return useScoped
    ? scopedLoRA.forward_array(operatorType, input)
    : microLoRA.forward_array(input);
}
158
/**
 * Reward-based adaptation (reinforcement learning).
 *
 * Applies the improvement signal to the scoped adapter for the given
 * operator when available, otherwise to the shared MicroLoRA adapter.
 *
 * @param {number} improvement - Reward/improvement signal.
 * @param {number} [operatorType] - Optional OperatorType index (0-16).
 * @throws {Error} If the training system is not initialized.
 */
export function adaptWithReward(improvement, operatorType) {
  if (!initialized) {
    throw new Error('Training system not initialized');
  }
  const useScoped = operatorType !== undefined && scopedLoRA;
  if (useScoped) {
    scopedLoRA.adapt_with_reward(operatorType, improvement);
  } else if (microLoRA) {
    microLoRA.adapt_with_reward(improvement);
  }
  totalAdaptations++;
}
173
/**
 * Record a learning trajectory in the trajectory buffer.
 *
 * @param {Float32Array|number[]} embedding - Pattern embedding.
 * @param {number} operatorType - Operator type index.
 * @param {number} attentionType - Attention mechanism identifier.
 * @param {number} executionMs - Observed execution time (ms).
 * @param {number} baselineMs - Baseline execution time (ms).
 * @throws {Error} If the trajectory buffer is not initialized.
 */
export function recordTrajectory(embedding, operatorType, attentionType, executionMs, baselineMs) {
  if (trajectoryBuffer === null) {
    throw new Error('Trajectory buffer not initialized');
  }
  trajectoryBuffer.record(embedding, operatorType, attentionType, executionMs, baselineMs);
}
182
/**
 * Get trajectory statistics.
 *
 * @returns {object|null} Aggregate stats from the trajectory buffer, or
 *   null when the buffer is missing or empty.
 */
export function getTrajectoryStats() {
  const buf = trajectoryBuffer;
  if (!buf || buf.is_empty()) {
    return null;
  }
  return {
    successRate: buf.success_rate(),
    meanImprovement: buf.mean_improvement(),
    bestImprovement: buf.best_improvement(),
    totalCount: buf.total_count(),
    highQualityCount: buf.high_quality_count(0.1),
    variance: buf.variance(),
  };
}
198
/**
 * Compute attention with Flash Attention (2.49x-7.47x faster).
 *
 * @param {Float32Array|number[]} query - Query vector.
 * @param {Array} keys - Key vectors.
 * @param {Array} values - Value vectors.
 * @returns Attention output.
 * @throws {Error} If flash attention was not enabled at init time.
 */
export function computeFlashAttention(query, keys, values) {
  if (flashAttention === null) {
    throw new Error('Flash attention not initialized');
  }
  return flashAttention.computeRaw(query, keys, values);
}
207
/**
 * Compute MoE routing.
 *
 * @param {Float32Array|number[]} query - Query vector.
 * @param {Array} keys - Key vectors.
 * @param {Array} values - Value vectors.
 * @returns Attention output.
 * @throws {Error} If MoE attention was not enabled at init time.
 */
export function computeMoEAttention(query, keys, values) {
  if (moeAttention === null) {
    throw new Error('MoE attention not initialized');
  }
  return moeAttention.computeRaw(query, keys, values);
}
216
/**
 * Compute hyperbolic attention (for hierarchical patterns).
 *
 * @param {Float32Array|number[]} query - Query vector.
 * @param {Array} keys - Key vectors.
 * @param {Array} values - Value vectors.
 * @returns Attention output.
 * @throws {Error} If hyperbolic attention was not enabled at init time.
 */
export function computeHyperbolicAttention(query, keys, values) {
  if (hyperbolicAttention === null) {
    throw new Error('Hyperbolic attention not initialized');
  }
  return hyperbolicAttention.computeRaw(query, keys, values);
}
225
/**
 * Compute contrastive loss for training.
 *
 * Runs the forward (loss) pass first, then the backward (gradient) pass.
 *
 * @param {Float32Array|number[]} anchor - Anchor embedding.
 * @param {Array} positives - Positive samples.
 * @param {Array} negatives - Negative samples.
 * @returns {{loss: *, gradient: *}} InfoNCE loss value and its gradient.
 * @throws {Error} If the contrastive loss module is not initialized.
 */
export function computeContrastiveLoss(anchor, positives, negatives) {
  if (!contrastiveLoss) {
    throw new Error('Contrastive loss not initialized');
  }
  return {
    loss: contrastiveLoss.compute(anchor, positives, negatives),
    gradient: contrastiveLoss.backward(anchor, positives, negatives),
  };
}
236
/**
 * Optimizer step.
 *
 * Applies one AdamW update to the given parameters.
 *
 * @param {Float32Array|number[]} params - Current parameter values.
 * @param {Float32Array|number[]} gradients - Gradients for the parameters.
 * @returns Updated parameters.
 * @throws {Error} If the optimizer is not initialized.
 */
export function optimizerStep(params, gradients) {
  if (optimizer === null) {
    throw new Error('Optimizer not initialized');
  }
  return optimizer.step(params, gradients);
}
245
/**
 * Get curriculum difficulty for the current step.
 *
 * @param {number} step - Current training step.
 * @returns {number} Difficulty in [0, 1]; 1.0 (full difficulty) when no
 *   curriculum scheduler was configured.
 */
export function getCurriculumDifficulty(step) {
  // Without a curriculum scheduler, always train at full difficulty.
  return curriculum ? curriculum.getDifficulty(step) : 1.0;
}
254
/**
 * Mine hard negatives for better training.
 *
 * @param {Float32Array|number[]} anchor - Anchor embedding.
 * @param {Array} candidates - Candidate negatives to mine from.
 * @returns Selected hard negatives.
 * @throws {Error} If the hard negative miner is not initialized.
 */
export function mineHardNegatives(anchor, candidates) {
  if (hardMiner === null) {
    throw new Error('Hard negative miner not initialized');
  }
  return hardMiner.mine(anchor, candidates);
}
263
/**
 * Benchmark the training system.
 *
 * Runs the @ruvector/attention benchmark and caches the result in
 * module state for later retrieval via getTrainingStats().
 *
 * @param {number} [dim] - Embedding dimension (defaults to 256).
 * @param {number} [iterations] - Iteration count (defaults to 1000).
 * @returns {Promise<object>} Benchmark results.
 */
export async function benchmarkTraining(dim, iterations) {
  const attention = await import('@ruvector/attention');
  const result = attention.benchmarkAttention(dim || 256, 100, iterations || 1000);
  lastBenchmark = result;
  return result;
}
271
/**
 * Get training statistics.
 *
 * Always reports the base counters; per-component sections are included
 * only for components that have been initialized.
 *
 * @returns {object} Snapshot of training state and counters.
 */
export function getTrainingStats() {
  const stats = { initialized, totalAdaptations, totalForwards };

  if (microLoRA) {
    stats.microLoraStats = {
      paramCount: microLoRA.param_count(),
      adaptCount: microLoRA.adapt_count(),
      forwardCount: microLoRA.forward_count(),
      deltaNorm: microLoRA.delta_norm(),
    };
  }

  if (scopedLoRA) {
    stats.scopedLoraStats = {
      totalAdaptCount: scopedLoRA.total_adapt_count(),
      totalForwardCount: scopedLoRA.total_forward_count(),
    };
  }

  const hasTrajectories = trajectoryBuffer && !trajectoryBuffer.is_empty();
  if (hasTrajectories) {
    stats.trajectoryStats = getTrajectoryStats();
  }

  if (lastBenchmark) {
    stats.lastBenchmark = lastBenchmark;
  }

  return stats;
}
302
/**
 * Reset the training system.
 *
 * Resets each initialized WASM component in place and zeroes the local
 * counters; components stay allocated (use cleanup() to free them).
 */
export function resetTraining() {
  microLoRA?.reset();
  scopedLoRA?.reset_all();
  trajectoryBuffer?.reset();
  totalAdaptations = 0;
  totalForwards = 0;
}
315
/**
 * Export trained weights.
 *
 * @returns {object|null} Summary of the MicroLoRA adapter state plus
 *   trajectory statistics, or null when training is not initialized.
 */
export function exportWeights() {
  if (!initialized || !microLoRA) {
    return null;
  }
  const lora = microLoRA;
  return {
    dim: lora.dim(),
    deltaNorm: lora.delta_norm(),
    adaptCount: lora.adapt_count(),
    trajectoryStats: getTrajectoryStats(),
  };
}
329
/**
 * Cleanup resources.
 *
 * Frees WASM-owned memory for the LoRA adapters and trajectory buffer,
 * drops every module-level reference, and resets all counters so the
 * system can be re-initialized from scratch.
 */
export function cleanup() {
  // Free WASM-backed objects before dropping their references.
  microLoRA?.free();
  microLoRA = null;
  scopedLoRA?.free();
  scopedLoRA = null;
  trajectoryBuffer?.free();
  trajectoryBuffer = null;

  // Attention/optimizer objects are plain references; just release them.
  flashAttention = null;
  moeAttention = null;
  hyperbolicAttention = null;
  optimizer = null;
  contrastiveLoss = null;
  curriculum = null;
  hardMiner = null;

  initialized = false;
  totalAdaptations = 0;
  totalForwards = 0;
  lastBenchmark = null;
}
357
+ //# sourceMappingURL=ruvector-training.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ruvector-training.js","sourceRoot":"","sources":["../../../src/services/ruvector-training.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAmBH,2BAA2B;AAC3B,IAAI,SAAS,GAAyB,IAAI,CAAC;AAC3C,IAAI,UAAU,GAA0B,IAAI,CAAC;AAC7C,IAAI,gBAAgB,GAAgC,IAAI,CAAC;AACzD,IAAI,cAAc,GAA0B,IAAI,CAAC;AACjD,IAAI,YAAY,GAAwB,IAAI,CAAC;AAC7C,IAAI,mBAAmB,GAA+B,IAAI,CAAC;AAC3D,IAAI,SAAS,GAA0B,IAAI,CAAC;AAC5C,IAAI,eAAe,GAAuB,IAAI,CAAC;AAC/C,IAAI,UAAU,GAA+B,IAAI,CAAC;AAClD,IAAI,SAAS,GAA6B,IAAI,CAAC;AAE/C,iBAAiB;AACjB,IAAI,WAAW,GAAG,KAAK,CAAC;AACxB,IAAI,gBAAgB,GAAG,CAAC,CAAC;AACzB,IAAI,aAAa,GAAG,CAAC,CAAC;AACtB,IAAI,aAAa,GAA6B,IAAI,CAAC;AA4BnD;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,kBAAkB,CAAC,SAAyB,EAAE;IAKlE,MAAM,QAAQ,GAAa,EAAE,CAAC;IAC9B,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,IAAI,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC,mBAAmB;IACjE,MAAM,EAAE,GAAG,MAAM,CAAC,YAAY,IAAI,IAAI,CAAC;IACvC,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,GAAG,CAAC;IAElC,IAAI,CAAC;QACH,qEAAqE;QACrE,MAAM,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,CAAC;QAC9B,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC,CAAC;QACjD,MAAM,OAAO,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAE/C,iDAAiD;QACjD,MAAM,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;QAC3F,MAAM,UAAU,GAAG,EAAE,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAE7C,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,yBAAyB,CAAC,CAAC;QAC7D,YAAY,CAAC,QAAQ,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,CAAC,CAAC;QAE9C,SAAS,GAAG,IAAI,YAAY,CAAC,aAAa,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC;QAC3D,QAAQ,CAAC,IAAI,CAAC,cAAc,GAAG,wBAAwB,CAAC,CAAC;QAEzD,kDAAkD;QAClD,UAAU,GAAG,IAAI,YAAY,CAAC,cAAc,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC;QAC7D,UAAU,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAC;QACvC,QAAQ,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC;QAE3C,+BAA+B;QAC/B,gBAAgB,GAAG,IAAI,YAAY,CAAC,oBAAoB,CACtD,MAAM,CAAC,kBAAkB,IAAI,KAAK,EAClC,GAAG,CACJ,CAAC;QACF,QAAQ,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;QAElC,kCAAkC;QAClC,MAAM,SAAS,GAAG,MAAM,MAAM,CAAC,qBAAqB,CAAC,CAAC;QAEtD,IAAI,MAAM,CAAC,iBAAiB,KAAK,KAAK,EAAE,CAAC;YACvC,cAAc,GAAG,IAAI,SAAS,CAAC,cAAc,CAAC,GAAG,EAAE,EAAE,
CAAC,CAAC;YACvD,QAAQ,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAClC,CAAC;QAED,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;YAClB,YAAY,GAAG,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;YACxD,QAAQ,CAAC,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAC1C,CAAC;QAED,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;YACzB,mBAAmB,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;YAClE,QAAQ,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;QACvC,CAAC;QAED,gCAAgC;QAChC,SAAS,GAAG,IAAI,SAAS,CAAC,cAAc,CAAC,EAAE,EAAE,GAAG,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACrE,QAAQ,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAEjC,eAAe,GAAG,IAAI,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAClD,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QAE9B,uBAAuB;QACvB,IAAI,MAAM,CAAC,UAAU,EAAE,CAAC;YACtB,UAAU,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAC5C,MAAM,CAAC,UAAU,EACjB,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,UAAU,GAAG,GAAG,CAAC,CAC1D,CAAC;YACF,QAAQ,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;QACvC,CAAC;QAED,sFAAsF;QACtF,IAAI,CAAC;YACH,+EAA+E;YAC/E,SAAS,GAAG,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC;YAC5D,QAAQ,CAAC,IAAI,CAAC,sBAAsB,CAAC,CAAC;QACxC,CAAC;QAAC,MAAM,CAAC;YACP,4CAA4C;QAC9C,CAAC;QAED,WAAW,GAAG,IAAI,CAAC;QACnB,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC;IACrC,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,OAAO;YACL,OAAO,EAAE,KAAK;YACd,QAAQ;YACR,KAAK,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;SAC9D,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,YAAY,GAAG;IAC1B,OAAO,EAAE,CAAC;IACV,SAAS,EAAE,CAAC;IACZ,GAAG,EAAE,CAAC;IACN,SAAS,EAAE,CAAC;IACZ,aAAa,EAAE,CAAC;IAChB,UAAU,EAAE,CAAC;IACb,OAAO,EAAE,CAAC;IACV,WAAW,EAAE,CAAC;IACd,SAAS,EAAE,CAAC;IACZ,OAAO,EAAE,CAAC;IACV,MAAM,EAAE,EAAE;IACV,SAAS,EAAE,EAAE;IACb,YAAY,EAAE,EAAE;IAChB,YAAY,EAAE,EAAE;IAChB,QAAQ,EAAE,EAAE;IACZ,OAAO,EAAE,EAAE;IACX,SAAS,EAAE,EAAE;CACL,CAAC;AAEX;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAChC,SAAuB,EACvB,QAAsB,EACtB,YAAqB;IAErB,IAAI,CAAC,WAAW,IAAI,CAAC,SAAS,EAAE,CAAC;QAC/B,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;IACrD,CAAC;IAED,6CAA6C;IAC7C,IAAI,YAAY,KAAK,SAAS,IAAI,UAAU,EAAE,CAAC;QAC
7C,UAAU,CAAC,WAAW,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QAC/C,OAAO;YACL,SAAS,EAAE,UAAU,CAAC,UAAU,CAAC,YAAY,CAAC;YAC9C,UAAU,EAAE,UAAU,CAAC,WAAW,CAAC,YAAY,CAAC;SACjD,CAAC;IACJ,CAAC;IAED,gCAAgC;IAChC,SAAS,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;IAChC,gBAAgB,EAAE,CAAC;IAEnB,OAAO;QACL,SAAS,EAAE,SAAS,CAAC,UAAU,EAAE;QACjC,UAAU,EAAE,SAAS,CAAC,WAAW,EAAE;KACpC,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,OAAO,CACrB,KAAmB,EACnB,YAAqB;IAErB,IAAI,CAAC,WAAW,IAAI,CAAC,SAAS,EAAE,CAAC;QAC/B,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;IACrD,CAAC;IAED,aAAa,EAAE,CAAC;IAEhB,IAAI,YAAY,KAAK,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,OAAO,UAAU,CAAC,aAAa,CAAC,YAAY,EAAE,KAAK,CAAC,CAAC;IACvD,CAAC;IAED,OAAO,SAAS,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC;AACxC,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,eAAe,CAC7B,WAAmB,EACnB,YAAqB;IAErB,IAAI,CAAC,WAAW,EAAE,CAAC;QACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;IACrD,CAAC;IAED,IAAI,YAAY,KAAK,SAAS,IAAI,UAAU,EAAE,CAAC;QAC7C,UAAU,CAAC,iBAAiB,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC;IAC1D,CAAC;SAAM,IAAI,SAAS,EAAE,CAAC;QACrB,SAAS,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAC3C,CAAC;IAED,gBAAgB,EAAE,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,gBAAgB,CAC9B,SAAuB,EACvB,YAAoB,EACpB,aAAqB,EACrB,WAAmB,EACnB,UAAkB;IAElB,IAAI,CAAC,gBAAgB,EAAE,CAAC;QACtB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;IACvD,CAAC;IAED,gBAAgB,CAAC,MAAM,CACrB,SAAS,EACT,YAAY,EACZ,aAAa,EACb,WAAW,EACX,UAAU,CACX,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,kBAAkB;IAQhC,IAAI,CAAC,gBAAgB,IAAI,gBAAgB,CAAC,QAAQ,EAAE,EAAE,CAAC;QACrD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,WAAW,EAAE,gBAAgB,CAAC,YAAY,EAAE;QAC5C,eAAe,EAAE,gBAAgB,CAAC,gBAAgB,EAAE;QACpD,eAAe,EAAE,gBAAgB,CAAC,gBAAgB,EAAE;QACpD,UAAU,EAAE,gBAAgB,CAAC,WAAW,EAAE;QAC1C,gBAAgB,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,GAAG,CAAC;QAC1D,QAAQ,EAAE,gBAAgB,CAAC,QAAQ,EAAE;KACtC,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CACnC,KAAmB,EACnB,IAAoB,EACpB,MAAsB;IAEtB,IAAI,CAAC,cAAc,EAAE,CAAC;QACpB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;IACrD,CAAC;IAED,OAAO,cAAc,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AACxD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,mBAAmB,CACjC,KAA
mB,EACnB,IAAoB,EACpB,MAAsB;IAEtB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClB,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;IACnD,CAAC;IAED,OAAO,YAAY,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AACtD,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,0BAA0B,CACxC,KAAmB,EACnB,IAAoB,EACpB,MAAsB;IAEtB,IAAI,CAAC,mBAAmB,EAAE,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;IAC1D,CAAC;IAED,OAAO,mBAAmB,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;AAC7D,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,sBAAsB,CACpC,MAAoB,EACpB,SAAyB,EACzB,SAAyB;IAEzB,IAAI,CAAC,eAAe,EAAE,CAAC;QACrB,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;IACtD,CAAC;IAED,MAAM,IAAI,GAAG,eAAe,CAAC,OAAO,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACnE,MAAM,QAAQ,GAAG,eAAe,CAAC,QAAQ,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IAExE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC;AAC5B,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAC3B,MAAoB,EACpB,SAAuB;IAEvB,IAAI,CAAC,SAAS,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;IAC/C,CAAC;IAED,OAAO,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;AAC3C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,uBAAuB,CAAC,IAAY;IAClD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,GAAG,CAAC,CAAC,mCAAmC;IACjD,CAAC;IAED,OAAO,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,iBAAiB,CAC/B,MAAoB,EACpB,UAA0B;IAE1B,IAAI,CAAC,SAAS,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;IACzD,CAAC;IAED,OAAO,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;AAC5C,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,GAAY,EACZ,UAAmB;IAEnB,MAAM,SAAS,GAAG,MAAM,MAAM,CAAC,qBAAqB,CAAC,CAAC;IACtD,aAAa,GAAG,SAAS,CAAC,kBAAkB,CAAC,GAAG,IAAI,GAAG,EAAE,GAAG,EAAE,UAAU,IAAI,IAAI,CAAC,CAAC;IAClF,OAAO,aAAa,CAAC;AACvB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,gBAAgB;IAiB9B,MAAM,KAAK,GAAwC;QACjD,WAAW;QACX,gBAAgB;QAChB,aAAa;KACd,CAAC;IAEF,IAAI,SAAS,EAAE,CAAC;QACd,KAAK,CAAC,cAAc,GAAG;YACrB,UAAU,EAAE,SAAS,CAAC,WAAW,EAAE;YACnC,UAAU,EAAE,SAAS,CAAC,WAAW,EAAE;YACnC,YAAY,EAAE,SAAS,CAAC,aAAa,EAAE;YACvC,SAAS,EAAE,SAAS,CAAC,UAAU,EAAE;SAClC,CAAC;IACJ,CAAC;IAED,IAAI,UAAU,EAAE,CAAC;QACf,KAAK,CAAC,eAAe,GAAG;YACtB,eAAe,EAAE,UAAU,CAAC,i
BAAiB,EAAE;YAC/C,iBAAiB,EAAE,UAAU,CAAC,mBAAmB,EAAE;SACpD,CAAC;IACJ,CAAC;IAED,IAAI,gBAAgB,IAAI,CAAC,gBAAgB,CAAC,QAAQ,EAAE,EAAE,CAAC;QACrD,KAAK,CAAC,eAAe,GAAG,kBAAkB,EAAE,CAAC;IAC/C,CAAC;IAED,IAAI,aAAa,EAAE,CAAC;QAClB,KAAK,CAAC,aAAa,GAAG,aAAa,CAAC;IACtC,CAAC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa;IAC3B,IAAI,SAAS;QAAE,SAAS,CAAC,KAAK,EAAE,CAAC;IACjC,IAAI,UAAU;QAAE,UAAU,CAAC,SAAS,EAAE,CAAC;IACvC,IAAI,gBAAgB;QAAE,gBAAgB,CAAC,KAAK,EAAE,CAAC;IAE/C,gBAAgB,GAAG,CAAC,CAAC;IACrB,aAAa,GAAG,CAAC,CAAC;AACpB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,aAAa;IAM3B,IAAI,CAAC,WAAW,IAAI,CAAC,SAAS,EAAE,CAAC;QAC/B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,GAAG,EAAE,SAAS,CAAC,GAAG,EAAE;QACpB,SAAS,EAAE,SAAS,CAAC,UAAU,EAAE;QACjC,UAAU,EAAE,SAAS,CAAC,WAAW,EAAE;QACnC,eAAe,EAAE,kBAAkB,EAAE;KACtC,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,OAAO;IACrB,IAAI,SAAS,EAAE,CAAC;QACd,SAAS,CAAC,IAAI,EAAE,CAAC;QACjB,SAAS,GAAG,IAAI,CAAC;IACnB,CAAC;IACD,IAAI,UAAU,EAAE,CAAC;QACf,UAAU,CAAC,IAAI,EAAE,CAAC;QAClB,UAAU,GAAG,IAAI,CAAC;IACpB,CAAC;IACD,IAAI,gBAAgB,EAAE,CAAC;QACrB,gBAAgB,CAAC,IAAI,EAAE,CAAC;QACxB,gBAAgB,GAAG,IAAI,CAAC;IAC1B,CAAC;IAED,cAAc,GAAG,IAAI,CAAC;IACtB,YAAY,GAAG,IAAI,CAAC;IACpB,mBAAmB,GAAG,IAAI,CAAC;IAC3B,SAAS,GAAG,IAAI,CAAC;IACjB,eAAe,GAAG,IAAI,CAAC;IACvB,UAAU,GAAG,IAAI,CAAC;IAClB,SAAS,GAAG,IAAI,CAAC;IAEjB,WAAW,GAAG,KAAK,CAAC;IACpB,gBAAgB,GAAG,CAAC,CAAC;IACrB,aAAa,GAAG,CAAC,CAAC;IAClB,aAAa,GAAG,IAAI,CAAC;AACvB,CAAC"}