@sparkleideas/neural 3.5.2-patch.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +260 -0
- package/__tests__/README.md +235 -0
- package/__tests__/algorithms.test.ts +582 -0
- package/__tests__/patterns.test.ts +549 -0
- package/__tests__/sona.test.ts +445 -0
- package/docs/SONA_INTEGRATION.md +460 -0
- package/docs/SONA_QUICKSTART.md +168 -0
- package/examples/sona-usage.ts +318 -0
- package/package.json +23 -0
- package/src/algorithms/a2c.d.ts +86 -0
- package/src/algorithms/a2c.d.ts.map +1 -0
- package/src/algorithms/a2c.js +361 -0
- package/src/algorithms/a2c.js.map +1 -0
- package/src/algorithms/a2c.ts +478 -0
- package/src/algorithms/curiosity.d.ts +82 -0
- package/src/algorithms/curiosity.d.ts.map +1 -0
- package/src/algorithms/curiosity.js +392 -0
- package/src/algorithms/curiosity.js.map +1 -0
- package/src/algorithms/curiosity.ts +509 -0
- package/src/algorithms/decision-transformer.d.ts +82 -0
- package/src/algorithms/decision-transformer.d.ts.map +1 -0
- package/src/algorithms/decision-transformer.js +415 -0
- package/src/algorithms/decision-transformer.js.map +1 -0
- package/src/algorithms/decision-transformer.ts +521 -0
- package/src/algorithms/dqn.d.ts +72 -0
- package/src/algorithms/dqn.d.ts.map +1 -0
- package/src/algorithms/dqn.js +303 -0
- package/src/algorithms/dqn.js.map +1 -0
- package/src/algorithms/dqn.ts +382 -0
- package/src/algorithms/index.d.ts +32 -0
- package/src/algorithms/index.d.ts.map +1 -0
- package/src/algorithms/index.js +74 -0
- package/src/algorithms/index.js.map +1 -0
- package/src/algorithms/index.ts +122 -0
- package/src/algorithms/ppo.d.ts +72 -0
- package/src/algorithms/ppo.d.ts.map +1 -0
- package/src/algorithms/ppo.js +331 -0
- package/src/algorithms/ppo.js.map +1 -0
- package/src/algorithms/ppo.ts +429 -0
- package/src/algorithms/q-learning.d.ts +77 -0
- package/src/algorithms/q-learning.d.ts.map +1 -0
- package/src/algorithms/q-learning.js +259 -0
- package/src/algorithms/q-learning.js.map +1 -0
- package/src/algorithms/q-learning.ts +333 -0
- package/src/algorithms/sarsa.d.ts +82 -0
- package/src/algorithms/sarsa.d.ts.map +1 -0
- package/src/algorithms/sarsa.js +297 -0
- package/src/algorithms/sarsa.js.map +1 -0
- package/src/algorithms/sarsa.ts +383 -0
- package/src/algorithms/tmp.json +0 -0
- package/src/application/index.ts +11 -0
- package/src/application/services/neural-application-service.ts +217 -0
- package/src/domain/entities/pattern.ts +169 -0
- package/src/domain/index.ts +18 -0
- package/src/domain/services/learning-service.ts +256 -0
- package/src/index.d.ts +118 -0
- package/src/index.d.ts.map +1 -0
- package/src/index.js +201 -0
- package/src/index.js.map +1 -0
- package/src/index.ts +363 -0
- package/src/modes/balanced.d.ts +60 -0
- package/src/modes/balanced.d.ts.map +1 -0
- package/src/modes/balanced.js +234 -0
- package/src/modes/balanced.js.map +1 -0
- package/src/modes/balanced.ts +299 -0
- package/src/modes/base.ts +163 -0
- package/src/modes/batch.d.ts +82 -0
- package/src/modes/batch.d.ts.map +1 -0
- package/src/modes/batch.js +316 -0
- package/src/modes/batch.js.map +1 -0
- package/src/modes/batch.ts +434 -0
- package/src/modes/edge.d.ts +85 -0
- package/src/modes/edge.d.ts.map +1 -0
- package/src/modes/edge.js +310 -0
- package/src/modes/edge.js.map +1 -0
- package/src/modes/edge.ts +409 -0
- package/src/modes/index.d.ts +55 -0
- package/src/modes/index.d.ts.map +1 -0
- package/src/modes/index.js +83 -0
- package/src/modes/index.js.map +1 -0
- package/src/modes/index.ts +16 -0
- package/src/modes/real-time.d.ts +58 -0
- package/src/modes/real-time.d.ts.map +1 -0
- package/src/modes/real-time.js +196 -0
- package/src/modes/real-time.js.map +1 -0
- package/src/modes/real-time.ts +257 -0
- package/src/modes/research.d.ts +79 -0
- package/src/modes/research.d.ts.map +1 -0
- package/src/modes/research.js +389 -0
- package/src/modes/research.js.map +1 -0
- package/src/modes/research.ts +486 -0
- package/src/modes/tmp.json +0 -0
- package/src/pattern-learner.d.ts +117 -0
- package/src/pattern-learner.d.ts.map +1 -0
- package/src/pattern-learner.js +603 -0
- package/src/pattern-learner.js.map +1 -0
- package/src/pattern-learner.ts +757 -0
- package/src/reasoning-bank.d.ts +259 -0
- package/src/reasoning-bank.d.ts.map +1 -0
- package/src/reasoning-bank.js +993 -0
- package/src/reasoning-bank.js.map +1 -0
- package/src/reasoning-bank.ts +1279 -0
- package/src/reasoningbank-adapter.ts +697 -0
- package/src/sona-integration.d.ts +168 -0
- package/src/sona-integration.d.ts.map +1 -0
- package/src/sona-integration.js +316 -0
- package/src/sona-integration.js.map +1 -0
- package/src/sona-integration.ts +432 -0
- package/src/sona-manager.d.ts +147 -0
- package/src/sona-manager.d.ts.map +1 -0
- package/src/sona-manager.js +695 -0
- package/src/sona-manager.js.map +1 -0
- package/src/sona-manager.ts +835 -0
- package/src/tmp.json +0 -0
- package/src/types.d.ts +431 -0
- package/src/types.d.ts.map +1 -0
- package/src/types.js +11 -0
- package/src/types.js.map +1 -0
- package/src/types.ts +590 -0
- package/tmp.json +0 -0
- package/tsconfig.json +9 -0
- package/vitest.config.ts +19 -0
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Base Mode Implementation
|
|
3
|
+
*
|
|
4
|
+
* Separated to avoid circular dependencies.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type {
|
|
8
|
+
SONAModeConfig,
|
|
9
|
+
ModeOptimizations,
|
|
10
|
+
Trajectory,
|
|
11
|
+
Pattern,
|
|
12
|
+
PatternMatch,
|
|
13
|
+
LoRAWeights,
|
|
14
|
+
EWCState,
|
|
15
|
+
} from '../types.js';
|
|
16
|
+
|
|
17
|
+
/**
 * Common interface for all mode implementations.
 *
 * Every SONA execution mode (batch, edge, real-time, research, balanced)
 * implements this contract so the mode manager can swap them interchangeably.
 * All methods are async; implementations may defer or batch work internally.
 */
export interface ModeImplementation {
  /** Mode identifier (e.g. "batch", "edge") — stable string used for dispatch/stats. */
  readonly mode: string;

  /** Initialize the mode. Must be called before any other method. */
  initialize(): Promise<void>;

  /** Cleanup resources (timers, queues, buffers). Safe to call more than once. */
  cleanup(): Promise<void>;

  /**
   * Find similar patterns (k-nearest by similarity).
   * The candidate `patterns` array is supplied by the caller — the mode does
   * not own pattern storage.
   */
  findPatterns(
    embedding: Float32Array,
    k: number,
    patterns: Pattern[]
  ): Promise<PatternMatch[]>;

  /**
   * Perform a learning step over the given trajectories.
   * Returns a scalar improvement estimate (implementation-defined scale).
   */
  learn(
    trajectories: Trajectory[],
    config: SONAModeConfig,
    ewcState: EWCState
  ): Promise<number>;

  /**
   * Apply LoRA adaptations to an input embedding.
   * When `weights` is omitted, implementations are expected to pass the
   * input through unchanged.
   */
  applyLoRA(
    input: Float32Array,
    weights?: LoRAWeights
  ): Promise<Float32Array>;

  /** Get mode-specific stats as a flat name → number map. */
  getStats(): Record<string, number>;
}
|
|
53
|
+
|
|
54
|
+
/**
 * Base class for mode implementations.
 *
 * Provides shared numeric kernels (cosine similarity, LoRA transform) and
 * trivial lifecycle handling; concrete modes supply the pattern search,
 * learning, and LoRA-application strategies.
 */
export abstract class BaseModeImplementation implements ModeImplementation {
  abstract readonly mode: string;

  // Mode configuration and optimization knobs, injected at construction.
  protected config: SONAModeConfig;
  protected optimizations: ModeOptimizations;
  // Flipped by initialize()/cleanup(); subclasses may consult it.
  protected isInitialized = false;

  constructor(config: SONAModeConfig, optimizations: ModeOptimizations) {
    this.config = config;
    this.optimizations = optimizations;
  }

  async initialize(): Promise<void> {
    this.isInitialized = true;
  }

  async cleanup(): Promise<void> {
    this.isInitialized = false;
  }

  /**
   * Compute cosine similarity between two vectors.
   *
   * Returns 0 (rather than throwing) when the lengths differ or either
   * vector has zero norm. The main loop is manually unrolled 4-wide —
   * a SIMD-style optimization hint for the JIT, not actual SIMD intrinsics.
   */
  protected cosineSimilarity(a: Float32Array, b: Float32Array): number {
    if (a.length !== b.length) return 0;

    let dotProduct = 0;
    let normA = 0;
    let normB = 0;

    // Process 4 elements at a time for SIMD-like behavior.
    const len = a.length;
    const simdLen = len - (len % 4);

    for (let i = 0; i < simdLen; i += 4) {
      dotProduct += a[i] * b[i] + a[i+1] * b[i+1] + a[i+2] * b[i+2] + a[i+3] * b[i+3];
      normA += a[i] * a[i] + a[i+1] * a[i+1] + a[i+2] * a[i+2] + a[i+3] * a[i+3];
      normB += b[i] * b[i] + b[i+1] * b[i+1] + b[i+2] * b[i+2] + b[i+3] * b[i+3];
    }

    // Handle remaining elements (tail when len is not a multiple of 4).
    for (let i = simdLen; i < len; i++) {
      dotProduct += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }

    const denom = Math.sqrt(normA) * Math.sqrt(normB);
    return denom > 0 ? dotProduct / denom : 0;
  }

  /**
   * Apply a LoRA delta: output = input + B(A · input) (simplified — no
   * alpha/rank scaling here; callers blend the result themselves).
   *
   * Layout implied by the indexing below: A is read as A[d * rank + r]
   * (dim × rank, row-major over dim) and B as B[r * dim + d]
   * (rank × dim, row-major over rank). A and B must each hold at least
   * dim * rank elements — no bounds check is performed.
   */
  protected applyLoRATransform(
    input: Float32Array,
    A: Float32Array,
    B: Float32Array,
    rank: number
  ): Float32Array {
    const dim = input.length;
    const output = new Float32Array(dim);

    // Start from the identity path: output = input.
    output.set(input);

    // Compute A * input -> intermediate (rank dimensions).
    const intermediate = new Float32Array(rank);
    for (let r = 0; r < rank; r++) {
      let sum = 0;
      for (let d = 0; d < dim; d++) {
        sum += A[d * rank + r] * input[d];
      }
      intermediate[r] = sum;
    }

    // Compute B * intermediate and add it as the delta (dim dimensions).
    for (let d = 0; d < dim; d++) {
      let sum = 0;
      for (let r = 0; r < rank; r++) {
        sum += B[r * dim + d] * intermediate[r];
      }
      output[d] += sum;
    }

    return output;
  }

  // Strategy methods each concrete mode must provide (see ModeImplementation).
  abstract findPatterns(
    embedding: Float32Array,
    k: number,
    patterns: Pattern[]
  ): Promise<PatternMatch[]>;

  abstract learn(
    trajectories: Trajectory[],
    config: SONAModeConfig,
    ewcState: EWCState
  ): Promise<number>;

  abstract applyLoRA(
    input: Float32Array,
    weights?: LoRAWeights
  ): Promise<Float32Array>;

  abstract getStats(): Record<string, number>;
}
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Batch Mode Implementation
|
|
3
|
+
*
|
|
4
|
+
* Optimized for high-throughput processing with:
|
|
5
|
+
* - Large batch sizes (128)
|
|
6
|
+
* - Rank-8 LoRA
|
|
7
|
+
* - Gradient accumulation
|
|
8
|
+
* - Async batch processing
|
|
9
|
+
* - 50ms latency budget
|
|
10
|
+
*/
|
|
11
|
+
import type { SONAModeConfig, Trajectory, Pattern, PatternMatch, LoRAWeights, EWCState } from '../types.js';
|
|
12
|
+
import { BaseModeImplementation } from './index.js';
|
|
13
|
+
/**
 * Batch mode for high-throughput processing.
 *
 * NOTE(review): this appears to be a compiler-generated declaration file
 * (a matching .d.ts.map ships alongside it) — confirm before hand-editing,
 * as regeneration would overwrite changes.
 */
export declare class BatchMode extends BaseModeImplementation {
    readonly mode = "batch";
    // Queued pattern-search requests and trajectories awaiting batch processing.
    private patternQueue;
    private learningQueue;
    // Reusable embedding buffers for batched LoRA application.
    private embeddingBuffer;
    private batchEmbeddings;
    // Per-domain gradient accumulators and the step counter that gates flushes.
    private accumulatedGradients;
    private gradientSteps;
    // Batch-cycle state: in-flight flag and the pending setTimeout handle.
    private isBatchProcessing;
    private batchTimer;
    // Running statistics surfaced via getStats().
    private totalBatches;
    private totalItems;
    private totalBatchTime;
    private learnIterations;
    initialize(): Promise<void>;
    cleanup(): Promise<void>;
    /**
     * Find patterns - queues for batch processing
     */
    findPatterns(embedding: Float32Array, k: number, patterns: Pattern[]): Promise<PatternMatch[]>;
    /**
     * Learn from trajectories - accumulates for batch
     */
    learn(trajectories: Trajectory[], config: SONAModeConfig, ewcState: EWCState): Promise<number>;
    /**
     * Apply LoRA with rank-8
     */
    applyLoRA(input: Float32Array, weights?: LoRAWeights): Promise<Float32Array>;
    getStats(): Record<string, number>;
    /**
     * Direct pattern matching without batching
     */
    private findPatternsDirect;
    /**
     * Direct LoRA application
     */
    private applyLoRADirect;
    /**
     * Schedule batch processing
     */
    private scheduleBatchProcessing;
    /**
     * Process pattern requests in batch
     */
    private processBatchPatterns;
    /**
     * Batch similarity search
     */
    private batchSimilaritySearch;
    /**
     * Process batch learning
     */
    private processBatchLearning;
    /**
     * Accumulate gradient from trajectory
     */
    private accumulateTrajectoryGradient;
    /**
     * Apply accumulated gradients with EWC
     */
    private applyAccumulatedGradients;
    /**
     * Apply LoRA to batch of inputs
     */
    private applyLoRABatch;
}
|
|
82
|
+
//# sourceMappingURL=batch.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch.d.ts","sourceRoot":"","sources":["batch.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EACV,cAAc,EAEd,UAAU,EACV,OAAO,EACP,YAAY,EACZ,WAAW,EACX,QAAQ,EACT,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAEpD;;GAEG;AACH,qBAAa,SAAU,SAAQ,sBAAsB;IACnD,QAAQ,CAAC,IAAI,WAAW;IAGxB,OAAO,CAAC,YAAY,CAIZ;IACR,OAAO,CAAC,aAAa,CAAoB;IAGzC,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,eAAe,CAAsB;IAG7C,OAAO,CAAC,oBAAoB,CAAwC;IACpE,OAAO,CAAC,aAAa,CAAK;IAG1B,OAAO,CAAC,iBAAiB,CAAS;IAClC,OAAO,CAAC,UAAU,CAA8C;IAGhE,OAAO,CAAC,YAAY,CAAK;IACzB,OAAO,CAAC,UAAU,CAAK;IACvB,OAAO,CAAC,cAAc,CAAK;IAC3B,OAAO,CAAC,eAAe,CAAK;IAEtB,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ3B,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAU9B;;OAEG;IACG,YAAY,CAChB,SAAS,EAAE,YAAY,EACvB,CAAC,EAAE,MAAM,EACT,QAAQ,EAAE,OAAO,EAAE,GAClB,OAAO,CAAC,YAAY,EAAE,CAAC;IAa1B;;OAEG;IACG,KAAK,CACT,YAAY,EAAE,UAAU,EAAE,EAC1B,MAAM,EAAE,cAAc,EACtB,QAAQ,EAAE,QAAQ,GACjB,OAAO,CAAC,MAAM,CAAC;IAoBlB;;OAEG;IACG,SAAS,CACb,KAAK,EAAE,YAAY,EACnB,OAAO,CAAC,EAAE,WAAW,GACpB,OAAO,CAAC,YAAY,CAAC;IAqBxB,QAAQ,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAgBlC;;OAEG;IACH,OAAO,CAAC,kBAAkB;IAqB1B;;OAEG;YACW,eAAe;IA6B7B;;OAEG;IACH,OAAO,CAAC,uBAAuB;IAQ/B;;OAEG;YACW,oBAAoB;IA8BlC;;OAEG;IACH,OAAO,CAAC,qBAAqB;IAwB7B;;OAEG;YACW,oBAAoB;IAiDlC;;OAEG;IACH,OAAO,CAAC,4BAA4B;IAwBpC;;OAEG;YACW,yBAAyB;IA6BvC;;OAEG;YACW,cAAc;CA8B7B"}
|
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Batch Mode Implementation
|
|
3
|
+
*
|
|
4
|
+
* Optimized for high-throughput processing with:
|
|
5
|
+
* - Large batch sizes (128)
|
|
6
|
+
* - Rank-8 LoRA
|
|
7
|
+
* - Gradient accumulation
|
|
8
|
+
* - Async batch processing
|
|
9
|
+
* - 50ms latency budget
|
|
10
|
+
*/
|
|
11
|
+
import { BaseModeImplementation } from './index.js';
|
|
12
|
+
/**
 * Batch mode for high-throughput processing.
 *
 * Strategy: small workloads are served synchronously; larger ones are
 * queued and drained on a short setTimeout so concurrent requests share
 * one pass over the pattern set. Learning accumulates gradients per
 * domain and flushes them every few steps with an EWC adjustment.
 */
export class BatchMode extends BaseModeImplementation {
    mode = 'batch';
    // Batch processing queues: pending pattern-search requests
    // ({ embedding, k, resolve }) and trajectories awaiting a learn batch.
    patternQueue = [];
    learningQueue = [];
    // Batch buffers. embeddingBuffer is declared but never used below —
    // NOTE(review): possibly dead state, confirm against the .ts source.
    embeddingBuffer = null;
    batchEmbeddings = [];
    // Gradient accumulation: domain key -> Float32Array accumulator.
    accumulatedGradients = new Map();
    gradientSteps = 0;
    // Batch processing state: in-flight flag and pending timer handle.
    isBatchProcessing = false;
    batchTimer = null;
    // Stats counters surfaced via getStats().
    totalBatches = 0;
    totalItems = 0;
    totalBatchTime = 0;
    learnIterations = 0;
    /** Reset all queues/accumulators and mark the mode initialized. */
    async initialize() {
        await super.initialize();
        this.patternQueue = [];
        this.learningQueue = [];
        this.accumulatedGradients.clear();
        this.gradientSteps = 0;
    }
    /**
     * Cancel any pending batch timer and drop all queued work.
     * Queued findPatterns promises are abandoned unresolved here —
     * NOTE(review): callers awaiting them will hang; confirm intended.
     * The timer handle is cleared but not nulled, so a subsequent
     * scheduleBatchProcessing call would see a stale truthy handle.
     */
    async cleanup() {
        if (this.batchTimer) {
            clearTimeout(this.batchTimer);
        }
        this.patternQueue = [];
        this.learningQueue = [];
        this.accumulatedGradients.clear();
        await super.cleanup();
    }
    /**
     * Find patterns — queues for batch processing.
     * Small candidate sets (< 100 patterns) are answered synchronously;
     * larger ones are queued and resolved on the next batch flush.
     */
    async findPatterns(embedding, k, patterns) {
        // For immediate needs, process synchronously.
        if (patterns.length < 100) {
            return this.findPatternsDirect(embedding, k, patterns);
        }
        // Queue for batch processing; resolve is called by processBatchPatterns.
        return new Promise(resolve => {
            this.patternQueue.push({ embedding, k, resolve });
            this.scheduleBatchProcessing(patterns);
        });
    }
    /**
     * Learn from trajectories — accumulates into the learning queue and
     * only runs a real batch step once the queue reaches config.batchSize.
     * Below that threshold it returns a quality-based estimate instead.
     */
    async learn(trajectories, config, ewcState) {
        const startTime = performance.now();
        if (trajectories.length === 0)
            return 0;
        // Add to learning queue
        this.learningQueue.push(...trajectories);
        // Process when queue is full
        if (this.learningQueue.length >= config.batchSize) {
            return this.processBatchLearning(config, ewcState);
        }
        // Return estimated improvement derived from mean quality (0.5 baseline).
        const avgQuality = trajectories.reduce((s, t) => s + t.qualityScore, 0) / trajectories.length;
        this.totalBatchTime += performance.now() - startTime;
        return Math.max(0, avgQuality - 0.5) * 0.5; // Partial estimate
    }
    /**
     * Apply LoRA adaptation. Without weights the input passes through
     * unchanged. Inputs are staged in the shared batchEmbeddings buffer;
     * since there is no await between push and the length check, a lone
     * caller always takes the direct path. NOTE(review): with concurrent
     * callers the batch path recomputes earlier callers' embeddings and
     * returns only the last output (this caller's own) — confirm the
     * shared-buffer design is intentional.
     */
    async applyLoRA(input, weights) {
        if (!weights) {
            return input;
        }
        // Batch mode can process multiple inputs efficiently.
        this.batchEmbeddings.push(new Float32Array(input));
        // Process immediately for single requests.
        if (this.batchEmbeddings.length === 1) {
            const output = await this.applyLoRADirect(input, weights);
            this.batchEmbeddings = [];
            return output;
        }
        // For multiple inputs, process as batch; the last output corresponds
        // to the embedding pushed synchronously above.
        const outputs = await this.applyLoRABatch(this.batchEmbeddings, weights);
        this.batchEmbeddings = [];
        return outputs[outputs.length - 1];
    }
    /** Snapshot of batching statistics (averages guarded against /0). */
    getStats() {
        return {
            totalBatches: this.totalBatches,
            avgItemsPerBatch: this.totalBatches > 0 ? this.totalItems / this.totalBatches : 0,
            avgBatchTimeMs: this.totalBatches > 0 ? this.totalBatchTime / this.totalBatches : 0,
            pendingPatternRequests: this.patternQueue.length,
            pendingTrajectories: this.learningQueue.length,
            accumulatedGradientSteps: this.gradientSteps,
            learnIterations: this.learnIterations,
        };
    }
    // ========================================================================
    // Direct processing (for small batches)
    // ========================================================================
    /**
     * Direct pattern matching without batching: score every candidate by
     * cosine similarity, sort descending, return the top k.
     * Confidence = similarity * pattern.successRate; latencyMs is a stub 0.
     */
    findPatternsDirect(embedding, k, patterns) {
        const matches = [];
        for (const pattern of patterns) {
            const similarity = this.cosineSimilarity(embedding, pattern.embedding);
            matches.push({
                pattern,
                similarity,
                confidence: similarity * pattern.successRate,
                latencyMs: 0,
            });
        }
        matches.sort((a, b) => b.similarity - a.similarity);
        return matches.slice(0, k);
    }
    /**
     * Direct LoRA application: for each of the four attention projections
     * that has both A and B factors, blend the adapted vector into the
     * output with a fixed alpha of 0.25. Each module's blend is applied to
     * the running output but adapts from the ORIGINAL input — so modules
     * compose by repeated (1-alpha) decay of earlier contributions.
     */
    async applyLoRADirect(input, weights) {
        const output = new Float32Array(input.length);
        output.set(input);
        const rank = this.config.loraRank;
        for (const module of ['q_proj', 'v_proj', 'k_proj', 'o_proj']) {
            const A = weights.A.get(module);
            const B = weights.B.get(module);
            if (A && B) {
                const adapted = this.applyLoRATransform(input, A, B, rank);
                const alpha = 0.25;
                for (let i = 0; i < output.length; i++) {
                    output[i] = output[i] * (1 - alpha) + adapted[i] * alpha;
                }
            }
        }
        return output;
    }
    // ========================================================================
    // Batch processing
    // ========================================================================
    /**
     * Schedule a batch flush if one is not already pending.
     * NOTE(review): `patterns` is captured from the FIRST caller that arms
     * the timer; requests queued later (possibly with different candidate
     * arrays) are all resolved against that first array — confirm intended.
     */
    scheduleBatchProcessing(patterns) {
        if (this.batchTimer)
            return;
        this.batchTimer = setTimeout(() => {
            this.processBatchPatterns(patterns);
        }, 10); // Wait 10ms to accumulate requests
    }
    /**
     * Drain the pattern queue: answer every queued request against the
     * given candidate set, then update batch statistics.
     */
    async processBatchPatterns(patterns) {
        this.batchTimer = null;
        if (this.patternQueue.length === 0)
            return;
        const startTime = performance.now();
        this.isBatchProcessing = true;
        // Swap the queue out first so new requests queue for the next cycle.
        const batch = this.patternQueue;
        this.patternQueue = [];
        // Pre-compute pattern embeddings matrix (shared across all queries).
        const patternMatrix = patterns.map(p => p.embedding);
        // Process all queries in batch.
        for (const request of batch) {
            const matches = this.batchSimilaritySearch(request.embedding, request.k, patterns, patternMatrix);
            request.resolve(matches);
        }
        this.totalBatches++;
        this.totalItems += batch.length;
        this.totalBatchTime += performance.now() - startTime;
        this.isBatchProcessing = false;
    }
    /**
     * Batch similarity search: score one query against the precomputed
     * embedding matrix, sort descending, and map the top-k indices back to
     * PatternMatch objects. Same scoring as findPatternsDirect.
     */
    batchSimilaritySearch(query, k, patterns, patternMatrix) {
        const similarities = [];
        for (let i = 0; i < patternMatrix.length; i++) {
            const sim = this.cosineSimilarity(query, patternMatrix[i]);
            similarities.push({ idx: i, sim });
        }
        similarities.sort((a, b) => b.sim - a.sim);
        const topK = similarities.slice(0, k);
        return topK.map(s => ({
            pattern: patterns[s.idx],
            similarity: s.sim,
            confidence: s.sim * patterns[s.idx].successRate,
            latencyMs: 0,
        }));
    }
    /**
     * Process one learning batch: take batchSize trajectories off the
     * queue, split by quality threshold, accumulate positive gradients
     * for good trajectories and scaled-down negative (contrastive)
     * gradients for an equal number of bad ones, and flush accumulators
     * every 4 steps. Returns max(0, mean good quality - 0.5).
     */
    async processBatchLearning(config, ewcState) {
        const startTime = performance.now();
        const batch = this.learningQueue.slice(0, config.batchSize);
        this.learningQueue = this.learningQueue.slice(config.batchSize);
        const qualityThreshold = config.qualityThreshold;
        const learningRate = config.learningRate;
        // Separate by quality
        const good = batch.filter(t => t.qualityScore >= qualityThreshold);
        const bad = batch.filter(t => t.qualityScore < qualityThreshold);
        if (good.length === 0) {
            this.totalBatchTime += performance.now() - startTime;
            return 0;
        }
        // Accumulate gradients from good trajectories.
        for (const trajectory of good) {
            this.accumulateTrajectoryGradient(trajectory, learningRate);
        }
        // Contrastive learning from bad examples (capped at good.length,
        // with a negative, down-scaled learning rate).
        for (const trajectory of bad.slice(0, good.length)) {
            this.accumulateTrajectoryGradient(trajectory, -learningRate * 0.3);
        }
        this.gradientSteps++;
        // Apply accumulated gradients every N steps (N = 4).
        if (this.gradientSteps >= 4) {
            await this.applyAccumulatedGradients(ewcState, config.ewcLambda);
            this.gradientSteps = 0;
        }
        // Compute improvement relative to the 0.5 quality baseline.
        const avgQuality = good.reduce((s, t) => s + t.qualityScore, 0) / good.length;
        const improvement = avgQuality - 0.5;
        this.learnIterations++;
        this.totalBatchTime += performance.now() - startTime;
        return Math.max(0, improvement);
    }
    /**
     * Accumulate one trajectory's contribution into its domain accumulator.
     * The accumulator's dimension is fixed by the FIRST trajectory seen for
     * a domain (steps[0].stateAfter.length); later steps are clipped to the
     * shorter of the two lengths. Contribution per step:
     * stateAfter[i] * qualityScore * scale * reward.
     */
    accumulateTrajectoryGradient(trajectory, scale) {
        if (trajectory.steps.length === 0)
            return;
        const key = trajectory.domain;
        let gradient = this.accumulatedGradients.get(key);
        if (!gradient) {
            const dim = trajectory.steps[0].stateAfter.length;
            gradient = new Float32Array(dim);
            this.accumulatedGradients.set(key, gradient);
        }
        // Add trajectory contribution, weighted by quality and scale.
        const weight = trajectory.qualityScore * scale;
        for (const step of trajectory.steps) {
            for (let i = 0; i < Math.min(gradient.length, step.stateAfter.length); i++) {
                gradient[i] += step.stateAfter[i] * weight * step.reward;
            }
        }
    }
    /**
     * Flush accumulators: L2-normalize each domain gradient, subtract an
     * EWC penalty (lambda * fisher * (g - mean)) where Fisher/means exist
     * for the domain, then zero the accumulator.
     * NOTE(review): the adjusted gradient is never written to any weights
     * before being cleared — as visible here this is effectively a no-op
     * beyond resetting accumulators; confirm whether application happens
     * elsewhere or this is a stub.
     */
    async applyAccumulatedGradients(ewcState, ewcLambda) {
        for (const [key, gradient] of this.accumulatedGradients) {
            // Normalize gradient to unit L2 norm (skip zero gradients).
            const norm = Math.sqrt(gradient.reduce((s, v) => s + v * v, 0));
            if (norm > 0) {
                for (let i = 0; i < gradient.length; i++) {
                    gradient[i] /= norm;
                }
            }
            // Apply EWC penalty against the stored Fisher/means for this domain.
            const fisher = ewcState.fisher.get(key);
            const means = ewcState.means.get(key);
            if (fisher && means) {
                for (let i = 0; i < gradient.length; i++) {
                    const penalty = ewcLambda * fisher[i] * (gradient[i] - means[i]);
                    gradient[i] -= penalty;
                }
            }
            // Clear gradient for next accumulation cycle.
            gradient.fill(0);
        }
    }
    /**
     * Apply LoRA to a batch of inputs — same per-input math as
     * applyLoRADirect (four projections, fixed alpha 0.25), looped over
     * the batch for cache efficiency. Returns outputs in input order.
     */
    async applyLoRABatch(inputs, weights) {
        const outputs = [];
        const rank = this.config.loraRank;
        // Process all inputs together for cache efficiency.
        for (const input of inputs) {
            const output = new Float32Array(input.length);
            output.set(input);
            for (const module of ['q_proj', 'v_proj', 'k_proj', 'o_proj']) {
                const A = weights.A.get(module);
                const B = weights.B.get(module);
                if (A && B) {
                    const adapted = this.applyLoRATransform(input, A, B, rank);
                    const alpha = 0.25;
                    for (let i = 0; i < output.length; i++) {
                        output[i] = output[i] * (1 - alpha) + adapted[i] * alpha;
                    }
                }
            }
            outputs.push(output);
        }
        return outputs;
    }
}
|
|
316
|
+
//# sourceMappingURL=batch.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch.js","sourceRoot":"","sources":["batch.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAWH,OAAO,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAEpD;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,sBAAsB;IAC1C,IAAI,GAAG,OAAO,CAAC;IAExB,0BAA0B;IAClB,YAAY,GAIf,EAAE,CAAC;IACA,aAAa,GAAiB,EAAE,CAAC;IAEzC,gBAAgB;IACR,eAAe,GAAwB,IAAI,CAAC;IAC5C,eAAe,GAAmB,EAAE,CAAC;IAE7C,wBAAwB;IAChB,oBAAoB,GAA8B,IAAI,GAAG,EAAE,CAAC;IAC5D,aAAa,GAAG,CAAC,CAAC;IAE1B,yBAAyB;IACjB,iBAAiB,GAAG,KAAK,CAAC;IAC1B,UAAU,GAAyC,IAAI,CAAC;IAEhE,QAAQ;IACA,YAAY,GAAG,CAAC,CAAC;IACjB,UAAU,GAAG,CAAC,CAAC;IACf,cAAc,GAAG,CAAC,CAAC;IACnB,eAAe,GAAG,CAAC,CAAC;IAE5B,KAAK,CAAC,UAAU;QACd,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;QACzB,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;QACvB,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC;QACxB,IAAI,CAAC,oBAAoB,CAAC,KAAK,EAAE,CAAC;QAClC,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED,KAAK,CAAC,OAAO;QACX,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;YACpB,YAAY,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAChC,CAAC;QACD,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;QACvB,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC;QACxB,IAAI,CAAC,oBAAoB,CAAC,KAAK,EAAE,CAAC;QAClC,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;IACxB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,YAAY,CAChB,SAAuB,EACvB,CAAS,EACT,QAAmB;QAEnB,6CAA6C;QAC7C,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,EAAE,CAAC;YAC1B,OAAO,IAAI,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC;QACzD,CAAC;QAED,6BAA6B;QAC7B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE;YAC3B,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;YAClD,IAAI,CAAC,uBAAuB,CAAC,QAAQ,CAAC,CAAC;QACzC,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK,CACT,YAA0B,EAC1B,MAAsB,EACtB,QAAkB;QAElB,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,CAAC,CAAC;QAExC,wBAAwB;QACxB,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,CAAC;QAEzC,6BAA6B;QAC7B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;YAClD,OAAO,IAAI,CAAC,oBAAoB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QACrD,CAAC;QAED,+BAA+B;QAC/B,MAAM,UAAU,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CA
AC,YAAY,EAAE,CAAC,CAAC,GAAG,YAAY,CAAC,MAAM,CAAC;QAE9F,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QACrD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,UAAU,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,mBAAmB;IACjE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,SAAS,CACb,KAAmB,EACnB,OAAqB;QAErB,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,OAAO,KAAK,CAAC;QACf,CAAC;QAED,qDAAqD;QACrD,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC;QAEnD,0CAA0C;QAC1C,IAAI,IAAI,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtC,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;YAC1D,IAAI,CAAC,eAAe,GAAG,EAAE,CAAC;YAC1B,OAAO,MAAM,CAAC;QAChB,CAAC;QAED,wCAAwC;QACxC,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;QACzE,IAAI,CAAC,eAAe,GAAG,EAAE,CAAC;QAC1B,OAAO,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,CAAC;IAED,QAAQ;QACN,OAAO;YACL,YAAY,EAAE,IAAI,CAAC,YAAY;YAC/B,gBAAgB,EAAE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACjF,cAAc,EAAE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACnF,sBAAsB,EAAE,IAAI,CAAC,YAAY,CAAC,MAAM;YAChD,mBAAmB,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;YAC9C,wBAAwB,EAAE,IAAI,CAAC,aAAa;YAC5C,eAAe,EAAE,IAAI,CAAC,eAAe;SACtC,CAAC;IACJ,CAAC;IAED,2EAA2E;IAC3E,wCAAwC;IACxC,2EAA2E;IAE3E;;OAEG;IACK,kBAAkB,CACxB,SAAuB,EACvB,CAAS,EACT,QAAmB;QAEnB,MAAM,OAAO,GAAmB,EAAE,CAAC;QAEnC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE,CAAC;YAC/B,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;YACvE,OAAO,CAAC,IAAI,CAAC;gBACX,OAAO;gBACP,UAAU;gBACV,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,WAAW;gBAC5C,SAAS,EAAE,CAAC;aACb,CAAC,CAAC;QACL,CAAC;QAED,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC;QACpD,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC7B,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,eAAe,CAC3B,KAAmB,EACnB,OAAoB;QAEpB,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QAC9C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAElB,MAAM,IAAI,GAAG,IAAI,CAAC,MA
AM,CAAC,QAAQ,CAAC;QAElC,KAAK,MAAM,MAAM,IAAI,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,CAAC,EAAE,CAAC;YAC9D,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAChC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAEhC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACX,MAAM,OAAO,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC;gBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC;gBACnB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACvC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;gBAC3D,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,2EAA2E;IAC3E,mBAAmB;IACnB,2EAA2E;IAE3E;;OAEG;IACK,uBAAuB,CAAC,QAAmB;QACjD,IAAI,IAAI,CAAC,UAAU;YAAE,OAAO;QAE5B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,GAAG,EAAE;YAChC,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC;QACtC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,mCAAmC;IAC7C,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,oBAAoB,CAAC,QAAmB;QACpD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;QACvB,IAAI,IAAI,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE3C,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QACpC,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC;QAE9B,MAAM,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC;QAChC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;QAEvB,wCAAwC;QACxC,MAAM,aAAa,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;QAErD,+BAA+B;QAC/B,KAAK,MAAM,OAAO,IAAI,KAAK,EAAE,CAAC;YAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,qBAAqB,CACxC,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,CAAC,EACT,QAAQ,EACR,aAAa,CACd,CAAC;YACF,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC3B,CAAC;QAED,IAAI,CAAC,YAAY,EAAE,CAAC;QACpB,IAAI,CAAC,UAAU,IAAI,KAAK,CAAC,MAAM,CAAC;QAChC,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QACrD,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAC;IACjC,CAAC;IAED;;OAEG;IACK,qBAAqB,CAC3B,KAAmB,EACnB,CAAS,EACT,QAAmB,EACnB,aAA6B;QAE7B,MAAM,YAAY,GAAwC,EAAE,CAAC;QAE7D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9C,MAAM,GAAG,GAAG,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3D,YAAY,CAAC,IAAI,C
AAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;QACrC,CAAC;QAED,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,IAAI,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAEtC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YACpB,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC;YACxB,UAAU,EAAE,CAAC,CAAC,GAAG;YACjB,UAAU,EAAE,CAAC,CAAC,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,WAAW;YAC/C,SAAS,EAAE,CAAC;SACb,CAAC,CAAC,CAAC;IACN,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,oBAAoB,CAChC,MAAsB,EACtB,QAAkB;QAElB,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,MAAM,KAAK,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;QAC5D,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAEhE,MAAM,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,CAAC;QACjD,MAAM,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;QAEzC,sBAAsB;QACtB,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,IAAI,gBAAgB,CAAC,CAAC;QACnE,MAAM,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,GAAG,gBAAgB,CAAC,CAAC;QAEjE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtB,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACrD,OAAO,CAAC,CAAC;QACX,CAAC;QAED,uBAAuB;QACvB,KAAK,MAAM,UAAU,IAAI,IAAI,EAAE,CAAC;YAC9B,IAAI,CAAC,4BAA4B,CAAC,UAAU,EAAE,YAAY,CAAC,CAAC;QAC9D,CAAC;QAED,yCAAyC;QACzC,KAAK,MAAM,UAAU,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC;YACnD,IAAI,CAAC,4BAA4B,CAAC,UAAU,EAAE,CAAC,YAAY,GAAG,GAAG,CAAC,CAAC;QACrE,CAAC;QAED,IAAI,CAAC,aAAa,EAAE,CAAC;QAErB,4CAA4C;QAC5C,IAAI,IAAI,CAAC,aAAa,IAAI,CAAC,EAAE,CAAC;YAC5B,MAAM,IAAI,CAAC,yBAAyB,CAAC,QAAQ,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;YACjE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;QACzB,CAAC;QAED,sBAAsB;QACtB,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC;QAC9E,MAAM,WAAW,GAAG,UAAU,GAAG,GAAG,CAAC;QAErC,IAAI,CAAC,eAAe,EAAE,CAAC;QACvB,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAErD,OAAO,IAAI,CAAC,G
AAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC;IAClC,CAAC;IAED;;OAEG;IACK,4BAA4B,CAClC,UAAsB,EACtB,KAAa;QAEb,IAAI,UAAU,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE1C,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC;QAC9B,IAAI,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAElD,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,MAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC;YAClD,QAAQ,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC,CAAC;YACjC,IAAI,CAAC,oBAAoB,CAAC,GAAG,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAC/C,CAAC;QAED,8BAA8B;QAC9B,MAAM,MAAM,GAAG,UAAU,CAAC,YAAY,GAAG,KAAK,CAAC;QAC/C,KAAK,MAAM,IAAI,IAAI,UAAU,CAAC,KAAK,EAAE,CAAC;YACpC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC3E,QAAQ,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;YAC3D,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,yBAAyB,CACrC,QAAkB,EAClB,SAAiB;QAEjB,KAAK,MAAM,CAAC,GAAG,EAAE,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,EAAE,CAAC;YACxD,qBAAqB;YACrB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YAChE,IAAI,IAAI,GAAG,CAAC,EAAE,CAAC;gBACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACzC,QAAQ,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACtB,CAAC;YACH,CAAC;YAED,oBAAoB;YACpB,MAAM,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACxC,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAEtC,IAAI,MAAM,IAAI,KAAK,EAAE,CAAC;gBACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACzC,MAAM,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;oBACjE,QAAQ,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC;gBACzB,CAAC;YACH,CAAC;YAED,uCAAuC;YACvC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,cAAc,CAC1B,MAAsB,EACtB,OAAoB;QAEpB,MAAM,OAAO,GAAmB,EAAE,CAAC;QACnC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC;Q
AElC,mDAAmD;QACnD,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC3B,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YAC9C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAElB,KAAK,MAAM,MAAM,IAAI,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,CAAC,EAAE,CAAC;gBAC9D,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBAChC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBAEhC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;oBACX,MAAM,OAAO,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC;oBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC;oBACnB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;wBACvC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;oBAC3D,CAAC;gBACH,CAAC;YACH,CAAC;YAED,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACvB,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;CACF"}
|