@sparkleideas/neural 3.0.0-alpha.7-patch.25 → 3.0.0-alpha.7-patch.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/algorithms/a2c.d.ts +86 -0
- package/dist/algorithms/a2c.d.ts.map +1 -0
- package/dist/algorithms/a2c.js +361 -0
- package/dist/algorithms/a2c.js.map +1 -0
- package/dist/algorithms/curiosity.d.ts +82 -0
- package/dist/algorithms/curiosity.d.ts.map +1 -0
- package/dist/algorithms/curiosity.js +392 -0
- package/dist/algorithms/curiosity.js.map +1 -0
- package/dist/algorithms/decision-transformer.d.ts +82 -0
- package/dist/algorithms/decision-transformer.d.ts.map +1 -0
- package/dist/algorithms/decision-transformer.js +415 -0
- package/dist/algorithms/decision-transformer.js.map +1 -0
- package/dist/algorithms/dqn.d.ts +72 -0
- package/dist/algorithms/dqn.d.ts.map +1 -0
- package/dist/algorithms/dqn.js +303 -0
- package/dist/algorithms/dqn.js.map +1 -0
- package/dist/algorithms/index.d.ts +32 -0
- package/dist/algorithms/index.d.ts.map +1 -0
- package/dist/algorithms/index.js +74 -0
- package/dist/algorithms/index.js.map +1 -0
- package/dist/algorithms/ppo.d.ts +72 -0
- package/dist/algorithms/ppo.d.ts.map +1 -0
- package/dist/algorithms/ppo.js +331 -0
- package/dist/algorithms/ppo.js.map +1 -0
- package/dist/algorithms/q-learning.d.ts +77 -0
- package/dist/algorithms/q-learning.d.ts.map +1 -0
- package/dist/algorithms/q-learning.js +259 -0
- package/dist/algorithms/q-learning.js.map +1 -0
- package/dist/algorithms/sarsa.d.ts +82 -0
- package/dist/algorithms/sarsa.d.ts.map +1 -0
- package/dist/algorithms/sarsa.js +297 -0
- package/dist/algorithms/sarsa.js.map +1 -0
- package/dist/application/index.d.ts +7 -0
- package/dist/application/index.d.ts.map +1 -0
- package/dist/application/index.js +7 -0
- package/dist/application/index.js.map +1 -0
- package/dist/application/services/neural-application-service.d.ts +92 -0
- package/dist/application/services/neural-application-service.d.ts.map +1 -0
- package/dist/application/services/neural-application-service.js +161 -0
- package/dist/application/services/neural-application-service.js.map +1 -0
- package/dist/domain/entities/pattern.d.ts +89 -0
- package/dist/domain/entities/pattern.d.ts.map +1 -0
- package/dist/domain/entities/pattern.js +134 -0
- package/dist/domain/entities/pattern.js.map +1 -0
- package/dist/domain/index.d.ts +8 -0
- package/dist/domain/index.d.ts.map +1 -0
- package/dist/domain/index.js +8 -0
- package/dist/domain/index.js.map +1 -0
- package/dist/domain/services/learning-service.d.ts +90 -0
- package/dist/domain/services/learning-service.d.ts.map +1 -0
- package/dist/domain/services/learning-service.js +195 -0
- package/dist/domain/services/learning-service.js.map +1 -0
- package/dist/index.d.ts +118 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +201 -0
- package/dist/index.js.map +1 -0
- package/dist/modes/balanced.d.ts +60 -0
- package/dist/modes/balanced.d.ts.map +1 -0
- package/dist/modes/balanced.js +234 -0
- package/dist/modes/balanced.js.map +1 -0
- package/dist/modes/base.d.ts +50 -0
- package/dist/modes/base.d.ts.map +1 -0
- package/dist/modes/base.js +77 -0
- package/dist/modes/base.js.map +1 -0
- package/dist/modes/batch.d.ts +82 -0
- package/dist/modes/batch.d.ts.map +1 -0
- package/dist/modes/batch.js +316 -0
- package/dist/modes/batch.js.map +1 -0
- package/dist/modes/edge.d.ts +85 -0
- package/dist/modes/edge.d.ts.map +1 -0
- package/dist/modes/edge.js +310 -0
- package/dist/modes/edge.js.map +1 -0
- package/dist/modes/index.d.ts +13 -0
- package/dist/modes/index.d.ts.map +1 -0
- package/dist/modes/index.js +13 -0
- package/dist/modes/index.js.map +1 -0
- package/dist/modes/real-time.d.ts +58 -0
- package/dist/modes/real-time.d.ts.map +1 -0
- package/dist/modes/real-time.js +196 -0
- package/dist/modes/real-time.js.map +1 -0
- package/dist/modes/research.d.ts +79 -0
- package/dist/modes/research.d.ts.map +1 -0
- package/dist/modes/research.js +389 -0
- package/dist/modes/research.js.map +1 -0
- package/dist/pattern-learner.d.ts +117 -0
- package/dist/pattern-learner.d.ts.map +1 -0
- package/dist/pattern-learner.js +603 -0
- package/dist/pattern-learner.js.map +1 -0
- package/dist/reasoning-bank.d.ts +259 -0
- package/dist/reasoning-bank.d.ts.map +1 -0
- package/dist/reasoning-bank.js +993 -0
- package/dist/reasoning-bank.js.map +1 -0
- package/dist/reasoningbank-adapter.d.ts +168 -0
- package/dist/reasoningbank-adapter.d.ts.map +1 -0
- package/dist/reasoningbank-adapter.js +463 -0
- package/dist/reasoningbank-adapter.js.map +1 -0
- package/dist/sona-integration.d.ts +168 -0
- package/dist/sona-integration.d.ts.map +1 -0
- package/dist/sona-integration.js +316 -0
- package/dist/sona-integration.js.map +1 -0
- package/dist/sona-manager.d.ts +147 -0
- package/dist/sona-manager.d.ts.map +1 -0
- package/dist/sona-manager.js +695 -0
- package/dist/sona-manager.js.map +1 -0
- package/dist/types.d.ts +431 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +11 -0
- package/dist/types.js.map +1 -0
- package/package.json +3 -3
- package/tsconfig.build.tsbuildinfo +1 -0
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
/**
 * Balanced Mode Implementation
 *
 * General-purpose mode with:
 * - +25% quality improvement
 * - 18ms overhead
 * - Rank-4 LoRA
 * - Pattern caching
 * - Standard learning pipeline
 */
import { BaseModeImplementation } from './base.js';
/**
 * Balanced mode for general-purpose learning.
 *
 * Fixes over the previous revision:
 * - Pattern-cache keys now include the pattern-set size, so a result cached
 *   against an older pattern list is not served after the list grows or
 *   shrinks (stale-cache bug).
 * - Cache eviction now triggers at the 500-entry bound instead of one past
 *   it, so the cache is genuinely capped at 500 entries.
 */
export class BalancedMode extends BaseModeImplementation {
    mode = 'balanced';
    // Pattern cache: composite key -> top-k PatternMatch[] from a prior query.
    // Evicted in insertion order (Map preserves it) once the cap is reached.
    patternCache = new Map();
    cacheHits = 0;
    cacheMisses = 0;
    // Learning state: per-key accumulated gradients and momentum buffers
    // (SGD-with-momentum; see accumulateGradient).
    gradientAccumulator = new Map();
    momentumBuffers = new Map();
    // Stats counters consumed by getStats().
    totalPatternMatches = 0;
    totalPatternTime = 0;
    totalLearnTime = 0;
    learnIterations = 0;
    qualityImprovements = [];
    async initialize() {
        await super.initialize();
        this.patternCache.clear();
        this.gradientAccumulator.clear();
        this.momentumBuffers.clear();
    }
    async cleanup() {
        this.patternCache.clear();
        this.gradientAccumulator.clear();
        this.momentumBuffers.clear();
        await super.cleanup();
    }
    /**
     * Find the k patterns most similar to `embedding` (cosine similarity),
     * with an insertion-order-evicted result cache.
     *
     * @param embedding - Query vector (Float32Array).
     * @param k - Number of matches to return.
     * @param patterns - Candidate patterns; each must expose `embedding` and
     *   `successRate` (used to derive `confidence`).
     * @returns Top-k matches sorted by descending similarity.
     */
    async findPatterns(embedding, k, patterns) {
        const startTime = performance.now();
        // The key folds in patterns.length so a cached result computed against
        // a differently-sized pattern list is never returned. NOTE(review):
        // in-place edits that keep the list length the same can still serve a
        // stale entry — acceptable for this mode's best-effort cache.
        const cacheKey = `${patterns.length}|${this.computeCacheKey(embedding)}`;
        const cached = this.patternCache.get(cacheKey);
        if (cached && cached.length >= k) {
            this.cacheHits++;
            this.totalPatternTime += performance.now() - startTime;
            this.totalPatternMatches++;
            return cached.slice(0, k);
        }
        this.cacheMisses++;
        // Compute similarities for all patterns
        const matches = [];
        for (const pattern of patterns) {
            const similarity = this.cosineSimilarity(embedding, pattern.embedding);
            matches.push({
                pattern,
                similarity,
                confidence: similarity * pattern.successRate,
                latencyMs: 0,
            });
        }
        // Sort by similarity descending
        matches.sort((a, b) => b.similarity - a.similarity);
        const topK = matches.slice(0, k);
        // Evict the oldest entry once the bound is reached (>= keeps the cache
        // at exactly 500 entries; the previous > check let it sit at 501).
        if (this.patternCache.size >= 500) {
            const firstKey = this.patternCache.keys().next().value;
            if (firstKey !== undefined)
                this.patternCache.delete(firstKey);
        }
        this.patternCache.set(cacheKey, topK);
        this.totalPatternTime += performance.now() - startTime;
        this.totalPatternMatches++;
        return topK;
    }
    /**
     * Learn from trajectories using standard gradient descent with momentum
     * and a contrastive term against low-quality trajectories.
     *
     * @param trajectories - Trajectories with `qualityScore` and `steps`
     *   (each step exposing `stateAfter`).
     * @param config - Mode config; reads `qualityThreshold`, `learningRate`,
     *   `ewcLambda`.
     * @param ewcState - EWC state (`fisher` / `means` maps) for the
     *   continual-learning penalty.
     * @returns Non-negative improvement delta vs. a fixed 0.5 baseline;
     *   0 when there is nothing to learn from.
     */
    async learn(trajectories, config, ewcState) {
        const startTime = performance.now();
        if (trajectories.length === 0)
            return 0;
        const qualityThreshold = config.qualityThreshold;
        const learningRate = config.learningRate;
        // Separate positive and negative examples
        const goodTrajectories = trajectories.filter(t => t.qualityScore >= qualityThreshold);
        const badTrajectories = trajectories.filter(t => t.qualityScore < qualityThreshold);
        if (goodTrajectories.length === 0)
            return 0;
        // Accumulated gradient norm; currently diagnostic only — it does not
        // feed the returned improvement delta.
        let totalGradientNorm = 0;
        for (const good of goodTrajectories) {
            // Use last step embedding as "goal state"
            if (good.steps.length === 0)
                continue;
            const goalState = good.steps[good.steps.length - 1].stateAfter;
            // Positive gradient: move toward good outcomes
            const posGradient = this.computeGradient(goalState, good.qualityScore);
            totalGradientNorm += this.accumulateGradient('positive', posGradient, learningRate);
            // Negative gradient: move away from bad outcomes (contrastive,
            // capped at 3 negatives per positive and half the learning rate)
            for (const bad of badTrajectories.slice(0, 3)) {
                if (bad.steps.length === 0)
                    continue;
                const badState = bad.steps[bad.steps.length - 1].stateAfter;
                const negGradient = this.computeGradient(badState, -bad.qualityScore);
                totalGradientNorm += this.accumulateGradient('negative', negGradient, learningRate * 0.5);
            }
        }
        // Apply EWC regularization
        const ewcPenalty = this.computeEWCPenalty(ewcState, config.ewcLambda);
        totalGradientNorm += ewcPenalty;
        // Compute improvement delta against a fixed 0.5 quality baseline
        const avgGoodQuality = goodTrajectories.reduce((s, t) => s + t.qualityScore, 0) / goodTrajectories.length;
        const baselineQuality = 0.5;
        const improvementDelta = avgGoodQuality - baselineQuality;
        this.qualityImprovements.push(improvementDelta);
        if (this.qualityImprovements.length > 100) {
            this.qualityImprovements = this.qualityImprovements.slice(-100);
        }
        this.totalLearnTime += performance.now() - startTime;
        this.learnIterations++;
        return Math.max(0, improvementDelta);
    }
    /**
     * Apply LoRA adaptations across the four attention projections, blending
     * each adapted output into the running result at alpha = 0.2.
     *
     * @param input - Input vector; returned untouched when no weights given.
     * @param weights - Optional LoRA weights with `A` / `B` maps keyed by
     *   module name.
     * @returns New Float32Array (input is never mutated when weights exist).
     */
    async applyLoRA(input, weights) {
        if (!weights) {
            return input;
        }
        const output = new Float32Array(input.length);
        output.set(input);
        const rank = this.config.loraRank;
        // Apply to all target modules
        for (const module of ['q_proj', 'v_proj', 'k_proj', 'o_proj']) {
            const A = weights.A.get(module);
            const B = weights.B.get(module);
            if (A && B) {
                const adapted = this.applyLoRATransform(input, A, B, rank);
                const alpha = 0.2; // Moderate blending
                for (let i = 0; i < output.length; i++) {
                    output[i] = output[i] * (1 - alpha) + adapted[i] * alpha;
                }
            }
        }
        return output;
    }
    /** Aggregate mode statistics (rates and rolling averages). */
    getStats() {
        const avgImprovement = this.qualityImprovements.length > 0
            ? this.qualityImprovements.reduce((a, b) => a + b, 0) / this.qualityImprovements.length
            : 0;
        return {
            cacheHitRate: this.cacheHits + this.cacheMisses > 0
                ? this.cacheHits / (this.cacheHits + this.cacheMisses)
                : 0,
            avgPatternMatchMs: this.totalPatternMatches > 0
                ? this.totalPatternTime / this.totalPatternMatches
                : 0,
            avgLearnMs: this.learnIterations > 0
                ? this.totalLearnTime / this.learnIterations
                : 0,
            avgImprovement,
            patternCacheSize: this.patternCache.size,
            learnIterations: this.learnIterations,
        };
    }
    /**
     * Compute a cache key from the first 16 embedding dimensions, each
     * quantized to two decimals. Deliberately lossy: nearby embeddings may
     * collide, which is acceptable for a similarity cache.
     */
    computeCacheKey(embedding) {
        const keyParts = [];
        for (let i = 0; i < Math.min(16, embedding.length); i++) {
            keyParts.push(embedding[i].toFixed(2));
        }
        return keyParts.join(',');
    }
    /**
     * Gradient of a simple linear score: element-wise state * reward.
     */
    computeGradient(state, reward) {
        const gradient = new Float32Array(state.length);
        for (let i = 0; i < state.length; i++) {
            gradient[i] = state[i] * reward;
        }
        return gradient;
    }
    /**
     * Accumulate a gradient under `key` using momentum (beta = 0.9) and
     * learning rate `lr`; returns the L2 norm of the momentum update.
     */
    accumulateGradient(key, gradient, lr) {
        let momentum = this.momentumBuffers.get(key);
        if (!momentum) {
            momentum = new Float32Array(gradient.length);
            this.momentumBuffers.set(key, momentum);
        }
        let accumulator = this.gradientAccumulator.get(key);
        if (!accumulator) {
            accumulator = new Float32Array(gradient.length);
            this.gradientAccumulator.set(key, accumulator);
        }
        const beta = 0.9; // Momentum coefficient
        let norm = 0;
        for (let i = 0; i < gradient.length; i++) {
            momentum[i] = beta * momentum[i] + (1 - beta) * gradient[i];
            accumulator[i] += lr * momentum[i];
            norm += momentum[i] * momentum[i];
        }
        return Math.sqrt(norm);
    }
    /**
     * Quadratic EWC penalty for continual learning:
     * 0.5 * lambda * sum(fisher_i * (theta_i - mean_i)^2) over keys present
     * in both the EWC state and the local gradient accumulator.
     */
    computeEWCPenalty(ewcState, lambda) {
        let penalty = 0;
        for (const [key, fisher] of ewcState.fisher) {
            const means = ewcState.means.get(key);
            const current = this.gradientAccumulator.get(key);
            if (means && current) {
                for (let i = 0; i < Math.min(fisher.length, means.length, current.length); i++) {
                    const diff = current[i] - means[i];
                    penalty += fisher[i] * diff * diff;
                }
            }
        }
        return lambda * penalty * 0.5;
    }
}
//# sourceMappingURL=balanced.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"balanced.js","sourceRoot":"","sources":["../../src/modes/balanced.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAWH,OAAO,EAAE,sBAAsB,EAAE,MAAM,WAAW,CAAC;AAEnD;;GAEG;AACH,MAAM,OAAO,YAAa,SAAQ,sBAAsB;IAC7C,IAAI,GAAG,UAAU,CAAC;IAE3B,gBAAgB;IACR,YAAY,GAAgC,IAAI,GAAG,EAAE,CAAC;IACtD,SAAS,GAAG,CAAC,CAAC;IACd,WAAW,GAAG,CAAC,CAAC;IAExB,iBAAiB;IACT,mBAAmB,GAA8B,IAAI,GAAG,EAAE,CAAC;IAC3D,eAAe,GAA8B,IAAI,GAAG,EAAE,CAAC;IAE/D,QAAQ;IACA,mBAAmB,GAAG,CAAC,CAAC;IACxB,gBAAgB,GAAG,CAAC,CAAC;IACrB,cAAc,GAAG,CAAC,CAAC;IACnB,eAAe,GAAG,CAAC,CAAC;IACpB,mBAAmB,GAAa,EAAE,CAAC;IAE3C,KAAK,CAAC,UAAU;QACd,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;QACzB,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,CAAC;QAC1B,IAAI,CAAC,mBAAmB,CAAC,KAAK,EAAE,CAAC;QACjC,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE,CAAC;IAC/B,CAAC;IAED,KAAK,CAAC,OAAO;QACX,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,CAAC;QAC1B,IAAI,CAAC,mBAAmB,CAAC,KAAK,EAAE,CAAC;QACjC,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE,CAAC;QAC7B,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;IACxB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,YAAY,CAChB,SAAuB,EACvB,CAAS,EACT,QAAmB;QAEnB,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,cAAc;QACd,MAAM,QAAQ,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;QACjD,MAAM,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;QAE/C,IAAI,MAAM,IAAI,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;YACjC,IAAI,CAAC,SAAS,EAAE,CAAC;YACjB,IAAI,CAAC,gBAAgB,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACvD,IAAI,CAAC,mBAAmB,EAAE,CAAC;YAC3B,OAAO,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAC5B,CAAC;QAED,IAAI,CAAC,WAAW,EAAE,CAAC;QAEnB,wCAAwC;QACxC,MAAM,OAAO,GAAmB,EAAE,CAAC;QAEnC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE,CAAC;YAC/B,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;YACvE,OAAO,CAAC,IAAI,CAAC;gBACX,OAAO;gBACP,UAAU;gBACV,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,WAAW;gBAC5C,SAAS,EAAE,CAAC;aACb,CAAC,CAAC;QACL,CAAC;QAED,gCAAgC;QAChC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC;QACpD,MAAM,IAAI,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAEjC,eAAe;QACf,IAAI,IAAI,CAAC,
YAAY,CAAC,IAAI,GAAG,GAAG,EAAE,CAAC;YACjC,MAAM,QAAQ,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC;YACvD,IAAI,QAAQ;gBAAE,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QACnD,CAAC;QACD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAEtC,IAAI,CAAC,gBAAgB,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QACvD,IAAI,CAAC,mBAAmB,EAAE,CAAC;QAE3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK,CACT,YAA0B,EAC1B,MAAsB,EACtB,QAAkB;QAElB,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,CAAC,CAAC;QAExC,MAAM,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,CAAC;QACjD,MAAM,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;QAEzC,0CAA0C;QAC1C,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,IAAI,gBAAgB,CAAC,CAAC;QACtF,MAAM,eAAe,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,GAAG,gBAAgB,CAAC,CAAC;QAEpF,IAAI,gBAAgB,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,CAAC,CAAC;QAE5C,0CAA0C;QAC1C,IAAI,iBAAiB,GAAG,CAAC,CAAC;QAE1B,KAAK,MAAM,IAAI,IAAI,gBAAgB,EAAE,CAAC;YACpC,0CAA0C;YAC1C,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YAEtC,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,UAAU,CAAC;YAE/D,+CAA+C;YAC/C,MAAM,WAAW,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;YACvE,iBAAiB,IAAI,IAAI,CAAC,kBAAkB,CAAC,UAAU,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;YAEpF,+DAA+D;YAC/D,KAAK,MAAM,GAAG,IAAI,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC;gBAC9C,IAAI,GAAG,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACrC,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,UAAU,CAAC;gBAC5D,MAAM,WAAW,GAAG,IAAI,CAAC,eAAe,CAAC,QAAQ,EAAE,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;gBACtE,iBAAiB,IAAI,IAAI,CAAC,kBAAkB,CAAC,UAAU,EAAE,WAAW,EAAE,YAAY,GAAG,GAAG,CAAC,CAAC;YAC5F,CAAC;QACH,CAAC;QAED,2BAA2B;QAC3B,MAAM,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC,QAAQ,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;QACtE,iBAAiB,IAAI,UAAU,CAAC;QAEhC,4BAA4B;QAC5B,MAAM,cAAc,GAAG,gBAAgB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,YAAY,EAAE,CAAC,
CAAC,GAAG,gBAAgB,CAAC,MAAM,CAAC;QAC1G,MAAM,eAAe,GAAG,GAAG,CAAC;QAC5B,MAAM,gBAAgB,GAAG,cAAc,GAAG,eAAe,CAAC;QAE1D,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAChD,IAAI,IAAI,CAAC,mBAAmB,CAAC,MAAM,GAAG,GAAG,EAAE,CAAC;YAC1C,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC;QAClE,CAAC;QAED,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QACrD,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,gBAAgB,CAAC,CAAC;IACvC,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,SAAS,CACb,KAAmB,EACnB,OAAqB;QAErB,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,OAAO,KAAK,CAAC;QACf,CAAC;QAED,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QAC9C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAElB,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC;QAElC,8BAA8B;QAC9B,KAAK,MAAM,MAAM,IAAI,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,CAAC,EAAE,CAAC;YAC9D,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAChC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAEhC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACX,MAAM,OAAO,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC;gBAC3D,MAAM,KAAK,GAAG,GAAG,CAAC,CAAC,oBAAoB;gBACvC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACvC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;gBAC3D,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,QAAQ;QACN,MAAM,cAAc,GAAG,IAAI,CAAC,mBAAmB,CAAC,MAAM,GAAG,CAAC;YACxD,CAAC,CAAC,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,mBAAmB,CAAC,MAAM;YACvF,CAAC,CAAC,CAAC,CAAC;QAEN,OAAO;YACL,YAAY,EAAE,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC;gBACjD,CAAC,CAAC,IAAI,CAAC,SAAS,GAAG,CAAC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC;gBACtD,CAAC,CAAC,CAAC;YACL,iBAAiB,EAAE,IAAI,CAAC,mBAAmB,GAAG,CAAC;gBAC7C,CAAC,CAAC,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,mBAAmB;gBAClD,CAAC,CAAC,CAAC;YACL,UAAU,EAAE,IAAI,CAAC,eAAe,GAAG,CAAC;gBAClC,CAAC,CAAC,IAAI,C
AAC,cAAc,GAAG,IAAI,CAAC,eAAe;gBAC5C,CAAC,CAAC,CAAC;YACL,cAAc;YACd,gBAAgB,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI;YACxC,eAAe,EAAE,IAAI,CAAC,eAAe;SACtC,CAAC;IACJ,CAAC;IAED;;OAEG;IACK,eAAe,CAAC,SAAuB;QAC7C,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;YACxD,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;QACzC,CAAC;QACD,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC5B,CAAC;IAED;;OAEG;IACK,eAAe,CAAC,KAAmB,EAAE,MAAc;QACzD,MAAM,QAAQ,GAAG,IAAI,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QAChD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACtC,QAAQ,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC;QAClC,CAAC;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED;;OAEG;IACK,kBAAkB,CAAC,GAAW,EAAE,QAAsB,EAAE,EAAU;QACxE,IAAI,QAAQ,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAC7C,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,QAAQ,GAAG,IAAI,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YAC7C,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAC1C,CAAC;QAED,IAAI,WAAW,GAAG,IAAI,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACpD,IAAI,CAAC,WAAW,EAAE,CAAC;YACjB,WAAW,GAAG,IAAI,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YAChD,IAAI,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;QACjD,CAAC;QAED,MAAM,IAAI,GAAG,GAAG,CAAC,CAAC,uBAAuB;QACzC,IAAI,IAAI,GAAG,CAAC,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACzC,QAAQ,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAC5D,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YACnC,IAAI,IAAI,QAAQ,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpC,CAAC;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACzB,CAAC;IAED;;OAEG;IACK,iBAAiB,CAAC,QAAkB,EAAE,MAAc;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAC;QAEhB,KAAK,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,IAAI,QAAQ,CAAC,MAAM,EAAE,CAAC;YAC5C,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACtC,MAAM,OAAO,GAAG
,IAAI,CAAC,mBAAmB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAElD,IAAI,KAAK,IAAI,OAAO,EAAE,CAAC;gBACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;oBAC/E,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;oBACnC,OAAO,IAAI,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;gBACrC,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,MAAM,GAAG,OAAO,GAAG,GAAG,CAAC;IAChC,CAAC;CACF"}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/**
 * Base Mode Implementation
 *
 * Separated to avoid circular dependencies.
 */
import type { SONAModeConfig, ModeOptimizations, Trajectory, Pattern, PatternMatch, LoRAWeights, EWCState } from '../types.js';
/**
 * Common interface implemented by every mode in this package
 * (e.g. balanced, batch).
 */
export interface ModeImplementation {
    /** Mode identifier (e.g. 'balanced', 'batch'). */
    readonly mode: string;
    /** Initialize the mode; call before any other method. */
    initialize(): Promise<void>;
    /** Release resources; the mode must be re-initialized before reuse. */
    cleanup(): Promise<void>;
    /**
     * Find the k candidate patterns most similar to `embedding`
     * (k-nearest by similarity over the supplied `patterns`).
     */
    findPatterns(embedding: Float32Array, k: number, patterns: Pattern[]): Promise<PatternMatch[]>;
    /**
     * Perform a learning step over the given trajectories; resolves to a
     * numeric learning signal (implementations in this package return an
     * improvement delta).
     */
    learn(trajectories: Trajectory[], config: SONAModeConfig, ewcState: EWCState): Promise<number>;
    /**
     * Apply LoRA adaptations to `input`; with `weights` omitted the input
     * is expected back unchanged.
     */
    applyLoRA(input: Float32Array, weights?: LoRAWeights): Promise<Float32Array>;
    /** Mode-specific statistics as a flat name -> number record. */
    getStats(): Record<string, number>;
}
/**
 * Abstract base class providing the shared math helpers (cosine similarity,
 * LoRA transform) and lifecycle plumbing for mode implementations.
 */
export declare abstract class BaseModeImplementation implements ModeImplementation {
    abstract readonly mode: string;
    protected config: SONAModeConfig;
    protected optimizations: ModeOptimizations;
    protected isInitialized: boolean;
    constructor(config: SONAModeConfig, optimizations: ModeOptimizations);
    initialize(): Promise<void>;
    cleanup(): Promise<void>;
    /**
     * Cosine similarity of two equal-length vectors; returns 0 on length
     * mismatch or zero magnitude. Implemented with a 4-wide unrolled loop
     * (see base.js).
     */
    protected cosineSimilarity(a: Float32Array, b: Float32Array): number;
    /**
     * Low-rank update: output = input + B(A * input), where A is dim x rank
     * and B is rank x dim, both flat row-major Float32Arrays.
     */
    protected applyLoRATransform(input: Float32Array, A: Float32Array, B: Float32Array, rank: number): Float32Array;
    abstract findPatterns(embedding: Float32Array, k: number, patterns: Pattern[]): Promise<PatternMatch[]>;
    abstract learn(trajectories: Trajectory[], config: SONAModeConfig, ewcState: EWCState): Promise<number>;
    abstract applyLoRA(input: Float32Array, weights?: LoRAWeights): Promise<Float32Array>;
    abstract getStats(): Record<string, number>;
}
//# sourceMappingURL=base.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../src/modes/base.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,cAAc,EACd,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,YAAY,EACZ,WAAW,EACX,QAAQ,EACT,MAAM,aAAa,CAAC;AAErB;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC,sBAAsB;IACtB,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IAEtB,0BAA0B;IAC1B,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IAE5B,wBAAwB;IACxB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB,wCAAwC;IACxC,YAAY,CACV,SAAS,EAAE,YAAY,EACvB,CAAC,EAAE,MAAM,EACT,QAAQ,EAAE,OAAO,EAAE,GAClB,OAAO,CAAC,YAAY,EAAE,CAAC,CAAC;IAE3B,8BAA8B;IAC9B,KAAK,CACH,YAAY,EAAE,UAAU,EAAE,EAC1B,MAAM,EAAE,cAAc,EACtB,QAAQ,EAAE,QAAQ,GACjB,OAAO,CAAC,MAAM,CAAC,CAAC;IAEnB,6BAA6B;IAC7B,SAAS,CACP,KAAK,EAAE,YAAY,EACnB,OAAO,CAAC,EAAE,WAAW,GACpB,OAAO,CAAC,YAAY,CAAC,CAAC;IAEzB,8BAA8B;IAC9B,QAAQ,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACpC;AAED;;GAEG;AACH,8BAAsB,sBAAuB,YAAW,kBAAkB;IACxE,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IAE/B,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC;IACjC,SAAS,CAAC,aAAa,EAAE,iBAAiB,CAAC;IAC3C,SAAS,CAAC,aAAa,UAAS;gBAEpB,MAAM,EAAE,cAAc,EAAE,aAAa,EAAE,iBAAiB;IAK9D,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAI3B,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAI9B;;OAEG;IACH,SAAS,CAAC,gBAAgB,CAAC,CAAC,EAAE,YAAY,EAAE,CAAC,EAAE,YAAY,GAAG,MAAM;IA4BpE;;OAEG;IACH,SAAS,CAAC,kBAAkB,CAC1B,KAAK,EAAE,YAAY,EACnB,CAAC,EAAE,YAAY,EACf,CAAC,EAAE,YAAY,EACf,IAAI,EAAE,MAAM,GACX,YAAY;IA6Bf,QAAQ,CAAC,YAAY,CACnB,SAAS,EAAE,YAAY,EACvB,CAAC,EAAE,MAAM,EACT,QAAQ,EAAE,OAAO,EAAE,GAClB,OAAO,CAAC,YAAY,EAAE,CAAC;IAE1B,QAAQ,CAAC,KAAK,CACZ,YAAY,EAAE,UAAU,EAAE,EAC1B,MAAM,EAAE,cAAc,EACtB,QAAQ,EAAE,QAAQ,GACjB,OAAO,CAAC,MAAM,CAAC;IAElB,QAAQ,CAAC,SAAS,CAChB,KAAK,EAAE,YAAY,EACnB,OAAO,CAAC,EAAE,WAAW,GACpB,OAAO,CAAC,YAAY,CAAC;IAExB,QAAQ,CAAC,QAAQ,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;CAC5C"}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
/**
 * Base Mode Implementation
 *
 * Separated to avoid circular dependencies.
 */
/**
 * Base class shared by all mode implementations. Holds the mode config and
 * optimization settings, tracks initialization state, and provides the two
 * math helpers (cosine similarity, LoRA transform) subclasses build on.
 */
export class BaseModeImplementation {
    config;
    optimizations;
    isInitialized = false;
    constructor(config, optimizations) {
        this.config = config;
        this.optimizations = optimizations;
    }
    async initialize() {
        this.isInitialized = true;
    }
    async cleanup() {
        this.isInitialized = false;
    }
    /**
     * Cosine similarity of two equal-length vectors.
     *
     * Returns 0 when the lengths differ or either vector has zero magnitude.
     * The main loop is unrolled four elements at a time; a scalar tail loop
     * handles the remainder.
     */
    cosineSimilarity(a, b) {
        if (a.length !== b.length)
            return 0;
        const n = a.length;
        const unrolledEnd = n - (n % 4);
        let dot = 0;
        let magA = 0;
        let magB = 0;
        let i = 0;
        // Unrolled body: four lanes per iteration.
        while (i < unrolledEnd) {
            dot += a[i] * b[i] + a[i + 1] * b[i + 1] + a[i + 2] * b[i + 2] + a[i + 3] * b[i + 3];
            magA += a[i] * a[i] + a[i + 1] * a[i + 1] + a[i + 2] * a[i + 2] + a[i + 3] * a[i + 3];
            magB += b[i] * b[i] + b[i + 1] * b[i + 1] + b[i + 2] * b[i + 2] + b[i + 3] * b[i + 3];
            i += 4;
        }
        // Scalar tail for the last n % 4 elements.
        while (i < n) {
            dot += a[i] * b[i];
            magA += a[i] * a[i];
            magB += b[i] * b[i];
            i += 1;
        }
        const scale = Math.sqrt(magA) * Math.sqrt(magB);
        return scale > 0 ? dot / scale : 0;
    }
    /**
     * Low-rank (LoRA) update: output = input + B(A · input).
     *
     * @param input - Vector of length dim.
     * @param A - Down-projection, dim x rank, flat row-major (A[d * rank + r]).
     * @param B - Up-projection, rank x dim, flat row-major (B[r * dim + d]).
     * @param rank - LoRA rank.
     * @returns A new Float32Array; `input` is not mutated.
     */
    applyLoRATransform(input, A, B, rank) {
        const dim = input.length;
        const result = new Float32Array(dim);
        result.set(input);
        // Down-project: h = A^T · input (rank entries).
        const projected = new Float32Array(rank);
        for (let r = 0; r < rank; r++) {
            let acc = 0;
            for (let d = 0; d < dim; d++) {
                acc += A[d * rank + r] * input[d];
            }
            projected[r] = acc;
        }
        // Up-project and add the delta in place: result += B · h.
        for (let d = 0; d < dim; d++) {
            let acc = 0;
            for (let r = 0; r < rank; r++) {
                acc += B[r * dim + d] * projected[r];
            }
            result[d] += acc;
        }
        return result;
    }
}
//# sourceMappingURL=base.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"base.js","sourceRoot":"","sources":["../../src/modes/base.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAiDH;;GAEG;AACH,MAAM,OAAgB,sBAAsB;IAGhC,MAAM,CAAiB;IACvB,aAAa,CAAoB;IACjC,aAAa,GAAG,KAAK,CAAC;IAEhC,YAAY,MAAsB,EAAE,aAAgC;QAClE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,UAAU;QACd,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC;IAC5B,CAAC;IAED,KAAK,CAAC,OAAO;QACX,IAAI,CAAC,aAAa,GAAG,KAAK,CAAC;IAC7B,CAAC;IAED;;OAEG;IACO,gBAAgB,CAAC,CAAe,EAAE,CAAe;QACzD,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;YAAE,OAAO,CAAC,CAAC;QAEpC,IAAI,UAAU,GAAG,CAAC,CAAC;QACnB,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,KAAK,GAAG,CAAC,CAAC;QAEd,sDAAsD;QACtD,MAAM,GAAG,GAAG,CAAC,CAAC,MAAM,CAAC;QACrB,MAAM,OAAO,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC;QAEhC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC;YACpC,UAAU,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,CAAC;YAChF,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,CAAC;YAC3E,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAC,CAAC,CAAC,CAAC;QAC7E,CAAC;QAED,4BAA4B;QAC5B,KAAK,IAAI,CAAC,GAAG,OAAO,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,UAAU,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAC1B,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YACrB,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACvB,CAAC;QAED,MAAM,
KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAClD,OAAO,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5C,CAAC;IAED;;OAEG;IACO,kBAAkB,CAC1B,KAAmB,EACnB,CAAe,EACf,CAAe,EACf,IAAY;QAEZ,MAAM,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC;QACzB,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC,CAAC;QAErC,uBAAuB;QACvB,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAElB,sDAAsD;QACtD,MAAM,YAAY,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,CAAC;QAC5C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9B,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC7B,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACpC,CAAC;YACD,YAAY,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC;QACxB,CAAC;QAED,qDAAqD;QACrD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC;YAC7B,IAAI,GAAG,GAAG,CAAC,CAAC;YACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC9B,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;YAC1C,CAAC;YACD,MAAM,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC;QACnB,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CAoBF"}
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Batch Mode Implementation
|
|
3
|
+
*
|
|
4
|
+
* Optimized for high-throughput processing with:
|
|
5
|
+
* - Large batch sizes (128)
|
|
6
|
+
* - Rank-8 LoRA
|
|
7
|
+
* - Gradient accumulation
|
|
8
|
+
* - Async batch processing
|
|
9
|
+
* - 50ms latency budget
|
|
10
|
+
*/
|
|
11
|
+
import type { SONAModeConfig, Trajectory, Pattern, PatternMatch, LoRAWeights, EWCState } from '../types.js';
|
|
12
|
+
import { BaseModeImplementation } from './base.js';
|
|
13
|
+
/**
|
|
14
|
+
* Batch mode for high-throughput processing
|
|
15
|
+
*/
|
|
16
|
+
export declare class BatchMode extends BaseModeImplementation {
|
|
17
|
+
readonly mode = "batch";
|
|
18
|
+
private patternQueue;
|
|
19
|
+
private learningQueue;
|
|
20
|
+
private embeddingBuffer;
|
|
21
|
+
private batchEmbeddings;
|
|
22
|
+
private accumulatedGradients;
|
|
23
|
+
private gradientSteps;
|
|
24
|
+
private isBatchProcessing;
|
|
25
|
+
private batchTimer;
|
|
26
|
+
private totalBatches;
|
|
27
|
+
private totalItems;
|
|
28
|
+
private totalBatchTime;
|
|
29
|
+
private learnIterations;
|
|
30
|
+
initialize(): Promise<void>;
|
|
31
|
+
cleanup(): Promise<void>;
|
|
32
|
+
/**
|
|
33
|
+
* Find patterns - queues for batch processing
|
|
34
|
+
*/
|
|
35
|
+
findPatterns(embedding: Float32Array, k: number, patterns: Pattern[]): Promise<PatternMatch[]>;
|
|
36
|
+
/**
|
|
37
|
+
* Learn from trajectories - accumulates for batch
|
|
38
|
+
*/
|
|
39
|
+
learn(trajectories: Trajectory[], config: SONAModeConfig, ewcState: EWCState): Promise<number>;
|
|
40
|
+
/**
|
|
41
|
+
* Apply LoRA with rank-8
|
|
42
|
+
*/
|
|
43
|
+
applyLoRA(input: Float32Array, weights?: LoRAWeights): Promise<Float32Array>;
|
|
44
|
+
getStats(): Record<string, number>;
|
|
45
|
+
/**
|
|
46
|
+
* Direct pattern matching without batching
|
|
47
|
+
*/
|
|
48
|
+
private findPatternsDirect;
|
|
49
|
+
/**
|
|
50
|
+
* Direct LoRA application
|
|
51
|
+
*/
|
|
52
|
+
private applyLoRADirect;
|
|
53
|
+
/**
|
|
54
|
+
* Schedule batch processing
|
|
55
|
+
*/
|
|
56
|
+
private scheduleBatchProcessing;
|
|
57
|
+
/**
|
|
58
|
+
* Process pattern requests in batch
|
|
59
|
+
*/
|
|
60
|
+
private processBatchPatterns;
|
|
61
|
+
/**
|
|
62
|
+
* Batch similarity search
|
|
63
|
+
*/
|
|
64
|
+
private batchSimilaritySearch;
|
|
65
|
+
/**
|
|
66
|
+
* Process batch learning
|
|
67
|
+
*/
|
|
68
|
+
private processBatchLearning;
|
|
69
|
+
/**
|
|
70
|
+
* Accumulate gradient from trajectory
|
|
71
|
+
*/
|
|
72
|
+
private accumulateTrajectoryGradient;
|
|
73
|
+
/**
|
|
74
|
+
* Apply accumulated gradients with EWC
|
|
75
|
+
*/
|
|
76
|
+
private applyAccumulatedGradients;
|
|
77
|
+
/**
|
|
78
|
+
* Apply LoRA to batch of inputs
|
|
79
|
+
*/
|
|
80
|
+
private applyLoRABatch;
|
|
81
|
+
}
|
|
82
|
+
//# sourceMappingURL=batch.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch.d.ts","sourceRoot":"","sources":["../../src/modes/batch.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EACV,cAAc,EAEd,UAAU,EACV,OAAO,EACP,YAAY,EACZ,WAAW,EACX,QAAQ,EACT,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,sBAAsB,EAAE,MAAM,WAAW,CAAC;AAEnD;;GAEG;AACH,qBAAa,SAAU,SAAQ,sBAAsB;IACnD,QAAQ,CAAC,IAAI,WAAW;IAGxB,OAAO,CAAC,YAAY,CAIZ;IACR,OAAO,CAAC,aAAa,CAAoB;IAGzC,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,eAAe,CAAsB;IAG7C,OAAO,CAAC,oBAAoB,CAAwC;IACpE,OAAO,CAAC,aAAa,CAAK;IAG1B,OAAO,CAAC,iBAAiB,CAAS;IAClC,OAAO,CAAC,UAAU,CAA8C;IAGhE,OAAO,CAAC,YAAY,CAAK;IACzB,OAAO,CAAC,UAAU,CAAK;IACvB,OAAO,CAAC,cAAc,CAAK;IAC3B,OAAO,CAAC,eAAe,CAAK;IAEtB,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ3B,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAU9B;;OAEG;IACG,YAAY,CAChB,SAAS,EAAE,YAAY,EACvB,CAAC,EAAE,MAAM,EACT,QAAQ,EAAE,OAAO,EAAE,GAClB,OAAO,CAAC,YAAY,EAAE,CAAC;IAa1B;;OAEG;IACG,KAAK,CACT,YAAY,EAAE,UAAU,EAAE,EAC1B,MAAM,EAAE,cAAc,EACtB,QAAQ,EAAE,QAAQ,GACjB,OAAO,CAAC,MAAM,CAAC;IAoBlB;;OAEG;IACG,SAAS,CACb,KAAK,EAAE,YAAY,EACnB,OAAO,CAAC,EAAE,WAAW,GACpB,OAAO,CAAC,YAAY,CAAC;IAqBxB,QAAQ,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAgBlC;;OAEG;IACH,OAAO,CAAC,kBAAkB;IAqB1B;;OAEG;YACW,eAAe;IA6B7B;;OAEG;IACH,OAAO,CAAC,uBAAuB;IAQ/B;;OAEG;YACW,oBAAoB;IA8BlC;;OAEG;IACH,OAAO,CAAC,qBAAqB;IAwB7B;;OAEG;YACW,oBAAoB;IAiDlC;;OAEG;IACH,OAAO,CAAC,4BAA4B;IAwBpC;;OAEG;YACW,yBAAyB;IA6BvC;;OAEG;YACW,cAAc;CA8B7B"}
|