@arcanea/guardian-evolution 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/algorithms/a2c.d.ts +86 -0
- package/dist/algorithms/a2c.d.ts.map +1 -0
- package/dist/algorithms/a2c.js +361 -0
- package/dist/algorithms/a2c.js.map +1 -0
- package/dist/algorithms/curiosity.d.ts +82 -0
- package/dist/algorithms/curiosity.d.ts.map +1 -0
- package/dist/algorithms/curiosity.js +392 -0
- package/dist/algorithms/curiosity.js.map +1 -0
- package/dist/algorithms/decision-transformer.d.ts +82 -0
- package/dist/algorithms/decision-transformer.d.ts.map +1 -0
- package/dist/algorithms/decision-transformer.js +415 -0
- package/dist/algorithms/decision-transformer.js.map +1 -0
- package/dist/algorithms/dqn.d.ts +72 -0
- package/dist/algorithms/dqn.d.ts.map +1 -0
- package/dist/algorithms/dqn.js +303 -0
- package/dist/algorithms/dqn.js.map +1 -0
- package/dist/algorithms/index.d.ts +32 -0
- package/dist/algorithms/index.d.ts.map +1 -0
- package/dist/algorithms/index.js +74 -0
- package/dist/algorithms/index.js.map +1 -0
- package/dist/algorithms/ppo.d.ts +72 -0
- package/dist/algorithms/ppo.d.ts.map +1 -0
- package/dist/algorithms/ppo.js +331 -0
- package/dist/algorithms/ppo.js.map +1 -0
- package/dist/algorithms/q-learning.d.ts +77 -0
- package/dist/algorithms/q-learning.d.ts.map +1 -0
- package/dist/algorithms/q-learning.js +259 -0
- package/dist/algorithms/q-learning.js.map +1 -0
- package/dist/algorithms/sarsa.d.ts +82 -0
- package/dist/algorithms/sarsa.d.ts.map +1 -0
- package/dist/algorithms/sarsa.js +297 -0
- package/dist/algorithms/sarsa.js.map +1 -0
- package/dist/index.d.ts +118 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +201 -0
- package/dist/index.js.map +1 -0
- package/dist/modes/balanced.d.ts +60 -0
- package/dist/modes/balanced.d.ts.map +1 -0
- package/dist/modes/balanced.js +234 -0
- package/dist/modes/balanced.js.map +1 -0
- package/dist/modes/batch.d.ts +82 -0
- package/dist/modes/batch.d.ts.map +1 -0
- package/dist/modes/batch.js +316 -0
- package/dist/modes/batch.js.map +1 -0
- package/dist/modes/edge.d.ts +85 -0
- package/dist/modes/edge.d.ts.map +1 -0
- package/dist/modes/edge.js +310 -0
- package/dist/modes/edge.js.map +1 -0
- package/dist/modes/index.d.ts +55 -0
- package/dist/modes/index.d.ts.map +1 -0
- package/dist/modes/index.js +83 -0
- package/dist/modes/index.js.map +1 -0
- package/dist/modes/real-time.d.ts +58 -0
- package/dist/modes/real-time.d.ts.map +1 -0
- package/dist/modes/real-time.js +196 -0
- package/dist/modes/real-time.js.map +1 -0
- package/dist/modes/research.d.ts +79 -0
- package/dist/modes/research.d.ts.map +1 -0
- package/dist/modes/research.js +389 -0
- package/dist/modes/research.js.map +1 -0
- package/dist/pattern-learner.d.ts +117 -0
- package/dist/pattern-learner.d.ts.map +1 -0
- package/dist/pattern-learner.js +603 -0
- package/dist/pattern-learner.js.map +1 -0
- package/dist/reasoning-bank.d.ts +259 -0
- package/dist/reasoning-bank.d.ts.map +1 -0
- package/dist/reasoning-bank.js +993 -0
- package/dist/reasoning-bank.js.map +1 -0
- package/dist/reasoningbank-adapter.d.ts +168 -0
- package/dist/reasoningbank-adapter.d.ts.map +1 -0
- package/dist/reasoningbank-adapter.js +463 -0
- package/dist/reasoningbank-adapter.js.map +1 -0
- package/dist/sona-integration.d.ts +168 -0
- package/dist/sona-integration.d.ts.map +1 -0
- package/dist/sona-integration.js +316 -0
- package/dist/sona-integration.js.map +1 -0
- package/dist/sona-manager.d.ts +147 -0
- package/dist/sona-manager.d.ts.map +1 -0
- package/dist/sona-manager.js +695 -0
- package/dist/sona-manager.js.map +1 -0
- package/dist/types.d.ts +431 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +11 -0
- package/dist/types.js.map +1 -0
- package/package.json +47 -0
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
/**
 * Batch Mode Implementation
 *
 * Optimized for high-throughput processing with:
 * - Large batch sizes (128)
 * - Rank-8 LoRA
 * - Gradient accumulation
 * - Async batch processing
 * - 50ms latency budget
 */
/**
 * Batch mode for high-throughput processing.
 *
 * Work is deferred and processed in batches:
 * - findPatterns() answers small corpora (<100 patterns) synchronously and
 *   queues larger ones behind a 10ms debounce timer.
 * - learn() buffers trajectories until config.batchSize are available, then
 *   accumulates per-domain gradients; accumulated gradients are normalized
 *   and EWC-penalized every 4 accumulation steps.
 */
export class BatchMode extends BaseModeImplementation {
    mode = 'batch';
    // Batch processing queues
    // Pending findPatterns requests: { embedding, k, resolve }.
    patternQueue = [];
    // Trajectories buffered until a full batch (config.batchSize) is ready.
    learningQueue = [];
    // Batch buffers
    embeddingBuffer = null; // reserved; not used by current code paths
    batchEmbeddings = [];
    // Gradient accumulation, keyed by trajectory domain.
    accumulatedGradients = new Map();
    gradientSteps = 0;
    // Batch processing state
    isBatchProcessing = false;
    batchTimer = null;
    // Stats
    totalBatches = 0;
    totalItems = 0;
    totalBatchTime = 0;
    learnIterations = 0;
    /** Reset all queues and accumulators, then delegate to the base class. */
    async initialize() {
        await super.initialize();
        this.patternQueue = [];
        this.learningQueue = [];
        this.accumulatedGradients.clear();
        this.gradientSteps = 0;
    }
    /**
     * Tear down batch state.
     *
     * Fix: queued findPatterns() promises are resolved with an empty match
     * list instead of being dropped — previously a caller awaiting a queued
     * request would hang forever after cleanup. The timer handle is also
     * nulled so a reused instance can schedule batches again.
     */
    async cleanup() {
        if (this.batchTimer) {
            clearTimeout(this.batchTimer);
            this.batchTimer = null;
        }
        // Unblock any callers still awaiting a queued pattern lookup.
        for (const request of this.patternQueue) {
            request.resolve([]);
        }
        this.patternQueue = [];
        this.learningQueue = [];
        this.accumulatedGradients.clear();
        await super.cleanup();
    }
    /**
     * Find the top-k patterns most similar to `embedding`.
     *
     * Small corpora (<100 patterns) are matched synchronously; larger ones
     * are queued and resolved by the next debounced batch pass.
     */
    async findPatterns(embedding, k, patterns) {
        // For immediate needs, process synchronously
        if (patterns.length < 100) {
            return this.findPatternsDirect(embedding, k, patterns);
        }
        // Queue for batch processing
        // NOTE(review): the batch pass searches the `patterns` array captured by
        // the FIRST scheduling call in a 10ms window; requests queued later with
        // a different corpus are matched against that first one — confirm callers
        // always pass the same corpus within a window.
        return new Promise(resolve => {
            this.patternQueue.push({ embedding, k, resolve });
            this.scheduleBatchProcessing(patterns);
        });
    }
    /**
     * Learn from trajectories, accumulating them until a full batch exists.
     *
     * Returns the measured improvement for a processed batch, or a partial
     * estimate (half the above-0.5 average quality) while still buffering.
     */
    async learn(trajectories, config, ewcState) {
        const startTime = performance.now();
        if (trajectories.length === 0)
            return 0;
        // Add to learning queue
        this.learningQueue.push(...trajectories);
        // Process when queue is full
        if (this.learningQueue.length >= config.batchSize) {
            return this.processBatchLearning(config, ewcState);
        }
        // Return estimated improvement
        const avgQuality = trajectories.reduce((s, t) => s + t.qualityScore, 0) / trajectories.length;
        this.totalBatchTime += performance.now() - startTime;
        return Math.max(0, avgQuality - 0.5) * 0.5; // Partial estimate
    }
    /**
     * Apply LoRA adaptation to `input`. Returns the input unchanged when no
     * weights are provided. Concurrent calls that land while another is in
     * flight are flushed together through applyLoRABatch(); the last output
     * corresponds to the current caller's input (pushed last, pre-await).
     */
    async applyLoRA(input, weights) {
        if (!weights) {
            return input;
        }
        // Batch mode can process multiple inputs efficiently
        this.batchEmbeddings.push(new Float32Array(input));
        // Process immediately for single requests
        if (this.batchEmbeddings.length === 1) {
            const output = await this.applyLoRADirect(input, weights);
            this.batchEmbeddings = [];
            return output;
        }
        // For multiple inputs, process as batch
        const outputs = await this.applyLoRABatch(this.batchEmbeddings, weights);
        this.batchEmbeddings = [];
        return outputs[outputs.length - 1];
    }
    /** Snapshot of batching counters and queue depths. */
    getStats() {
        return {
            totalBatches: this.totalBatches,
            avgItemsPerBatch: this.totalBatches > 0 ? this.totalItems / this.totalBatches : 0,
            avgBatchTimeMs: this.totalBatches > 0 ? this.totalBatchTime / this.totalBatches : 0,
            pendingPatternRequests: this.patternQueue.length,
            pendingTrajectories: this.learningQueue.length,
            accumulatedGradientSteps: this.gradientSteps,
            learnIterations: this.learnIterations,
        };
    }
    // ========================================================================
    // Direct processing (for small batches)
    // ========================================================================
    /**
     * Direct pattern matching without batching: cosine similarity against
     * every pattern, sorted descending, truncated to k.
     */
    findPatternsDirect(embedding, k, patterns) {
        const matches = [];
        for (const pattern of patterns) {
            const similarity = this.cosineSimilarity(embedding, pattern.embedding);
            matches.push({
                pattern,
                similarity,
                confidence: similarity * pattern.successRate,
                latencyMs: 0,
            });
        }
        matches.sort((a, b) => b.similarity - a.similarity);
        return matches.slice(0, k);
    }
    /**
     * Direct LoRA application: blends each projection module's adapted
     * output into the result with a fixed alpha of 0.25.
     */
    async applyLoRADirect(input, weights) {
        const output = new Float32Array(input.length);
        output.set(input);
        const rank = this.config.loraRank;
        for (const module of ['q_proj', 'v_proj', 'k_proj', 'o_proj']) {
            const A = weights.A.get(module);
            const B = weights.B.get(module);
            if (A && B) {
                const adapted = this.applyLoRATransform(input, A, B, rank);
                const alpha = 0.25; // fixed blend weight per module
                for (let i = 0; i < output.length; i++) {
                    output[i] = output[i] * (1 - alpha) + adapted[i] * alpha;
                }
            }
        }
        return output;
    }
    // ========================================================================
    // Batch processing
    // ========================================================================
    /**
     * Schedule a debounced batch pass (no-op if one is already pending).
     */
    scheduleBatchProcessing(patterns) {
        if (this.batchTimer)
            return;
        this.batchTimer = setTimeout(() => {
            this.processBatchPatterns(patterns);
        }, 10); // Wait 10ms to accumulate requests
    }
    /**
     * Drain the pattern queue: answer every queued request against the given
     * corpus and update batching stats.
     */
    async processBatchPatterns(patterns) {
        this.batchTimer = null;
        if (this.patternQueue.length === 0)
            return;
        const startTime = performance.now();
        this.isBatchProcessing = true;
        const batch = this.patternQueue;
        this.patternQueue = [];
        // Pre-compute pattern embeddings matrix
        const patternMatrix = patterns.map(p => p.embedding);
        // Process all queries in batch
        for (const request of batch) {
            const matches = this.batchSimilaritySearch(request.embedding, request.k, patterns, patternMatrix);
            request.resolve(matches);
        }
        this.totalBatches++;
        this.totalItems += batch.length;
        this.totalBatchTime += performance.now() - startTime;
        this.isBatchProcessing = false;
    }
    /**
     * Top-k similarity search over a pre-extracted embedding matrix.
     */
    batchSimilaritySearch(query, k, patterns, patternMatrix) {
        const similarities = [];
        for (let i = 0; i < patternMatrix.length; i++) {
            const sim = this.cosineSimilarity(query, patternMatrix[i]);
            similarities.push({ idx: i, sim });
        }
        similarities.sort((a, b) => b.sim - a.sim);
        const topK = similarities.slice(0, k);
        return topK.map(s => ({
            pattern: patterns[s.idx],
            similarity: s.sim,
            confidence: s.sim * patterns[s.idx].successRate,
            latencyMs: 0,
        }));
    }
    /**
     * Process one full learning batch: split by quality threshold, accumulate
     * positive gradients from good trajectories and negative (contrastive,
     * scaled 0.3) gradients from an equal number of bad ones, then apply the
     * accumulated gradients every 4 steps. Returns the above-0.5 average
     * quality of the good trajectories as the improvement estimate.
     */
    async processBatchLearning(config, ewcState) {
        const startTime = performance.now();
        const batch = this.learningQueue.slice(0, config.batchSize);
        this.learningQueue = this.learningQueue.slice(config.batchSize);
        const qualityThreshold = config.qualityThreshold;
        const learningRate = config.learningRate;
        // Separate by quality
        const good = batch.filter(t => t.qualityScore >= qualityThreshold);
        const bad = batch.filter(t => t.qualityScore < qualityThreshold);
        if (good.length === 0) {
            this.totalBatchTime += performance.now() - startTime;
            return 0;
        }
        // Accumulate gradients
        for (const trajectory of good) {
            this.accumulateTrajectoryGradient(trajectory, learningRate);
        }
        // Contrastive learning from bad examples
        for (const trajectory of bad.slice(0, good.length)) {
            this.accumulateTrajectoryGradient(trajectory, -learningRate * 0.3);
        }
        this.gradientSteps++;
        // Apply accumulated gradients every N steps
        if (this.gradientSteps >= 4) {
            await this.applyAccumulatedGradients(ewcState, config.ewcLambda);
            this.gradientSteps = 0;
        }
        // Compute improvement
        const avgQuality = good.reduce((s, t) => s + t.qualityScore, 0) / good.length;
        const improvement = avgQuality - 0.5;
        this.learnIterations++;
        this.totalBatchTime += performance.now() - startTime;
        return Math.max(0, improvement);
    }
    /**
     * Fold one trajectory into its domain's accumulated gradient. Each step
     * contributes stateAfter * qualityScore * scale * reward, elementwise up
     * to the shorter of the gradient and the step's state vector.
     */
    accumulateTrajectoryGradient(trajectory, scale) {
        if (trajectory.steps.length === 0)
            return;
        const key = trajectory.domain;
        let gradient = this.accumulatedGradients.get(key);
        if (!gradient) {
            // Gradient dimensionality follows the first step's post-state.
            const dim = trajectory.steps[0].stateAfter.length;
            gradient = new Float32Array(dim);
            this.accumulatedGradients.set(key, gradient);
        }
        // Add trajectory contribution
        const weight = trajectory.qualityScore * scale;
        for (const step of trajectory.steps) {
            for (let i = 0; i < Math.min(gradient.length, step.stateAfter.length); i++) {
                gradient[i] += step.stateAfter[i] * weight * step.reward;
            }
        }
    }
    /**
     * Normalize each accumulated gradient, subtract the EWC penalty
     * (ewcLambda * fisher * (gradient - mean)) where Fisher information and
     * means exist for the domain, then zero the buffer for the next round.
     *
     * NOTE(review): the penalized gradient is cleared without being written
     * to any weight store visible here — presumably consumption happens (or
     * was intended to happen) elsewhere; verify against the base class.
     */
    async applyAccumulatedGradients(ewcState, ewcLambda) {
        for (const [key, gradient] of this.accumulatedGradients) {
            // Normalize gradient
            const norm = Math.sqrt(gradient.reduce((s, v) => s + v * v, 0));
            if (norm > 0) {
                for (let i = 0; i < gradient.length; i++) {
                    gradient[i] /= norm;
                }
            }
            // Apply EWC penalty
            const fisher = ewcState.fisher.get(key);
            const means = ewcState.means.get(key);
            if (fisher && means) {
                for (let i = 0; i < gradient.length; i++) {
                    const penalty = ewcLambda * fisher[i] * (gradient[i] - means[i]);
                    gradient[i] -= penalty;
                }
            }
            // Clear gradient for next accumulation
            gradient.fill(0);
        }
    }
    /**
     * Apply LoRA to a batch of inputs, producing one output per input in
     * order. Same per-module alpha-blend as applyLoRADirect().
     */
    async applyLoRABatch(inputs, weights) {
        const outputs = [];
        const rank = this.config.loraRank;
        // Process all inputs together for cache efficiency
        for (const input of inputs) {
            const output = new Float32Array(input.length);
            output.set(input);
            for (const module of ['q_proj', 'v_proj', 'k_proj', 'o_proj']) {
                const A = weights.A.get(module);
                const B = weights.B.get(module);
                if (A && B) {
                    const adapted = this.applyLoRATransform(input, A, B, rank);
                    const alpha = 0.25; // fixed blend weight per module
                    for (let i = 0; i < output.length; i++) {
                        output[i] = output[i] * (1 - alpha) + adapted[i] * alpha;
                    }
                }
            }
            outputs.push(output);
        }
        return outputs;
    }
}
//# sourceMappingURL=batch.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch.js","sourceRoot":"","sources":["../../src/modes/batch.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAWH,OAAO,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAEpD;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,sBAAsB;IAC1C,IAAI,GAAG,OAAO,CAAC;IAExB,0BAA0B;IAClB,YAAY,GAIf,EAAE,CAAC;IACA,aAAa,GAAiB,EAAE,CAAC;IAEzC,gBAAgB;IACR,eAAe,GAAwB,IAAI,CAAC;IAC5C,eAAe,GAAmB,EAAE,CAAC;IAE7C,wBAAwB;IAChB,oBAAoB,GAA8B,IAAI,GAAG,EAAE,CAAC;IAC5D,aAAa,GAAG,CAAC,CAAC;IAE1B,yBAAyB;IACjB,iBAAiB,GAAG,KAAK,CAAC;IAC1B,UAAU,GAAyC,IAAI,CAAC;IAEhE,QAAQ;IACA,YAAY,GAAG,CAAC,CAAC;IACjB,UAAU,GAAG,CAAC,CAAC;IACf,cAAc,GAAG,CAAC,CAAC;IACnB,eAAe,GAAG,CAAC,CAAC;IAE5B,KAAK,CAAC,UAAU;QACd,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;QACzB,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;QACvB,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC;QACxB,IAAI,CAAC,oBAAoB,CAAC,KAAK,EAAE,CAAC;QAClC,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED,KAAK,CAAC,OAAO;QACX,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;YACpB,YAAY,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;QAChC,CAAC;QACD,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;QACvB,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC;QACxB,IAAI,CAAC,oBAAoB,CAAC,KAAK,EAAE,CAAC;QAClC,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;IACxB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,YAAY,CAChB,SAAuB,EACvB,CAAS,EACT,QAAmB;QAEnB,6CAA6C;QAC7C,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,EAAE,CAAC;YAC1B,OAAO,IAAI,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC;QACzD,CAAC;QAED,6BAA6B;QAC7B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE;YAC3B,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;YAClD,IAAI,CAAC,uBAAuB,CAAC,QAAQ,CAAC,CAAC;QACzC,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK,CACT,YAA0B,EAC1B,MAAsB,EACtB,QAAkB;QAElB,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,CAAC,CAAC;QAExC,wBAAwB;QACxB,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,GAAG,YAAY,CAAC,CAAC;QAEzC,6BAA6B;QAC7B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;YAClD,OAAO,IAAI,CAAC,oBAAoB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QACrD,CAAC;QAED,+BAA+B;QAC/B,MAAM,UAAU,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,C
AAC,GAAG,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,GAAG,YAAY,CAAC,MAAM,CAAC;QAE9F,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QACrD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,UAAU,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,mBAAmB;IACjE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,SAAS,CACb,KAAmB,EACnB,OAAqB;QAErB,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,OAAO,KAAK,CAAC;QACf,CAAC;QAED,qDAAqD;QACrD,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC;QAEnD,0CAA0C;QAC1C,IAAI,IAAI,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtC,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;YAC1D,IAAI,CAAC,eAAe,GAAG,EAAE,CAAC;YAC1B,OAAO,MAAM,CAAC;QAChB,CAAC;QAED,wCAAwC;QACxC,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;QACzE,IAAI,CAAC,eAAe,GAAG,EAAE,CAAC;QAC1B,OAAO,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,CAAC;IAED,QAAQ;QACN,OAAO;YACL,YAAY,EAAE,IAAI,CAAC,YAAY;YAC/B,gBAAgB,EAAE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACjF,cAAc,EAAE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACnF,sBAAsB,EAAE,IAAI,CAAC,YAAY,CAAC,MAAM;YAChD,mBAAmB,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;YAC9C,wBAAwB,EAAE,IAAI,CAAC,aAAa;YAC5C,eAAe,EAAE,IAAI,CAAC,eAAe;SACtC,CAAC;IACJ,CAAC;IAED,2EAA2E;IAC3E,wCAAwC;IACxC,2EAA2E;IAE3E;;OAEG;IACK,kBAAkB,CACxB,SAAuB,EACvB,CAAS,EACT,QAAmB;QAEnB,MAAM,OAAO,GAAmB,EAAE,CAAC;QAEnC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE,CAAC;YAC/B,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;YACvE,OAAO,CAAC,IAAI,CAAC;gBACX,OAAO;gBACP,UAAU;gBACV,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,WAAW;gBAC5C,SAAS,EAAE,CAAC;aACb,CAAC,CAAC;QACL,CAAC;QAED,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAC,CAAC;QACpD,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC7B,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,eAAe,CAC3B,KAAmB,EACnB,OAAoB;QAEpB,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QAC9C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAElB,MAAM,IAAI,G
AAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC;QAElC,KAAK,MAAM,MAAM,IAAI,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,CAAC,EAAE,CAAC;YAC9D,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAChC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAEhC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACX,MAAM,OAAO,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC;gBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC;gBACnB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACvC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;gBAC3D,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,2EAA2E;IAC3E,mBAAmB;IACnB,2EAA2E;IAE3E;;OAEG;IACK,uBAAuB,CAAC,QAAmB;QACjD,IAAI,IAAI,CAAC,UAAU;YAAE,OAAO;QAE5B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,GAAG,EAAE;YAChC,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC;QACtC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,mCAAmC;IAC7C,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,oBAAoB,CAAC,QAAmB;QACpD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;QACvB,IAAI,IAAI,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE3C,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QACpC,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC;QAE9B,MAAM,KAAK,GAAG,IAAI,CAAC,YAAY,CAAC;QAChC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;QAEvB,wCAAwC;QACxC,MAAM,aAAa,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;QAErD,+BAA+B;QAC/B,KAAK,MAAM,OAAO,IAAI,KAAK,EAAE,CAAC;YAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,qBAAqB,CACxC,OAAO,CAAC,SAAS,EACjB,OAAO,CAAC,CAAC,EACT,QAAQ,EACR,aAAa,CACd,CAAC;YACF,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC3B,CAAC;QAED,IAAI,CAAC,YAAY,EAAE,CAAC;QACpB,IAAI,CAAC,UAAU,IAAI,KAAK,CAAC,MAAM,CAAC;QAChC,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QACrD,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAC;IACjC,CAAC;IAED;;OAEG;IACK,qBAAqB,CAC3B,KAAmB,EACnB,CAAS,EACT,QAAmB,EACnB,aAA6B;QAE7B,MAAM,YAAY,GAAwC,EAAE,CAAC;QAE7D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9C,MAAM,GAAG,GAAG,IAAI,CAAC,gBAAgB,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3D,
YAAY,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;QACrC,CAAC;QAED,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,IAAI,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAEtC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YACpB,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC;YACxB,UAAU,EAAE,CAAC,CAAC,GAAG;YACjB,UAAU,EAAE,CAAC,CAAC,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,WAAW;YAC/C,SAAS,EAAE,CAAC;SACb,CAAC,CAAC,CAAC;IACN,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,oBAAoB,CAChC,MAAsB,EACtB,QAAkB;QAElB,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,EAAE,CAAC;QAEpC,MAAM,KAAK,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;QAC5D,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAEhE,MAAM,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,CAAC;QACjD,MAAM,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;QAEzC,sBAAsB;QACtB,MAAM,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,IAAI,gBAAgB,CAAC,CAAC;QACnE,MAAM,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,YAAY,GAAG,gBAAgB,CAAC,CAAC;QAEjE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtB,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;YACrD,OAAO,CAAC,CAAC;QACX,CAAC;QAED,uBAAuB;QACvB,KAAK,MAAM,UAAU,IAAI,IAAI,EAAE,CAAC;YAC9B,IAAI,CAAC,4BAA4B,CAAC,UAAU,EAAE,YAAY,CAAC,CAAC;QAC9D,CAAC;QAED,yCAAyC;QACzC,KAAK,MAAM,UAAU,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC;YACnD,IAAI,CAAC,4BAA4B,CAAC,UAAU,EAAE,CAAC,YAAY,GAAG,GAAG,CAAC,CAAC;QACrE,CAAC;QAED,IAAI,CAAC,aAAa,EAAE,CAAC;QAErB,4CAA4C;QAC5C,IAAI,IAAI,CAAC,aAAa,IAAI,CAAC,EAAE,CAAC;YAC5B,MAAM,IAAI,CAAC,yBAAyB,CAAC,QAAQ,EAAE,MAAM,CAAC,SAAS,CAAC,CAAC;YACjE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;QACzB,CAAC;QAED,sBAAsB;QACtB,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,YAAY,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC;QAC9E,MAAM,WAAW,GAAG,UAAU,GAAG,GAAG,CAAC;QAErC,IAAI,CAAC,eAAe,EAAE,CAAC;QACvB,IAAI,CAAC,cAAc,IAAI,WAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAErD,
OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC;IAClC,CAAC;IAED;;OAEG;IACK,4BAA4B,CAClC,UAAsB,EACtB,KAAa;QAEb,IAAI,UAAU,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE1C,MAAM,GAAG,GAAG,UAAU,CAAC,MAAM,CAAC;QAC9B,IAAI,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QAElD,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,MAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC;YAClD,QAAQ,GAAG,IAAI,YAAY,CAAC,GAAG,CAAC,CAAC;YACjC,IAAI,CAAC,oBAAoB,CAAC,GAAG,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAC/C,CAAC;QAED,8BAA8B;QAC9B,MAAM,MAAM,GAAG,UAAU,CAAC,YAAY,GAAG,KAAK,CAAC;QAC/C,KAAK,MAAM,IAAI,IAAI,UAAU,CAAC,KAAK,EAAE,CAAC;YACpC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC3E,QAAQ,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;YAC3D,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,yBAAyB,CACrC,QAAkB,EAClB,SAAiB;QAEjB,KAAK,MAAM,CAAC,GAAG,EAAE,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,EAAE,CAAC;YACxD,qBAAqB;YACrB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YAChE,IAAI,IAAI,GAAG,CAAC,EAAE,CAAC;gBACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACzC,QAAQ,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACtB,CAAC;YACH,CAAC;YAED,oBAAoB;YACpB,MAAM,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACxC,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAEtC,IAAI,MAAM,IAAI,KAAK,EAAE,CAAC;gBACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;oBACzC,MAAM,OAAO,GAAG,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;oBACjE,QAAQ,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC;gBACzB,CAAC;YACH,CAAC;YAED,uCAAuC;YACvC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC;IACH,CAAC;IAED;;OAEG;IACK,KAAK,CAAC,cAAc,CAC1B,MAAsB,EACtB,OAAoB;QAEpB,MAAM,OAAO,GAAmB,EAAE,CAAC;QACnC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,
CAAC,QAAQ,CAAC;QAElC,mDAAmD;QACnD,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC3B,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YAC9C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAElB,KAAK,MAAM,MAAM,IAAI,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,CAAC,EAAE,CAAC;gBAC9D,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBAChC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBAEhC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;oBACX,MAAM,OAAO,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,CAAC;oBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC;oBACnB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;wBACvC,MAAM,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;oBAC3D,CAAC;gBACH,CAAC;YACH,CAAC;YAED,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACvB,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;CACF"}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Edge Mode Implementation
|
|
3
|
+
*
|
|
4
|
+
* Optimized for resource-constrained environments with:
|
|
5
|
+
* - <5MB memory footprint
|
|
6
|
+
* - Minimal latency (<1ms)
|
|
7
|
+
* - Micro-LoRA (rank-1)
|
|
8
|
+
* - Aggressive pruning
|
|
9
|
+
* - Async updates
|
|
10
|
+
*/
|
|
11
|
+
import type { SONAModeConfig, Trajectory, Pattern, PatternMatch, LoRAWeights, EWCState } from '../types.js';
|
|
12
|
+
import { BaseModeImplementation } from './index.js';
|
|
13
|
+
/**
|
|
14
|
+
* Edge mode for resource-constrained devices
|
|
15
|
+
*/
|
|
16
|
+
export declare class EdgeMode extends BaseModeImplementation {
    readonly mode = "edge";
    // 8-bit compressed pattern store — see compressEmbedding / createCompressedPattern.
    private compressedPatterns;
    // Int8-quantized LoRA weights and their dequantization scale — see quantizeWeights.
    private quantizedWeights;
    private quantizationScale;
    // Updates deferred for async application — see queueAsyncUpdate / processAsyncUpdates.
    private pendingUpdates;
    private updateTimer;
    // Counters feeding getStats().
    private totalOps;
    private totalTime;
    initialize(): Promise<void>;
    cleanup(): Promise<void>;
    /**
     * Find patterns using compressed embeddings
     */
    findPatterns(embedding: Float32Array, k: number, patterns: Pattern[]): Promise<PatternMatch[]>;
    /**
     * Lightweight learning with async updates
     */
    learn(trajectories: Trajectory[], config: SONAModeConfig, ewcState: EWCState): Promise<number>;
    /**
     * Apply quantized LoRA
     */
    applyLoRA(input: Float32Array, weights?: LoRAWeights): Promise<Float32Array>;
    getStats(): Record<string, number>;
    /**
     * Compress embedding to 8-bit representation
     */
    private compressEmbedding;
    /**
     * Create compressed pattern representation
     */
    private createCompressedPattern;
    /**
     * Fast similarity on compressed embeddings
     */
    private compressedSimilarity;
    /**
     * Get or create quantized weights
     */
    private getOrQuantize;
    /**
     * Quantize float weights to int8
     */
    private quantizeWeights;
    /**
     * Apply LoRA with quantized weights
     */
    private applyQuantizedLoRA;
    /**
     * Queue an async update
     */
    private queueAsyncUpdate;
    /**
     * Process pending async updates
     */
    private processAsyncUpdates;
    /**
     * Perform lightweight parameter update
     */
    private performLightweightUpdate;
    /**
     * Find most similar compressed pattern
     */
    private findSimilarCompressedPattern;
    /**
     * Estimate memory usage in MB
     */
    private estimateMemoryUsage;
}
|
|
85
|
+
//# sourceMappingURL=edge.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"edge.d.ts","sourceRoot":"","sources":["../../src/modes/edge.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EACV,cAAc,EAEd,UAAU,EACV,OAAO,EACP,YAAY,EACZ,WAAW,EACX,QAAQ,EACT,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAEpD;;GAEG;AACH,qBAAa,QAAS,SAAQ,sBAAsB;IAClD,QAAQ,CAAC,IAAI,UAAU;IAGvB,OAAO,CAAC,kBAAkB,CAA6C;IAGvE,OAAO,CAAC,gBAAgB,CAAqC;IAC7D,OAAO,CAAC,iBAAiB,CAAe;IAGxC,OAAO,CAAC,cAAc,CAAkC;IACxD,OAAO,CAAC,WAAW,CAA8C;IAGjE,OAAO,CAAC,QAAQ,CAAK;IACrB,OAAO,CAAC,SAAS,CAAK;IAEhB,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAO3B,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAU9B;;OAEG;IACG,YAAY,CAChB,SAAS,EAAE,YAAY,EACvB,CAAC,EAAE,MAAM,EACT,QAAQ,EAAE,OAAO,EAAE,GAClB,OAAO,CAAC,YAAY,EAAE,CAAC;IAoC1B;;OAEG;IACG,KAAK,CACT,YAAY,EAAE,UAAU,EAAE,EAC1B,MAAM,EAAE,cAAc,EACtB,QAAQ,EAAE,QAAQ,GACjB,OAAO,CAAC,MAAM,CAAC;IAwBlB;;OAEG;IACG,SAAS,CACb,KAAK,EAAE,YAAY,EACnB,OAAO,CAAC,EAAE,WAAW,GACpB,OAAO,CAAC,YAAY,CAAC;IA6BxB,QAAQ,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAclC;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAYzB;;OAEG;IACH,OAAO,CAAC,uBAAuB;IAS/B;;OAEG;IACH,OAAO,CAAC,oBAAoB;IAmB5B;;OAEG;IACH,OAAO,CAAC,aAAa;IASrB;;OAEG;IACH,OAAO,CAAC,eAAe;IAYvB;;OAEG;IACH,OAAO,CAAC,kBAAkB;IA6B1B;;OAEG;IACH,OAAO,CAAC,gBAAgB;IAWxB;;OAEG;YACW,mBAAmB;IAwBjC;;OAEG;YACW,wBAAwB;IAuBtC;;OAEG;IACH,OAAO,CAAC,4BAA4B;IAgBpC;;OAEG;IACH,OAAO,CAAC,mBAAmB;CAkB5B"}
|