@sparkleideas/plugins 3.0.0-alpha.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +401 -0
- package/__tests__/collection-manager.test.ts +332 -0
- package/__tests__/dependency-graph.test.ts +434 -0
- package/__tests__/enhanced-plugin-registry.test.ts +488 -0
- package/__tests__/plugin-registry.test.ts +368 -0
- package/__tests__/ruvector-bridge.test.ts +2429 -0
- package/__tests__/ruvector-integration.test.ts +1602 -0
- package/__tests__/ruvector-migrations.test.ts +1099 -0
- package/__tests__/ruvector-quantization.test.ts +846 -0
- package/__tests__/ruvector-streaming.test.ts +1088 -0
- package/__tests__/sdk.test.ts +325 -0
- package/__tests__/security.test.ts +348 -0
- package/__tests__/utils/ruvector-test-utils.ts +860 -0
- package/examples/plugin-creator/index.ts +636 -0
- package/examples/plugin-creator/plugin-creator.test.ts +312 -0
- package/examples/ruvector/README.md +288 -0
- package/examples/ruvector/attention-patterns.ts +394 -0
- package/examples/ruvector/basic-usage.ts +288 -0
- package/examples/ruvector/docker-compose.yml +75 -0
- package/examples/ruvector/gnn-analysis.ts +501 -0
- package/examples/ruvector/hyperbolic-hierarchies.ts +557 -0
- package/examples/ruvector/init-db.sql +119 -0
- package/examples/ruvector/quantization.ts +680 -0
- package/examples/ruvector/self-learning.ts +447 -0
- package/examples/ruvector/semantic-search.ts +576 -0
- package/examples/ruvector/streaming-large-data.ts +507 -0
- package/examples/ruvector/transactions.ts +594 -0
- package/examples/ruvector-plugins/hook-pattern-library.ts +486 -0
- package/examples/ruvector-plugins/index.ts +79 -0
- package/examples/ruvector-plugins/intent-router.ts +354 -0
- package/examples/ruvector-plugins/mcp-tool-optimizer.ts +424 -0
- package/examples/ruvector-plugins/reasoning-bank.ts +657 -0
- package/examples/ruvector-plugins/ruvector-plugins.test.ts +518 -0
- package/examples/ruvector-plugins/semantic-code-search.ts +498 -0
- package/examples/ruvector-plugins/shared/index.ts +20 -0
- package/examples/ruvector-plugins/shared/vector-utils.ts +257 -0
- package/examples/ruvector-plugins/sona-learning.ts +445 -0
- package/package.json +97 -0
- package/src/collections/collection-manager.ts +661 -0
- package/src/collections/index.ts +56 -0
- package/src/collections/official/index.ts +1040 -0
- package/src/core/base-plugin.ts +416 -0
- package/src/core/plugin-interface.ts +215 -0
- package/src/hooks/index.ts +685 -0
- package/src/index.ts +378 -0
- package/src/integrations/agentic-flow.ts +743 -0
- package/src/integrations/index.ts +88 -0
- package/src/integrations/ruvector/ARCHITECTURE.md +1245 -0
- package/src/integrations/ruvector/attention-advanced.ts +1040 -0
- package/src/integrations/ruvector/attention-executor.ts +782 -0
- package/src/integrations/ruvector/attention-mechanisms.ts +757 -0
- package/src/integrations/ruvector/attention.ts +1063 -0
- package/src/integrations/ruvector/gnn.ts +3050 -0
- package/src/integrations/ruvector/hyperbolic.ts +1948 -0
- package/src/integrations/ruvector/index.ts +394 -0
- package/src/integrations/ruvector/migrations/001_create_extension.sql +135 -0
- package/src/integrations/ruvector/migrations/002_create_vector_tables.sql +259 -0
- package/src/integrations/ruvector/migrations/003_create_indices.sql +328 -0
- package/src/integrations/ruvector/migrations/004_create_functions.sql +598 -0
- package/src/integrations/ruvector/migrations/005_create_attention_functions.sql +654 -0
- package/src/integrations/ruvector/migrations/006_create_gnn_functions.sql +728 -0
- package/src/integrations/ruvector/migrations/007_create_hyperbolic_functions.sql +762 -0
- package/src/integrations/ruvector/migrations/index.ts +35 -0
- package/src/integrations/ruvector/migrations/migrations.ts +647 -0
- package/src/integrations/ruvector/quantization.ts +2036 -0
- package/src/integrations/ruvector/ruvector-bridge.ts +2000 -0
- package/src/integrations/ruvector/self-learning.ts +2376 -0
- package/src/integrations/ruvector/streaming.ts +1737 -0
- package/src/integrations/ruvector/types.ts +1945 -0
- package/src/providers/index.ts +643 -0
- package/src/registry/dependency-graph.ts +568 -0
- package/src/registry/enhanced-plugin-registry.ts +994 -0
- package/src/registry/plugin-registry.ts +604 -0
- package/src/sdk/index.ts +563 -0
- package/src/security/index.ts +594 -0
- package/src/types/index.ts +446 -0
- package/src/workers/index.ts +700 -0
- package/tmp.json +0 -0
- package/tsconfig.json +25 -0
- package/vitest.config.ts +23 -0
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared Vector Database Utilities
|
|
3
|
+
*
|
|
4
|
+
* Consolidated implementations for all RuVector plugins.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
// ============================================================================
|
|
8
|
+
// Interfaces
|
|
9
|
+
// ============================================================================
|
|
10
|
+
|
|
11
|
+
/**
 * Vector database interface for HNSW operations
 *
 * Implemented by the @ruvector/wasm backend and by FallbackVectorDB below.
 */
export interface IVectorDB {
  /** Store `vector` under `id` with optional metadata; returns the id the entry was stored under. */
  insert(vector: Float32Array, id: string, metadata?: Record<string, unknown>): string;
  /**
   * k-nearest-neighbour search. Higher `score` means more similar (cosine in
   * the fallback). `filter` optionally restricts candidates by metadata.
   */
  search(query: Float32Array, k: number, filter?: Record<string, unknown>): Array<{
    id: string;
    score: number;
    metadata?: Record<string, unknown>;
  }>;
  /** Optional direct lookup by id; `null` when the id is unknown. */
  get?(id: string): { vector: Float32Array; metadata: Record<string, unknown> } | null;
  /** Remove an entry; returns true if it existed. */
  delete(id: string): boolean;
  /** Number of stored vectors. */
  size(): number;
}
|
|
25
|
+
|
|
26
|
+
/**
 * LoRA engine interface for neural adaptation
 *
 * Implemented by the @ruvector/learning-wasm backend and by FallbackLoRAEngine below.
 */
export interface ILoRAEngine {
  /** Create a low-rank adapter associated with a pattern category. */
  createAdapter(category: string, rank: number): Promise<LoRAAdapter>;
  /** Apply one gradient-descent step to the adapter's weights. */
  updateAdapter(adapterId: string, gradient: Float32Array, learningRate: number): Promise<void>;
  /** Optional elastic-weight-consolidation step; not every backend provides it. */
  applyEWC?(adapterId: string, lambda: number): Promise<void>;
  /** Compute a gradient from an input embedding and a target vector (synchronous). */
  computeGradient(input: Float32Array, target: Float32Array): Float32Array;
}
|
|
35
|
+
|
|
36
|
+
/**
 * Handle for a single LoRA adapter managed by an ILoRAEngine.
 */
export interface LoRAAdapter {
  /** Engine-assigned adapter id (e.g. "adapter-1" in the fallback engine). */
  id: string;
  /** Pattern category this adapter was created for. */
  category: string;
  /** Low-rank decomposition rank. */
  rank: number;
  /** LoRA scaling factor (the fallback engine sets this to 16). */
  alpha: number;
}
|
|
42
|
+
|
|
43
|
+
// ============================================================================
|
|
44
|
+
// Fallback Implementations
|
|
45
|
+
// ============================================================================
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Fallback vector database when @ruvector/wasm is not available.
|
|
49
|
+
* Uses in-memory Map with brute-force cosine similarity search.
|
|
50
|
+
*/
|
|
51
|
+
export class FallbackVectorDB implements IVectorDB {
|
|
52
|
+
private vectors = new Map<string, { vector: Float32Array; metadata: Record<string, unknown> }>();
|
|
53
|
+
|
|
54
|
+
constructor(private dimensions: number) {}
|
|
55
|
+
|
|
56
|
+
insert(vector: Float32Array, id: string, metadata: Record<string, unknown> = {}): string {
|
|
57
|
+
this.vectors.set(id, { vector, metadata });
|
|
58
|
+
return id;
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
search(query: Float32Array, k: number): Array<{ id: string; score: number; metadata?: Record<string, unknown> }> {
|
|
62
|
+
const results: Array<{ id: string; score: number; metadata?: Record<string, unknown> }> = [];
|
|
63
|
+
|
|
64
|
+
for (const [id, entry] of this.vectors) {
|
|
65
|
+
const score = cosineSimilarity(query, entry.vector);
|
|
66
|
+
results.push({ id, score, metadata: entry.metadata });
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
return results.sort((a, b) => b.score - a.score).slice(0, k);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
get(id: string): { vector: Float32Array; metadata: Record<string, unknown> } | null {
|
|
73
|
+
return this.vectors.get(id) ?? null;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
delete(id: string): boolean {
|
|
77
|
+
return this.vectors.delete(id);
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
size(): number {
|
|
81
|
+
return this.vectors.size;
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
|
|
86
|
+
* Fallback LoRA engine when @ruvector/learning-wasm is not available.
|
|
87
|
+
* Uses simple gradient descent with in-memory weights.
|
|
88
|
+
*/
|
|
89
|
+
export class FallbackLoRAEngine implements ILoRAEngine {
|
|
90
|
+
private adapters = new Map<string, LoRAAdapter>();
|
|
91
|
+
private adapterWeights = new Map<string, Float32Array>();
|
|
92
|
+
private nextId = 1;
|
|
93
|
+
|
|
94
|
+
async createAdapter(category: string, rank: number): Promise<LoRAAdapter> {
|
|
95
|
+
const adapter: LoRAAdapter = {
|
|
96
|
+
id: `adapter-${this.nextId++}`,
|
|
97
|
+
category,
|
|
98
|
+
rank,
|
|
99
|
+
alpha: 16,
|
|
100
|
+
};
|
|
101
|
+
this.adapters.set(adapter.id, adapter);
|
|
102
|
+
this.adapterWeights.set(adapter.id, new Float32Array(rank * 768));
|
|
103
|
+
return adapter;
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
async updateAdapter(adapterId: string, gradient: Float32Array, learningRate: number): Promise<void> {
|
|
107
|
+
const weights = this.adapterWeights.get(adapterId);
|
|
108
|
+
if (weights) {
|
|
109
|
+
const len = Math.min(weights.length, gradient.length);
|
|
110
|
+
for (let i = 0; i < len; i++) {
|
|
111
|
+
weights[i] -= learningRate * gradient[i];
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
async applyEWC(adapterId: string, lambda: number): Promise<void> {
|
|
117
|
+
const weights = this.adapterWeights.get(adapterId);
|
|
118
|
+
if (weights) {
|
|
119
|
+
for (let i = 0; i < weights.length; i++) {
|
|
120
|
+
weights[i] *= 1 - lambda * 0.01;
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
computeGradient(input: Float32Array, target: Float32Array): Float32Array {
|
|
126
|
+
const gradient = new Float32Array(input.length);
|
|
127
|
+
for (let i = 0; i < input.length; i++) {
|
|
128
|
+
gradient[i] = (input[i] - (target[i] || 0)) * 0.01;
|
|
129
|
+
}
|
|
130
|
+
return gradient;
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
// ============================================================================
|
|
135
|
+
// Factory Functions
|
|
136
|
+
// ============================================================================
|
|
137
|
+
|
|
138
|
+
/**
|
|
139
|
+
* Create a vector database - uses @ruvector/wasm in production, fallback otherwise.
|
|
140
|
+
*/
|
|
141
|
+
export async function createVectorDB(dimensions: number): Promise<IVectorDB> {
|
|
142
|
+
try {
|
|
143
|
+
// @ts-expect-error - @ruvector/wasm types may not be available
|
|
144
|
+
const { VectorDB: RuVectorDB } = await import('@ruvector/wasm');
|
|
145
|
+
const db = new RuVectorDB({
|
|
146
|
+
dimensions,
|
|
147
|
+
indexType: 'hnsw',
|
|
148
|
+
metric: 'cosine',
|
|
149
|
+
efConstruction: 200,
|
|
150
|
+
m: 16,
|
|
151
|
+
});
|
|
152
|
+
await db.initialize?.();
|
|
153
|
+
return db as IVectorDB;
|
|
154
|
+
} catch {
|
|
155
|
+
console.warn('[@sparkleideas/plugins] @ruvector/wasm not available, using fallback');
|
|
156
|
+
return new FallbackVectorDB(dimensions);
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
/**
|
|
161
|
+
* Create a LoRA engine - uses @ruvector/learning-wasm in production, fallback otherwise.
|
|
162
|
+
*/
|
|
163
|
+
export async function createLoRAEngine(): Promise<ILoRAEngine> {
|
|
164
|
+
try {
|
|
165
|
+
// @ts-expect-error - @ruvector/learning-wasm types may not be available
|
|
166
|
+
const { LoRAEngine } = await import('@ruvector/learning-wasm');
|
|
167
|
+
const engine = new LoRAEngine({ defaultRank: 8, defaultAlpha: 16 });
|
|
168
|
+
await engine.initialize?.();
|
|
169
|
+
return engine as ILoRAEngine;
|
|
170
|
+
} catch {
|
|
171
|
+
console.warn('[@sparkleideas/plugins] @ruvector/learning-wasm not available, using fallback');
|
|
172
|
+
return new FallbackLoRAEngine();
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
// ============================================================================
|
|
177
|
+
// Utility Functions
|
|
178
|
+
// ============================================================================
|
|
179
|
+
|
|
180
|
+
/**
|
|
181
|
+
* Compute cosine similarity between two vectors.
|
|
182
|
+
* Returns value in range [-1, 1] where 1 = identical.
|
|
183
|
+
*/
|
|
184
|
+
export function cosineSimilarity(a: Float32Array, b: Float32Array): number {
|
|
185
|
+
let dot = 0;
|
|
186
|
+
let normA = 0;
|
|
187
|
+
let normB = 0;
|
|
188
|
+
const len = Math.min(a.length, b.length);
|
|
189
|
+
|
|
190
|
+
for (let i = 0; i < len; i++) {
|
|
191
|
+
dot += a[i] * b[i];
|
|
192
|
+
normA += a[i] * a[i];
|
|
193
|
+
normB += b[i] * b[i];
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
const magnitude = Math.sqrt(normA) * Math.sqrt(normB);
|
|
197
|
+
return magnitude === 0 ? 0 : dot / magnitude;
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
/**
|
|
201
|
+
* Generate a simple hash-based embedding for text.
|
|
202
|
+
* Use for fallback when no embedding model is available.
|
|
203
|
+
*/
|
|
204
|
+
export function generateHashEmbedding(text: string, dimensions: number): Float32Array {
|
|
205
|
+
const embedding = new Float32Array(dimensions);
|
|
206
|
+
const normalized = text.toLowerCase();
|
|
207
|
+
let hash = 0;
|
|
208
|
+
|
|
209
|
+
for (let i = 0; i < normalized.length; i++) {
|
|
210
|
+
hash = ((hash << 5) - hash) + normalized.charCodeAt(i);
|
|
211
|
+
hash = hash & hash;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
for (let i = 0; i < dimensions; i++) {
|
|
215
|
+
embedding[i] = Math.sin(hash * (i + 1) * 0.001) * 0.5 + 0.5;
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
// Normalize
|
|
219
|
+
let norm = 0;
|
|
220
|
+
for (let i = 0; i < dimensions; i++) {
|
|
221
|
+
norm += embedding[i] * embedding[i];
|
|
222
|
+
}
|
|
223
|
+
norm = Math.sqrt(norm);
|
|
224
|
+
if (norm > 0) {
|
|
225
|
+
for (let i = 0; i < dimensions; i++) {
|
|
226
|
+
embedding[i] /= norm;
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
return embedding;
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
/**
|
|
234
|
+
* Lazy initialization mixin for async-initialized classes.
|
|
235
|
+
*/
|
|
236
|
+
export abstract class LazyInitializable {
|
|
237
|
+
protected initPromise: Promise<void> | null = null;
|
|
238
|
+
protected initialized = false;
|
|
239
|
+
|
|
240
|
+
abstract doInitialize(): Promise<void>;
|
|
241
|
+
|
|
242
|
+
async initialize(): Promise<void> {
|
|
243
|
+
if (this.initialized) return;
|
|
244
|
+
if (this.initPromise) return this.initPromise;
|
|
245
|
+
|
|
246
|
+
this.initPromise = (async () => {
|
|
247
|
+
await this.doInitialize();
|
|
248
|
+
this.initialized = true;
|
|
249
|
+
})();
|
|
250
|
+
|
|
251
|
+
return this.initPromise;
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
protected async ensureInitialized(): Promise<void> {
|
|
255
|
+
await this.initialize();
|
|
256
|
+
}
|
|
257
|
+
}
|
|
@@ -0,0 +1,445 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SONA Learning Plugin
|
|
3
|
+
*
|
|
4
|
+
* Self-Optimizing Neural Adaptation using @ruvector/learning-wasm.
|
|
5
|
+
* Enables <100μs real-time adaptation through LoRA fine-tuning.
|
|
6
|
+
*
|
|
7
|
+
* Features:
|
|
8
|
+
* - Ultra-fast pattern learning (<100μs)
|
|
9
|
+
* - LoRA adapter management
|
|
10
|
+
* - EWC++ for catastrophic forgetting prevention
|
|
11
|
+
* - Pattern-based behavior optimization
|
|
12
|
+
* - Quality score tracking
|
|
13
|
+
*
|
|
14
|
+
* @example
|
|
15
|
+
* ```typescript
|
|
16
|
+
* import { sonaLearningPlugin } from '@sparkleideas/plugins/examples/ruvector-plugins';
|
|
17
|
+
* await getDefaultRegistry().register(sonaLearningPlugin);
|
|
18
|
+
* ```
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
import {
|
|
22
|
+
PluginBuilder,
|
|
23
|
+
MCPToolBuilder,
|
|
24
|
+
HookBuilder,
|
|
25
|
+
HookEvent,
|
|
26
|
+
HookPriority,
|
|
27
|
+
Security,
|
|
28
|
+
} from '../../src/index.js';
|
|
29
|
+
|
|
30
|
+
// Import shared vector utilities (consolidated from all plugins)
|
|
31
|
+
import {
|
|
32
|
+
IVectorDB,
|
|
33
|
+
ILoRAEngine,
|
|
34
|
+
LoRAAdapter,
|
|
35
|
+
createVectorDB,
|
|
36
|
+
createLoRAEngine,
|
|
37
|
+
generateHashEmbedding,
|
|
38
|
+
} from './shared/vector-utils.js';
|
|
39
|
+
|
|
40
|
+
// ============================================================================
|
|
41
|
+
// Types
|
|
42
|
+
// ============================================================================
|
|
43
|
+
|
|
44
|
+
/**
 * A single learned trigger→action association tracked by SONALearning.
 */
export interface LearningPattern {
  id: string;                       // "pattern-N", assigned by SONALearning.learn()
  category: string;                 // groups patterns onto one LoRA adapter
  trigger: string;                  // what situation the pattern fires on
  action: string;                   // what was done in response
  context: Record<string, unknown>; // arbitrary caller-supplied context (stored as-is)
  quality: number;                  // score in [0, 1]; low-quality patterns get pruned
  usageCount: number;               // incremented on each apply()
  lastUsed: Date;
  createdAt: Date;
  embedding?: Float32Array;         // hash-based embedding; stripped by export()
}
|
|
56
|
+
|
|
57
|
+
/**
 * Outcome of applying a pattern via SONALearning.apply().
 */
export interface AdaptationResult {
  patternId: string;
  applied: boolean;       // true when the pattern was found and applied
  adaptationTime: number; // microseconds
  qualityDelta: number;   // change in quality from this application (apply() currently always reports 0)
  newQuality: number;     // pattern quality after application
}
|
|
64
|
+
|
|
65
|
+
/**
 * Tunables for SONALearning; see the constructor for default values.
 */
export interface SONAConfig {
  learningRate: number;     // SGD step size for adapter updates (default 0.001)
  ewcLambda: number;        // EWC regularization strength (default 0.1)
  maxPatterns: number;      // pruning kicks in above this pattern count (default 10000)
  qualityThreshold: number; // retrieve() skips patterns below this quality (default 0.5)
  adaptationBudget: number; // max microseconds — NOTE(review): not consulted anywhere in this file; confirm intended use
  loraRank: number;         // rank for newly created LoRA adapters (default 8)
}
|
|
73
|
+
|
|
74
|
+
// ============================================================================
|
|
75
|
+
// SONA Learning Core
|
|
76
|
+
// ============================================================================
|
|
77
|
+
|
|
78
|
+
export class SONALearning {
|
|
79
|
+
private loraEngine: ILoRAEngine | null = null;
|
|
80
|
+
private vectorDb: IVectorDB | null = null;
|
|
81
|
+
private patterns = new Map<string, LearningPattern>();
|
|
82
|
+
private adapters = new Map<string, LoRAAdapter>();
|
|
83
|
+
private config: SONAConfig;
|
|
84
|
+
private dimensions = 768;
|
|
85
|
+
private nextId = 1;
|
|
86
|
+
private initPromise: Promise<void> | null = null;
|
|
87
|
+
|
|
88
|
+
constructor(config?: Partial<SONAConfig>) {
|
|
89
|
+
this.config = {
|
|
90
|
+
learningRate: 0.001,
|
|
91
|
+
ewcLambda: 0.1,
|
|
92
|
+
maxPatterns: 10000,
|
|
93
|
+
qualityThreshold: 0.5,
|
|
94
|
+
adaptationBudget: 100,
|
|
95
|
+
loraRank: 8,
|
|
96
|
+
...config,
|
|
97
|
+
};
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
async initialize(): Promise<void> {
|
|
101
|
+
if (this.loraEngine && this.vectorDb) return;
|
|
102
|
+
if (this.initPromise) return this.initPromise;
|
|
103
|
+
|
|
104
|
+
this.initPromise = (async () => {
|
|
105
|
+
this.loraEngine = await createLoRAEngine();
|
|
106
|
+
this.vectorDb = await createVectorDB(this.dimensions);
|
|
107
|
+
})();
|
|
108
|
+
|
|
109
|
+
return this.initPromise;
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
private async ensureInitialized(): Promise<{ lora: ILoRAEngine; db: IVectorDB }> {
|
|
113
|
+
await this.initialize();
|
|
114
|
+
return { lora: this.loraEngine!, db: this.vectorDb! };
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
/**
|
|
118
|
+
* Learn a new pattern (<100μs with @ruvector/learning-wasm).
|
|
119
|
+
*/
|
|
120
|
+
async learn(
|
|
121
|
+
category: string,
|
|
122
|
+
trigger: string,
|
|
123
|
+
action: string,
|
|
124
|
+
context: Record<string, unknown>,
|
|
125
|
+
quality: number
|
|
126
|
+
): Promise<LearningPattern> {
|
|
127
|
+
const { lora, db } = await this.ensureInitialized();
|
|
128
|
+
const startTime = performance.now();
|
|
129
|
+
|
|
130
|
+
const safeCategory = Security.validateString(category, { maxLength: 100 });
|
|
131
|
+
const safeTrigger = Security.validateString(trigger, { maxLength: 1000 });
|
|
132
|
+
const safeAction = Security.validateString(action, { maxLength: 1000 });
|
|
133
|
+
const safeQuality = Security.validateNumber(quality, { min: 0, max: 1 });
|
|
134
|
+
|
|
135
|
+
const id = `pattern-${this.nextId++}`;
|
|
136
|
+
const embedding = this.generatePatternEmbedding(safeTrigger, safeAction, safeCategory);
|
|
137
|
+
|
|
138
|
+
const pattern: LearningPattern = {
|
|
139
|
+
id,
|
|
140
|
+
category: safeCategory,
|
|
141
|
+
trigger: safeTrigger,
|
|
142
|
+
action: safeAction,
|
|
143
|
+
context,
|
|
144
|
+
quality: safeQuality,
|
|
145
|
+
usageCount: 0,
|
|
146
|
+
lastUsed: new Date(),
|
|
147
|
+
createdAt: new Date(),
|
|
148
|
+
embedding,
|
|
149
|
+
};
|
|
150
|
+
|
|
151
|
+
// Get or create LoRA adapter for this category
|
|
152
|
+
let adapter = this.adapters.get(safeCategory);
|
|
153
|
+
if (!adapter) {
|
|
154
|
+
adapter = await lora.createAdapter(safeCategory, this.config.loraRank);
|
|
155
|
+
this.adapters.set(safeCategory, adapter);
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
// Compute and apply gradient with LoRA
|
|
159
|
+
const target = new Float32Array(embedding.length).fill(safeQuality);
|
|
160
|
+
const gradient = lora.computeGradient(embedding, target);
|
|
161
|
+
await lora.updateAdapter(adapter.id, gradient, this.config.learningRate);
|
|
162
|
+
|
|
163
|
+
// Apply EWC++ to prevent catastrophic forgetting
|
|
164
|
+
await lora.applyEWC(adapter.id, this.config.ewcLambda);
|
|
165
|
+
|
|
166
|
+
// Store in vector DB
|
|
167
|
+
db.insert(embedding, id, { category: safeCategory, quality: safeQuality });
|
|
168
|
+
this.patterns.set(id, pattern);
|
|
169
|
+
|
|
170
|
+
// Prune if over limit
|
|
171
|
+
if (this.patterns.size > this.config.maxPatterns) {
|
|
172
|
+
await this.prunePatterns();
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
const adaptationTime = (performance.now() - startTime) * 1000; // microseconds
|
|
176
|
+
console.debug(`[SONA] Learned pattern in ${adaptationTime.toFixed(1)}μs`);
|
|
177
|
+
|
|
178
|
+
return pattern;
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
/**
|
|
182
|
+
* Retrieve patterns matching a trigger.
|
|
183
|
+
*/
|
|
184
|
+
async retrieve(trigger: string, category?: string, k: number = 5): Promise<LearningPattern[]> {
|
|
185
|
+
const { db } = await this.ensureInitialized();
|
|
186
|
+
|
|
187
|
+
const safeTrigger = Security.validateString(trigger, { maxLength: 1000 });
|
|
188
|
+
const queryEmbedding = this.generatePatternEmbedding(safeTrigger, '', category || '');
|
|
189
|
+
const searchResults = db.search(queryEmbedding, k * 2);
|
|
190
|
+
|
|
191
|
+
const results: LearningPattern[] = [];
|
|
192
|
+
for (const result of searchResults) {
|
|
193
|
+
const pattern = this.patterns.get(result.id);
|
|
194
|
+
if (!pattern) continue;
|
|
195
|
+
if (category && pattern.category !== category) continue;
|
|
196
|
+
if (pattern.quality < this.config.qualityThreshold) continue;
|
|
197
|
+
results.push(pattern);
|
|
198
|
+
if (results.length >= k) break;
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
return results;
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
/**
|
|
205
|
+
* Apply a pattern and track adaptation.
|
|
206
|
+
*/
|
|
207
|
+
async apply(patternId: string): Promise<AdaptationResult> {
|
|
208
|
+
const startTime = performance.now();
|
|
209
|
+
const pattern = this.patterns.get(patternId);
|
|
210
|
+
if (!pattern) throw new Error(`Pattern ${patternId} not found`);
|
|
211
|
+
|
|
212
|
+
pattern.usageCount++;
|
|
213
|
+
pattern.lastUsed = new Date();
|
|
214
|
+
|
|
215
|
+
return {
|
|
216
|
+
patternId,
|
|
217
|
+
applied: true,
|
|
218
|
+
adaptationTime: (performance.now() - startTime) * 1000,
|
|
219
|
+
qualityDelta: 0,
|
|
220
|
+
newQuality: pattern.quality,
|
|
221
|
+
};
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
/**
|
|
225
|
+
* Update pattern quality based on outcome.
|
|
226
|
+
*/
|
|
227
|
+
async feedback(patternId: string, success: boolean, qualityDelta?: number): Promise<void> {
|
|
228
|
+
const { lora } = await this.ensureInitialized();
|
|
229
|
+
const pattern = this.patterns.get(patternId);
|
|
230
|
+
if (!pattern) throw new Error(`Pattern ${patternId} not found`);
|
|
231
|
+
|
|
232
|
+
const delta = qualityDelta ?? (success ? 0.05 : -0.1);
|
|
233
|
+
pattern.quality = Math.max(0, Math.min(1, pattern.quality + delta));
|
|
234
|
+
|
|
235
|
+
// Update LoRA adapter with feedback
|
|
236
|
+
const adapter = this.adapters.get(pattern.category);
|
|
237
|
+
if (adapter && pattern.embedding) {
|
|
238
|
+
const target = new Float32Array(pattern.embedding.length).fill(pattern.quality);
|
|
239
|
+
const gradient = lora.computeGradient(pattern.embedding, target);
|
|
240
|
+
await lora.updateAdapter(adapter.id, gradient, this.config.learningRate * 0.1);
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
if (pattern.quality < 0.1) {
|
|
244
|
+
this.patterns.delete(patternId);
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
/**
|
|
249
|
+
* Get learning statistics.
|
|
250
|
+
*/
|
|
251
|
+
getStats(): {
|
|
252
|
+
totalPatterns: number;
|
|
253
|
+
totalAdapters: number;
|
|
254
|
+
byCategory: Record<string, { count: number; avgQuality: number }>;
|
|
255
|
+
avgQuality: number;
|
|
256
|
+
topPatterns: LearningPattern[];
|
|
257
|
+
} {
|
|
258
|
+
const byCategory: Record<string, { count: number; totalQuality: number }> = {};
|
|
259
|
+
let totalQuality = 0;
|
|
260
|
+
|
|
261
|
+
for (const pattern of this.patterns.values()) {
|
|
262
|
+
if (!byCategory[pattern.category]) {
|
|
263
|
+
byCategory[pattern.category] = { count: 0, totalQuality: 0 };
|
|
264
|
+
}
|
|
265
|
+
byCategory[pattern.category].count++;
|
|
266
|
+
byCategory[pattern.category].totalQuality += pattern.quality;
|
|
267
|
+
totalQuality += pattern.quality;
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
const categoryStats: Record<string, { count: number; avgQuality: number }> = {};
|
|
271
|
+
for (const [cat, stats] of Object.entries(byCategory)) {
|
|
272
|
+
categoryStats[cat] = { count: stats.count, avgQuality: stats.count > 0 ? stats.totalQuality / stats.count : 0 };
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
const topPatterns = Array.from(this.patterns.values())
|
|
276
|
+
.sort((a, b) => (b.quality * b.usageCount) - (a.quality * a.usageCount))
|
|
277
|
+
.slice(0, 5);
|
|
278
|
+
|
|
279
|
+
return {
|
|
280
|
+
totalPatterns: this.patterns.size,
|
|
281
|
+
totalAdapters: this.adapters.size,
|
|
282
|
+
byCategory: categoryStats,
|
|
283
|
+
avgQuality: this.patterns.size > 0 ? totalQuality / this.patterns.size : 0,
|
|
284
|
+
topPatterns,
|
|
285
|
+
};
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
/**
|
|
289
|
+
* Export learned patterns.
|
|
290
|
+
*/
|
|
291
|
+
export(): { patterns: LearningPattern[]; config: SONAConfig } {
|
|
292
|
+
return {
|
|
293
|
+
patterns: Array.from(this.patterns.values()).map(p => ({ ...p, embedding: undefined })),
|
|
294
|
+
config: this.config,
|
|
295
|
+
};
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
/**
|
|
299
|
+
* Import patterns.
|
|
300
|
+
*/
|
|
301
|
+
async import(data: { patterns: LearningPattern[]; config?: Partial<SONAConfig> }): Promise<number> {
|
|
302
|
+
if (data.config) this.config = { ...this.config, ...data.config };
|
|
303
|
+
|
|
304
|
+
let imported = 0;
|
|
305
|
+
for (const pattern of data.patterns) {
|
|
306
|
+
const embedding = this.generatePatternEmbedding(pattern.trigger, pattern.action, pattern.category);
|
|
307
|
+
this.patterns.set(pattern.id, { ...pattern, embedding });
|
|
308
|
+
imported++;
|
|
309
|
+
}
|
|
310
|
+
return imported;
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
// =========================================================================
|
|
314
|
+
// Private Helpers
|
|
315
|
+
// =========================================================================
|
|
316
|
+
|
|
317
|
+
private generatePatternEmbedding(trigger: string, action: string, category: string): Float32Array {
|
|
318
|
+
const text = `${category} ${trigger} ${action}`.toLowerCase();
|
|
319
|
+
const embedding = new Float32Array(this.dimensions);
|
|
320
|
+
let hash = 0;
|
|
321
|
+
for (let i = 0; i < text.length; i++) { hash = ((hash << 5) - hash) + text.charCodeAt(i); hash = hash & hash; }
|
|
322
|
+
for (let i = 0; i < this.dimensions; i++) { embedding[i] = Math.sin(hash * (i + 1) * 0.001) * 0.5 + 0.5; }
|
|
323
|
+
let norm = 0;
|
|
324
|
+
for (let i = 0; i < this.dimensions; i++) norm += embedding[i] * embedding[i];
|
|
325
|
+
norm = Math.sqrt(norm);
|
|
326
|
+
for (let i = 0; i < this.dimensions; i++) embedding[i] /= norm;
|
|
327
|
+
return embedding;
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
private async prunePatterns(): Promise<void> {
|
|
331
|
+
const { db } = await this.ensureInitialized();
|
|
332
|
+
const sorted = Array.from(this.patterns.entries()).sort((a, b) => a[1].quality - b[1].quality);
|
|
333
|
+
const toRemove = sorted.slice(0, Math.floor(this.config.maxPatterns * 0.1));
|
|
334
|
+
for (const [id] of toRemove) {
|
|
335
|
+
db.delete(id);
|
|
336
|
+
this.patterns.delete(id);
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
// ============================================================================
|
|
342
|
+
// Plugin Definition
|
|
343
|
+
// ============================================================================
|
|
344
|
+
|
|
345
|
+
let sonaInstance: SONALearning | null = null;
|
|
346
|
+
|
|
347
|
+
async function getSONALearning(): Promise<SONALearning> {
|
|
348
|
+
if (!sonaInstance) {
|
|
349
|
+
sonaInstance = new SONALearning();
|
|
350
|
+
await sonaInstance.initialize();
|
|
351
|
+
}
|
|
352
|
+
return sonaInstance;
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
export const sonaLearningPlugin = new PluginBuilder('sona-learning', '1.0.0')
|
|
356
|
+
.withDescription('Self-Optimizing Neural Adaptation with @ruvector/learning-wasm (<100μs LoRA)')
|
|
357
|
+
.withAuthor('Claude Flow Team')
|
|
358
|
+
.withTags(['learning', 'neural', 'adaptation', 'lora', 'ruvector', 'sona', 'ewc'])
|
|
359
|
+
.withMCPTools([
|
|
360
|
+
new MCPToolBuilder('sona-learn')
|
|
361
|
+
.withDescription('Learn a new pattern (<100μs with LoRA)')
|
|
362
|
+
.addStringParam('category', 'Pattern category', { required: true })
|
|
363
|
+
.addStringParam('trigger', 'What triggered this pattern', { required: true })
|
|
364
|
+
.addStringParam('action', 'What action was taken', { required: true })
|
|
365
|
+
.addStringParam('context', 'JSON context data')
|
|
366
|
+
.addNumberParam('quality', 'Quality score 0-1', { default: 0.7, minimum: 0, maximum: 1 })
|
|
367
|
+
.withHandler(async (params) => {
|
|
368
|
+
try {
|
|
369
|
+
const sona = await getSONALearning();
|
|
370
|
+
const context = params.context ? JSON.parse(params.context as string) : {};
|
|
371
|
+
const pattern = await sona.learn(params.category as string, params.trigger as string, params.action as string, context, params.quality as number);
|
|
372
|
+
return { content: [{ type: 'text', text: `🧠 **Learned:** ${pattern.id}\nCategory: ${pattern.category}\nQuality: ${(pattern.quality * 100).toFixed(1)}%` }] };
|
|
373
|
+
} catch (error) {
|
|
374
|
+
return { content: [{ type: 'text', text: `❌ Error: ${error instanceof Error ? error.message : String(error)}` }], isError: true };
|
|
375
|
+
}
|
|
376
|
+
})
|
|
377
|
+
.build(),
|
|
378
|
+
|
|
379
|
+
new MCPToolBuilder('sona-retrieve')
|
|
380
|
+
.withDescription('Retrieve patterns matching a trigger')
|
|
381
|
+
.addStringParam('trigger', 'Trigger to match', { required: true })
|
|
382
|
+
.addStringParam('category', 'Filter by category')
|
|
383
|
+
.addNumberParam('k', 'Number of patterns', { default: 5 })
|
|
384
|
+
.withHandler(async (params) => {
|
|
385
|
+
try {
|
|
386
|
+
const sona = await getSONALearning();
|
|
387
|
+
const patterns = await sona.retrieve(params.trigger as string, params.category as string | undefined, params.k as number);
|
|
388
|
+
if (patterns.length === 0) return { content: [{ type: 'text', text: '🔍 No matching patterns.' }] };
|
|
389
|
+
const output = patterns.map((p, i) => `${i + 1}. **${p.id}** [${p.category}] (q: ${(p.quality * 100).toFixed(0)}%)\n ${p.action.substring(0, 50)}...`).join('\n\n');
|
|
390
|
+
return { content: [{ type: 'text', text: `🧠 **Found ${patterns.length} patterns:**\n\n${output}` }] };
|
|
391
|
+
} catch (error) {
|
|
392
|
+
return { content: [{ type: 'text', text: `❌ Error: ${error instanceof Error ? error.message : String(error)}` }], isError: true };
|
|
393
|
+
}
|
|
394
|
+
})
|
|
395
|
+
.build(),
|
|
396
|
+
|
|
397
|
+
new MCPToolBuilder('sona-feedback')
|
|
398
|
+
.withDescription('Provide feedback on a pattern')
|
|
399
|
+
.addStringParam('patternId', 'Pattern ID', { required: true })
|
|
400
|
+
.addBooleanParam('success', 'Was successful?', { required: true })
|
|
401
|
+
.withHandler(async (params) => {
|
|
402
|
+
try {
|
|
403
|
+
const sona = await getSONALearning();
|
|
404
|
+
await sona.feedback(params.patternId as string, params.success as boolean);
|
|
405
|
+
return { content: [{ type: 'text', text: `✅ Feedback recorded: ${params.success ? 'Success' : 'Failure'}` }] };
|
|
406
|
+
} catch (error) {
|
|
407
|
+
return { content: [{ type: 'text', text: `❌ Error: ${error instanceof Error ? error.message : String(error)}` }], isError: true };
|
|
408
|
+
}
|
|
409
|
+
})
|
|
410
|
+
.build(),
|
|
411
|
+
|
|
412
|
+
new MCPToolBuilder('sona-stats')
|
|
413
|
+
.withDescription('Get SONA learning statistics')
|
|
414
|
+
.withHandler(async () => {
|
|
415
|
+
const sona = await getSONALearning();
|
|
416
|
+
const stats = sona.getStats();
|
|
417
|
+
return { content: [{ type: 'text', text: `🧠 **SONA Stats:**\n\n**Patterns:** ${stats.totalPatterns}\n**LoRA Adapters:** ${stats.totalAdapters}\n**Avg Quality:** ${(stats.avgQuality * 100).toFixed(1)}%\n**Backend:** @ruvector/learning-wasm` }] };
|
|
418
|
+
})
|
|
419
|
+
.build(),
|
|
420
|
+
])
|
|
421
|
+
.withHooks([
|
|
422
|
+
new HookBuilder(HookEvent.PostTaskComplete)
|
|
423
|
+
.withName('sona-auto-learn')
|
|
424
|
+
.withDescription('Auto-learn from successful completions')
|
|
425
|
+
.withPriority(HookPriority.Low)
|
|
426
|
+
.when((ctx) => (ctx.data as { success?: boolean; category?: string } | undefined)?.success === true)
|
|
427
|
+
.handle(async (ctx) => {
|
|
428
|
+
const data = ctx.data as { category?: string; trigger?: string; action?: string; context?: Record<string, unknown> };
|
|
429
|
+
if (!data.trigger || !data.action) return { success: true };
|
|
430
|
+
try {
|
|
431
|
+
const sona = await getSONALearning();
|
|
432
|
+
await sona.learn(data.category || 'general', data.trigger, data.action, data.context || {}, 0.75);
|
|
433
|
+
} catch { /* silent */ }
|
|
434
|
+
return { success: true };
|
|
435
|
+
})
|
|
436
|
+
.build(),
|
|
437
|
+
])
|
|
438
|
+
.onInitialize(async (ctx) => {
|
|
439
|
+
ctx.logger.info('SONA Learning initializing with @ruvector/learning-wasm...');
|
|
440
|
+
await getSONALearning();
|
|
441
|
+
ctx.logger.info('SONA ready - LoRA adaptation <100μs, EWC++ enabled');
|
|
442
|
+
})
|
|
443
|
+
.build();
|
|
444
|
+
|
|
445
|
+
// Default export mirrors the named `sonaLearningPlugin` export for convenience.
export default sonaLearningPlugin;
|