@claude-flow/cli 3.0.0-alpha.174 → 3.0.0-alpha.176

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,146 @@
1
+ /**
2
+ * Registry API Client
3
+ * Secure integration with Claude Flow Cloud Functions
4
+ *
5
+ * Security:
6
+ * - HTTPS only
7
+ * - No credentials stored in code
8
+ * - Rate limiting respected
9
+ * - Input validation
10
+ */
11
+ // Single HTTPS endpoint for the registry Cloud Function; every call below selects a handler via the `action` query parameter.
+ const REGISTRY_API_URL = 'https://us-central1-claude-flow.cloudfunctions.net/publish-registry';
12
+ /**
13
+ * Validate item ID to prevent injection
14
+ */
15
/**
 * Check that an item ID is safe to embed in a request.
 * Accepts only alphanumerics plus `@`, `/`, `-`, `_`, shorter than 100 chars.
 *
 * @param {string} itemId - Candidate plugin/model identifier.
 * @returns {boolean} True when the ID is well-formed.
 */
function validateItemId(itemId) {
    const ID_PATTERN = /^[@a-zA-Z0-9\/_-]+$/;
    return itemId.length < 100 && ID_PATTERN.test(itemId);
}
19
+ /**
20
+ * Validate rating value
21
+ */
22
/**
 * Check that a rating is an integer in the inclusive range 1..5.
 *
 * @param {number} rating - Candidate rating value.
 * @returns {boolean} True when the rating is acceptable.
 */
function validateRating(rating) {
    if (!Number.isInteger(rating)) {
        return false;
    }
    return rating >= 1 && rating <= 5;
}
25
+ /**
26
+ * Rate a plugin or model
27
+ */
28
/**
 * Submit a 1-5 rating for a plugin or model.
 *
 * @param {string} itemId - Registry identifier (validated before sending).
 * @param {number} rating - Integer 1-5.
 * @param {string} [itemType='plugin'] - 'plugin' or other item category.
 * @param {string} [userId] - Optional user attribution; omitted when falsy.
 * @returns {Promise<object>} Parsed JSON response from the registry.
 * @throws {Error} On invalid input or a non-2xx response (includes server text).
 */
export async function rateItem(itemId, rating, itemType = 'plugin', userId) {
    if (!validateItemId(itemId)) {
        throw new Error('Invalid item ID');
    }
    if (!validateRating(rating)) {
        throw new Error('Rating must be integer 1-5');
    }
    // Only attach userId when provided, so anonymous ratings stay anonymous.
    const payload = { itemId, rating, itemType };
    if (userId) {
        payload.userId = userId;
    }
    const response = await fetch(`${REGISTRY_API_URL}?action=rate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload),
        signal: AbortSignal.timeout(10000),
    });
    if (!response.ok) {
        const error = await response.text();
        throw new Error(`Rating failed: ${error}`);
    }
    return response.json();
}
52
+ /**
53
+ * Get ratings for a single item
54
+ */
55
/**
 * Fetch the rating summary for a single item.
 *
 * @param {string} itemId - Registry identifier (validated before sending).
 * @param {string} [itemType='plugin'] - Item category.
 * @returns {Promise<object>} Parsed JSON rating data.
 * @throws {Error} On invalid ID or a non-2xx response.
 */
export async function getRating(itemId, itemType = 'plugin') {
    if (!validateItemId(itemId)) {
        throw new Error('Invalid item ID');
    }
    const query = new URLSearchParams();
    query.set('action', 'get-ratings');
    query.set('itemId', itemId);
    query.set('itemType', itemType);
    const response = await fetch(`${REGISTRY_API_URL}?${query}`, {
        signal: AbortSignal.timeout(10000),
    });
    if (!response.ok) {
        throw new Error('Failed to get ratings');
    }
    return response.json();
}
72
+ /**
73
+ * Get ratings for multiple items (batch)
74
+ */
75
/**
 * Fetch ratings for many items in one request.
 * All supplied IDs are validated; at most the first 50 are sent to the server.
 *
 * @param {string[]} itemIds - Registry identifiers.
 * @param {string} [itemType='plugin'] - Item category applied to the batch.
 * @returns {Promise<object>} Parsed JSON map of ratings.
 * @throws {Error} When any ID is malformed or the response is non-2xx.
 */
export async function getBulkRatings(itemIds, itemType = 'plugin') {
    // Reject the whole request if any supplied ID looks malformed.
    for (const candidate of itemIds) {
        if (!validateItemId(candidate)) {
            throw new Error(`Invalid item ID: ${candidate}`);
        }
    }
    // Server-friendly cap: at most 50 IDs per call.
    const batch = itemIds.slice(0, 50);
    const response = await fetch(`${REGISTRY_API_URL}?action=bulk-ratings`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ itemIds: batch, itemType }),
        signal: AbortSignal.timeout(15000),
    });
    if (!response.ok) {
        throw new Error('Failed to get bulk ratings');
    }
    return response.json();
}
98
+ /**
99
+ * Get analytics data
100
+ */
101
/**
 * Fetch registry-wide analytics data.
 *
 * @returns {Promise<object>} Parsed JSON analytics payload.
 * @throws {Error} On a non-2xx response or a 10 s timeout.
 */
export async function getAnalytics() {
    const response = await fetch(`${REGISTRY_API_URL}?action=analytics`, {
        signal: AbortSignal.timeout(10000),
    });
    if (response.ok) {
        return response.json();
    }
    throw new Error('Failed to get analytics');
}
110
+ /**
111
+ * Track a download event
112
+ */
113
/**
 * Fire-and-forget download telemetry.
 * Never throws: invalid IDs are ignored, and network/timeout failures are
 * swallowed because tracking is non-critical.
 *
 * @param {string} pluginId - Registry identifier of the downloaded plugin.
 * @returns {Promise<void>}
 */
export async function trackDownload(pluginId) {
    if (!validateItemId(pluginId)) {
        return; // Silently fail for invalid IDs
    }
    const request = {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ pluginId }),
        signal: AbortSignal.timeout(5000),
    };
    try {
        await fetch(`${REGISTRY_API_URL}?action=track-download`, request);
    }
    catch {
        // Non-critical, don't throw
    }
}
129
+ /**
130
+ * Check API health
131
+ */
132
/**
 * Check registry API health.
 * Contract: always resolves — on any failure it resolves to
 * `{ healthy: false, error }` instead of rejecting.
 *
 * Fix: the original did `return response.json()` without `await`, so a
 * body-parse rejection (e.g. non-JSON error page) escaped the try/catch and
 * the returned promise rejected, violating the never-throws contract.
 * `return await` keeps the rejection inside the catch.
 *
 * @returns {Promise<object>} Status payload, or `{healthy:false, error}`.
 */
export async function checkHealth() {
    try {
        const response = await fetch(`${REGISTRY_API_URL}?action=status`, {
            signal: AbortSignal.timeout(5000),
        });
        // `await` is required here so a JSON-parse failure is caught below.
        return await response.json();
    }
    catch (error) {
        return {
            healthy: false,
            error: error instanceof Error ? error.message : 'Unknown error',
        };
    }
}
146
+ //# sourceMappingURL=registry-api.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"registry-api.js","sourceRoot":"","sources":["../../../src/services/registry-api.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,MAAM,gBAAgB,GAAG,qEAAqE,CAAC;AAwB/F;;GAEG;AACH,SAAS,cAAc,CAAC,MAAc;IACpC,sCAAsC;IACtC,OAAO,qBAAqB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,CAAC,MAAM,GAAG,GAAG,CAAC;AACnE,CAAC;AAED;;GAEG;AACH,SAAS,cAAc,CAAC,MAAc;IACpC,OAAO,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,MAAM,IAAI,CAAC,IAAI,MAAM,IAAI,CAAC,CAAC;AAChE,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,QAAQ,CAC5B,MAAc,EACd,MAAc,EACd,WAA+B,QAAQ,EACvC,MAAe;IAEf,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,EAAE,CAAC;QAC5B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;IACD,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,EAAE,CAAC;QAC5B,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;IAChD,CAAC;IAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,gBAAgB,cAAc,EAAE;QAC9D,MAAM,EAAE,MAAM;QACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;QAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;YACnB,MAAM;YACN,MAAM;YACN,QAAQ;YACR,GAAG,CAAC,MAAM,IAAI,EAAE,MAAM,EAAE,CAAC;SAC1B,CAAC;QACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC;KACnC,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,KAAK,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QACpC,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,EAAE,CAAC,CAAC;IAC7C,CAAC;IAED,OAAO,QAAQ,CAAC,IAAI,EAA6B,CAAC;AACpD,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAC7B,MAAc,EACd,WAA+B,QAAQ;IAEvC,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,EAAE,CAAC;QAC5B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;IACrC,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,eAAe,CAAC;QACjC,MAAM,EAAE,aAAa;QACrB,MAAM;QACN,QAAQ;KACT,CAAC,CAAC;IAEH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,gBAAgB,IAAI,MAAM,EAAE,EAAE;QAC5D,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC;KACnC,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAC;IAC3C,CAAC;IAED,OAAO,QAAQ,CAAC,IAAI,EAA6B,CAAC;AACpD,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,OAAiB,EACjB,WAA+B,QAAQ;IAEvC,mBAAmB;IACnB,KAAK,MAAM,EAAE,IAAI,OAAO,EAAE,CAAC;QACzB,IAAI,CAAC,cAAc,CAAC,EAAE,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAA
C;QAC5C,CAAC;IACH,CAAC;IAED,mBAAmB;IACnB,MAAM,UAAU,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IAExC,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,gBAAgB,sBAAsB,EAAE;QACtE,MAAM,EAAE,MAAM;QACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;QAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;YACnB,OAAO,EAAE,UAAU;YACnB,QAAQ;SACT,CAAC;QACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC;KACnC,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;IAChD,CAAC;IAED,OAAO,QAAQ,CAAC,IAAI,EAAkC,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY;IAChC,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,gBAAgB,mBAAmB,EAAE;QACnE,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC;KACnC,CAAC,CAAC;IAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;IAC7C,CAAC;IAED,OAAO,QAAQ,CAAC,IAAI,EAAgC,CAAC;AACvD,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAAC,QAAgB;IAClD,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC9B,OAAO,CAAC,gCAAgC;IAC1C,CAAC;IAED,IAAI,CAAC;QACH,MAAM,KAAK,CAAC,GAAG,gBAAgB,wBAAwB,EAAE;YACvD,MAAM,EAAE,MAAM;YACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;YAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,QAAQ,EAAE,CAAC;YAClC,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC;SAClC,CAAC,CAAC;IACL,CAAC;IAAC,MAAM,CAAC;QACP,4BAA4B;IAC9B,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,WAAW;IAK/B,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,gBAAgB,gBAAgB,EAAE;YAChE,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC;SAClC,CAAC,CAAC;QACH,OAAO,QAAQ,CAAC,IAAI,EAAuE,CAAC;IAC9F,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe;SAChE,CAAC;IACJ,CAAC;AACH,CAAC"}
@@ -0,0 +1,170 @@
1
+ /**
2
+ * RuVector Training Service
3
+ * Real WASM-accelerated neural training using @ruvector packages
4
+ *
5
+ * Features:
6
+ * - MicroLoRA: <100µs adaptation with rank-2 LoRA
7
+ * - Flash Attention: 2.49x-7.47x speedup
8
+ * - Trajectory Buffer: Learning from success/failure
9
+ * - Contrastive Learning: InfoNCE loss
10
+ *
11
+ * Created with ❤️ by ruv.io
12
+ */
13
+ import type { BenchmarkResult } from '@ruvector/attention';
14
+ export interface TrainingConfig {
15
+ dim?: number; // embedding dimension; implementation caps this at 256 (WASM limit), default 256
16
+ learningRate?: number; // LoRA/optimizer learning rate, default 0.01
17
+ alpha?: number; // LoRA scaling factor, default 0.1
18
+ trajectoryCapacity?: number; // trajectory buffer capacity, default 10000
19
+ useFlashAttention?: boolean; // enabled unless explicitly set to false
20
+ useMoE?: boolean; // opt-in: MoE attention (8 experts, top-2 routing)
21
+ useHyperbolic?: boolean; // opt-in: hyperbolic attention (curvature 1.0)
22
+ totalSteps?: number; // when set, enables the curriculum scheduler
23
+ warmupSteps?: number; // curriculum warmup; defaults to 10% of totalSteps
24
+ }
25
+ export interface TrainingResult {
26
+ success: boolean;
27
+ adaptationCount: bigint;
28
+ forwardCount: bigint;
29
+ deltaNorm: number;
30
+ trajectoryStats?: {
31
+ successRate: number;
32
+ meanImprovement: number;
33
+ bestImprovement: number;
34
+ totalCount: bigint;
35
+ };
36
+ benchmark?: BenchmarkResult[];
37
+ }
38
+ /**
39
+ * Initialize the RuVector training system
40
+ */
41
+ export declare function initializeTraining(config?: TrainingConfig): Promise<{
42
+ success: boolean;
43
+ features: string[];
44
+ error?: string;
45
+ }>;
46
+ /**
47
+ * Operator types for scoped LoRA (0-16)
48
+ */
49
+ export declare const OperatorType: {
50
+ readonly GENERAL: 0;
51
+ readonly ATTENTION: 1;
52
+ readonly MLP: 2;
53
+ readonly EMBEDDING: 3;
54
+ readonly NORMALIZATION: 4;
55
+ readonly PROJECTION: 5;
56
+ readonly POOLING: 6;
57
+ readonly CONVOLUTION: 7;
58
+ readonly RECURRENT: 8;
59
+ readonly ROUTING: 9;
60
+ readonly MEMORY: 10;
61
+ readonly REASONING: 11;
62
+ readonly COORDINATION: 12;
63
+ readonly OPTIMIZATION: 13;
64
+ readonly SECURITY: 14;
65
+ readonly TESTING: 15;
66
+ readonly DEBUGGING: 16;
67
+ };
68
+ /**
69
+ * Train a pattern with MicroLoRA
70
+ */
71
+ export declare function trainPattern(embedding: Float32Array, gradient: Float32Array, operatorType?: number): Promise<{
72
+ deltaNorm: number;
73
+ adaptCount: bigint;
74
+ }>;
75
+ /**
76
+ * Forward pass through LoRA
77
+ */
78
+ export declare function forward(input: Float32Array, operatorType?: number): Float32Array;
79
+ /**
80
+ * Reward-based adaptation (reinforcement learning)
81
+ */
82
+ export declare function adaptWithReward(improvement: number, operatorType?: number): void;
83
+ /**
84
+ * Record a learning trajectory
85
+ */
86
+ export declare function recordTrajectory(embedding: Float32Array, operatorType: number, attentionType: number, executionMs: number, baselineMs: number): void;
87
+ /**
88
+ * Get trajectory statistics
89
+ */
90
+ export declare function getTrajectoryStats(): {
91
+ successRate: number;
92
+ meanImprovement: number;
93
+ bestImprovement: number;
94
+ totalCount: bigint;
95
+ highQualityCount: number;
96
+ variance: number;
97
+ } | null;
98
+ /**
99
+ * Compute attention with Flash Attention (2.49x-7.47x faster)
100
+ */
101
+ export declare function computeFlashAttention(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
102
+ /**
103
+ * Compute MoE routing
104
+ */
105
+ export declare function computeMoEAttention(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
106
+ /**
107
+ * Compute hyperbolic attention (for hierarchical patterns)
108
+ */
109
+ export declare function computeHyperbolicAttention(query: Float32Array, keys: Float32Array[], values: Float32Array[]): Float32Array;
110
+ /**
111
+ * Compute contrastive loss for training
112
+ */
113
+ export declare function computeContrastiveLoss(anchor: Float32Array, positives: Float32Array[], negatives: Float32Array[]): {
114
+ loss: number;
115
+ gradient: Float32Array;
116
+ };
117
+ /**
118
+ * Optimizer step
119
+ */
120
+ export declare function optimizerStep(params: Float32Array, gradients: Float32Array): Float32Array;
121
+ /**
122
+ * Get curriculum difficulty for current step
123
+ */
124
+ export declare function getCurriculumDifficulty(step: number): number;
125
+ /**
126
+ * Mine hard negatives for better training
127
+ */
128
+ export declare function mineHardNegatives(anchor: Float32Array, candidates: Float32Array[]): number[];
129
+ /**
130
+ * Benchmark the training system
131
+ */
132
+ export declare function benchmarkTraining(dim?: number, iterations?: number): Promise<BenchmarkResult[]>;
133
+ /**
134
+ * Get training statistics
135
+ */
136
+ export declare function getTrainingStats(): {
137
+ initialized: boolean;
138
+ totalAdaptations: number;
139
+ totalForwards: number;
140
+ microLoraStats?: {
141
+ paramCount: number;
142
+ adaptCount: bigint;
143
+ forwardCount: bigint;
144
+ deltaNorm: number;
145
+ };
146
+ scopedLoraStats?: {
147
+ totalAdaptCount: bigint;
148
+ totalForwardCount: bigint;
149
+ };
150
+ trajectoryStats?: ReturnType<typeof getTrajectoryStats>;
151
+ lastBenchmark?: BenchmarkResult[];
152
+ };
153
+ /**
154
+ * Reset the training system
155
+ */
156
+ export declare function resetTraining(): void;
157
+ /**
158
+ * Export trained weights
159
+ */
160
+ export declare function exportWeights(): {
161
+ dim: number;
162
+ deltaNorm: number;
163
+ adaptCount: bigint;
164
+ trajectoryStats: ReturnType<typeof getTrajectoryStats>;
165
+ } | null;
166
+ /**
167
+ * Cleanup resources
168
+ */
169
+ export declare function cleanup(): void;
170
+ //# sourceMappingURL=ruvector-training.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ruvector-training.d.ts","sourceRoot":"","sources":["../../../src/services/ruvector-training.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAQH,OAAO,KAAK,EAQV,eAAe,EAChB,MAAM,qBAAqB,CAAC;AAoB7B,MAAM,WAAW,cAAc;IAC7B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,cAAc;IAC7B,OAAO,EAAE,OAAO,CAAC;IACjB,eAAe,EAAE,MAAM,CAAC;IACxB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE;QAChB,WAAW,EAAE,MAAM,CAAC;QACpB,eAAe,EAAE,MAAM,CAAC;QACxB,eAAe,EAAE,MAAM,CAAC;QACxB,UAAU,EAAE,MAAM,CAAC;KACpB,CAAC;IACF,SAAS,CAAC,EAAE,eAAe,EAAE,CAAC;CAC/B;AAED;;GAEG;AACH,wBAAsB,kBAAkB,CAAC,MAAM,GAAE,cAAmB,GAAG,OAAO,CAAC;IAC7E,OAAO,EAAE,OAAO,CAAC;IACjB,QAAQ,EAAE,MAAM,EAAE,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB,CAAC,CAsFD;AAED;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;CAkBf,CAAC;AAEX;;GAEG;AACH,wBAAsB,YAAY,CAChC,SAAS,EAAE,YAAY,EACvB,QAAQ,EAAE,YAAY,EACtB,YAAY,CAAC,EAAE,MAAM,GACpB,OAAO,CAAC;IAAE,SAAS,EAAE,MAAM,CAAC;IAAC,UAAU,EAAE,MAAM,CAAA;CAAE,CAAC,CAsBpD;AAED;;GAEG;AACH,wBAAgB,OAAO,CACrB,KAAK,EAAE,YAAY,EACnB,YAAY,CAAC,EAAE,MAAM,GACpB,YAAY,CAYd;AAED;;GAEG;AACH,wBAAgB,eAAe,CAC7B,WAAW,EAAE,MAAM,EACnB,YAAY,CAAC,EAAE,MAAM,GACpB,IAAI,CAYN;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,YAAY,EACvB,YAAY,EAAE,MAAM,EACpB,aAAa,EAAE,MAAM,EACrB,WAAW,EAAE,MAAM,EACnB,UAAU,EAAE,MAAM,GACjB,IAAI,CAYN;AAED;;GAEG;AACH,wBAAgB,kBAAkB,IAAI;IACpC,WAAW,EAAE,MAAM,CAAC;IACpB,eAAe,EAAE,MAAM,CAAC;IACxB,eAAe,EAAE,MAAM,CAAC;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,gBAAgB,EAAE,MAAM,CAAC;IACzB,QAAQ,EAAE,MAAM,CAAC;CAClB,GAAG,IAAI,CAaP;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CACnC,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY,CAMd;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CACjC,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,G
ACrB,YAAY,CAMd;AAED;;GAEG;AACH,wBAAgB,0BAA0B,CACxC,KAAK,EAAE,YAAY,EACnB,IAAI,EAAE,YAAY,EAAE,EACpB,MAAM,EAAE,YAAY,EAAE,GACrB,YAAY,CAMd;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CACpC,MAAM,EAAE,YAAY,EACpB,SAAS,EAAE,YAAY,EAAE,EACzB,SAAS,EAAE,YAAY,EAAE,GACxB;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,YAAY,CAAA;CAAE,CAS1C;AAED;;GAEG;AACH,wBAAgB,aAAa,CAC3B,MAAM,EAAE,YAAY,EACpB,SAAS,EAAE,YAAY,GACtB,YAAY,CAMd;AAED;;GAEG;AACH,wBAAgB,uBAAuB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAM5D;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,MAAM,EAAE,YAAY,EACpB,UAAU,EAAE,YAAY,EAAE,GACzB,MAAM,EAAE,CAMV;AAED;;GAEG;AACH,wBAAsB,iBAAiB,CACrC,GAAG,CAAC,EAAE,MAAM,EACZ,UAAU,CAAC,EAAE,MAAM,GAClB,OAAO,CAAC,eAAe,EAAE,CAAC,CAI5B;AAED;;GAEG;AACH,wBAAgB,gBAAgB,IAAI;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,aAAa,EAAE,MAAM,CAAC;IACtB,cAAc,CAAC,EAAE;QACf,UAAU,EAAE,MAAM,CAAC;QACnB,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;IACF,eAAe,CAAC,EAAE;QAChB,eAAe,EAAE,MAAM,CAAC;QACxB,iBAAiB,EAAE,MAAM,CAAC;KAC3B,CAAC;IACF,eAAe,CAAC,EAAE,UAAU,CAAC,OAAO,kBAAkB,CAAC,CAAC;IACxD,aAAa,CAAC,EAAE,eAAe,EAAE,CAAC;CACnC,CAgCA;AAED;;GAEG;AACH,wBAAgB,aAAa,IAAI,IAAI,CAOpC;AAED;;GAEG;AACH,wBAAgB,aAAa,IAAI;IAC/B,GAAG,EAAE,MAAM,CAAC;IACZ,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,eAAe,EAAE,UAAU,CAAC,OAAO,kBAAkB,CAAC,CAAC;CACxD,GAAG,IAAI,CAWP;AAED;;GAEG;AACH,wBAAgB,OAAO,IAAI,IAAI,CA0B9B"}
@@ -0,0 +1,357 @@
1
+ /**
2
+ * RuVector Training Service
3
+ * Real WASM-accelerated neural training using @ruvector packages
4
+ *
5
+ * Features:
6
+ * - MicroLoRA: <100µs adaptation with rank-2 LoRA
7
+ * - Flash Attention: 2.49x-7.47x speedup
8
+ * - Trajectory Buffer: Learning from success/failure
9
+ * - Contrastive Learning: InfoNCE loss
10
+ *
11
+ * Created with ❤️ by ruv.io
12
+ */
13
+ // Lazy-loaded WASM modules
14
+ let microLoRA = null; // WasmMicroLoRA — created by initializeTraining()
15
+ let scopedLoRA = null; // WasmScopedLoRA — per-operator adapters (17 operator types)
16
+ let trajectoryBuffer = null; // WasmTrajectoryBuffer — success/failure trajectories
17
+ let flashAttention = null; // FlashAttention instance (enabled by default)
18
+ let moeAttention = null; // MoEAttention instance (opt-in via config.useMoE)
19
+ let hyperbolicAttention = null; // HyperbolicAttention instance (opt-in)
20
+ let optimizer = null; // AdamWOptimizer
21
+ let contrastiveLoss = null; // InfoNceLoss (temperature 0.07)
22
+ let curriculum = null; // CurriculumScheduler — only when config.totalSteps given
23
+ let hardMiner = null; // HardNegativeMiner — optional, may fail to construct
24
+ // Training state
25
+ let initialized = false; // flipped true once initializeTraining() succeeds
26
+ let totalAdaptations = 0; // counter reported by getTrainingStats()
27
+ let totalForwards = 0; // counter reported by getTrainingStats()
28
+ let lastBenchmark = null; // cached result of benchmarkTraining()
29
+ /**
30
+ * Initialize the RuVector training system
31
+ */
32
/**
 * Initialize the RuVector training system: loads the learning WASM module,
 * constructs the LoRA adapters, trajectory buffer, attention mechanisms,
 * optimizer and loss, then flips the module-level `initialized` flag.
 *
 * @param {object} [config={}] - See TrainingConfig in the .d.ts.
 * @returns {Promise<{success: boolean, features: string[], error?: string}>}
 *   Never rejects; failures are reported via `success:false` plus `error`.
 */
export async function initializeTraining(config = {}) {
    const enabled = [];
    const dim = Math.min(config.dim || 256, 256); // Max 256 for WASM
    const learningRate = config.learningRate || 0.01;
    const alpha = config.alpha || 0.1;
    try {
        // Read the WASM binary straight from disk and init synchronously —
        // avoids fetch(), which does not work for package files under Node.js.
        const fs = await import('fs');
        const { createRequire } = await import('module');
        const require = createRequire(import.meta.url);
        const wasmPath = require.resolve('@ruvector/learning-wasm/ruvector_learning_wasm_bg.wasm');
        const wasmBuffer = fs.readFileSync(wasmPath);
        const learningWasm = await import('@ruvector/learning-wasm');
        learningWasm.initSync({ module: wasmBuffer });

        microLoRA = new learningWasm.WasmMicroLoRA(dim, alpha, learningRate);
        enabled.push(`MicroLoRA (${dim}-dim, <1μs adaptation)`);

        scopedLoRA = new learningWasm.WasmScopedLoRA(dim, alpha, learningRate);
        scopedLoRA.set_category_fallback(true);
        enabled.push('ScopedLoRA (17 operators)');

        trajectoryBuffer = new learningWasm.WasmTrajectoryBuffer(config.trajectoryCapacity || 10000, dim);
        enabled.push('TrajectoryBuffer');

        const attention = await import('@ruvector/attention');
        if (config.useFlashAttention !== false) { // opt-out: on by default
            flashAttention = new attention.FlashAttention(dim, 64);
            enabled.push('FlashAttention');
        }
        if (config.useMoE) { // opt-in
            moeAttention = attention.MoEAttention.simple(dim, 8, 2);
            enabled.push('MoE (8 experts, top-2)');
        }
        if (config.useHyperbolic) { // opt-in
            hyperbolicAttention = new attention.HyperbolicAttention(dim, 1.0);
            enabled.push('HyperbolicAttention');
        }

        optimizer = new attention.AdamWOptimizer(learningRate, 0.9, 0.999, 1e-8, 0.01);
        enabled.push('AdamW Optimizer');
        contrastiveLoss = new attention.InfoNceLoss(0.07);
        enabled.push('InfoNCE Loss');

        if (config.totalSteps) {
            curriculum = new attention.CurriculumScheduler(config.totalSteps, config.warmupSteps || Math.floor(config.totalSteps * 0.1));
            enabled.push('Curriculum Learning');
        }

        // Hard negative mining is best-effort; the NAPI binding expects a
        // string where the typings declare an enum, so construction may throw.
        try {
            // @ts-expect-error - MiningStrategy enum binding expects string not enum value
            hardMiner = new attention.HardNegativeMiner(5, 'semi_hard');
            enabled.push('Hard Negative Mining');
        }
        catch {
            // Mining not available, continue without it
        }

        initialized = true;
        return { success: true, features: enabled };
    }
    catch (error) {
        return {
            success: false,
            features: enabled,
            error: error instanceof Error ? error.message : String(error),
        };
    }
}
100
+ /**
101
+ * Operator types for scoped LoRA (0-16)
102
+ */
103
/**
 * Operator categories for scoped LoRA adapters.
 * Values 0-16 map one-to-one onto the WASM-side operator indices, so the
 * numbers must not be renumbered.
 */
export const OperatorType = {
    GENERAL: 0,       // fallback / uncategorized
    ATTENTION: 1,
    MLP: 2,
    EMBEDDING: 3,
    NORMALIZATION: 4,
    PROJECTION: 5,
    POOLING: 6,
    CONVOLUTION: 7,
    RECURRENT: 8,
    ROUTING: 9,
    MEMORY: 10,
    REASONING: 11,
    COORDINATION: 12,
    OPTIMIZATION: 13,
    SECURITY: 14,
    TESTING: 15,
    DEBUGGING: 16,
};
122
+ /**
123
+ * Train a pattern with MicroLoRA
124
+ */
125
/**
 * Train a pattern with MicroLoRA (or the per-operator ScopedLoRA when an
 * operator type is supplied).
 *
 * Fix: the scoped-LoRA path previously returned without bumping
 * `totalAdaptations`, so getTrainingStats() undercounted scoped adaptations —
 * inconsistent with adaptWithReward(), which counts both paths. The counter
 * is now incremented once per call regardless of path.
 *
 * @param {Float32Array} embedding - Currently unused by this implementation
 *   (kept for interface compatibility with the declared signature).
 * @param {Float32Array} gradient - Gradient applied to the adapter.
 * @param {number} [operatorType] - OperatorType index; routes to ScopedLoRA.
 * @returns {Promise<{deltaNorm: number, adaptCount: bigint}>}
 * @throws {Error} If the training system is not initialized.
 */
export async function trainPattern(embedding, gradient, operatorType) {
    if (!initialized || !microLoRA) {
        throw new Error('Training system not initialized');
    }
    totalAdaptations++;
    // Use scoped LoRA if operator type specified
    if (operatorType !== undefined && scopedLoRA) {
        scopedLoRA.adapt_array(operatorType, gradient);
        return {
            deltaNorm: scopedLoRA.delta_norm(operatorType),
            adaptCount: scopedLoRA.adapt_count(operatorType),
        };
    }
    // Standard MicroLoRA adaptation
    microLoRA.adapt_array(gradient);
    return {
        deltaNorm: microLoRA.delta_norm(),
        adaptCount: microLoRA.adapt_count(),
    };
}
145
+ /**
146
+ * Forward pass through LoRA
147
+ */
148
/**
 * Forward pass through the LoRA adapter.
 *
 * @param {Float32Array} input - Input vector.
 * @param {number} [operatorType] - When given (and ScopedLoRA exists), routes
 *   through the per-operator adapter instead of the global MicroLoRA.
 * @returns {Float32Array} Adapted output vector.
 * @throws {Error} If the training system is not initialized.
 */
export function forward(input, operatorType) {
    if (!initialized || !microLoRA) {
        throw new Error('Training system not initialized');
    }
    totalForwards++;
    const useScoped = operatorType !== undefined && scopedLoRA;
    return useScoped
        ? scopedLoRA.forward_array(operatorType, input)
        : microLoRA.forward_array(input);
}
158
+ /**
159
+ * Reward-based adaptation (reinforcement learning)
160
+ */
161
/**
 * Reward-based adaptation (reinforcement-learning style update).
 *
 * @param {number} improvement - Reward signal (relative improvement).
 * @param {number} [operatorType] - When given (and ScopedLoRA exists),
 *   applies the reward to the per-operator adapter.
 * @throws {Error} If the training system is not initialized.
 */
export function adaptWithReward(improvement, operatorType) {
    if (!initialized) {
        throw new Error('Training system not initialized');
    }
    const scoped = operatorType !== undefined && scopedLoRA;
    if (scoped) {
        scopedLoRA.adapt_with_reward(operatorType, improvement);
    }
    else if (microLoRA) {
        microLoRA.adapt_with_reward(improvement);
    }
    totalAdaptations++;
}
173
+ /**
174
+ * Record a learning trajectory
175
+ */
176
/**
 * Record one learning trajectory (execution time vs. baseline) into the
 * trajectory buffer.
 *
 * @param {Float32Array} embedding - Task embedding.
 * @param {number} operatorType - OperatorType index.
 * @param {number} attentionType - Attention mechanism index.
 * @param {number} executionMs - Measured execution time.
 * @param {number} baselineMs - Baseline execution time.
 * @throws {Error} If the trajectory buffer was never initialized.
 */
export function recordTrajectory(embedding, operatorType, attentionType, executionMs, baselineMs) {
    if (trajectoryBuffer === null) {
        throw new Error('Trajectory buffer not initialized');
    }
    trajectoryBuffer.record(embedding, operatorType, attentionType, executionMs, baselineMs);
}
182
+ /**
183
+ * Get trajectory statistics
184
+ */
185
/**
 * Summarize the trajectory buffer.
 *
 * @returns {object|null} Statistics object, or null when the buffer is
 *   missing or empty.
 */
export function getTrajectoryStats() {
    if (!trajectoryBuffer || trajectoryBuffer.is_empty()) {
        return null;
    }
    const buffer = trajectoryBuffer;
    return {
        successRate: buffer.success_rate(),
        meanImprovement: buffer.mean_improvement(),
        bestImprovement: buffer.best_improvement(),
        totalCount: buffer.total_count(),
        highQualityCount: buffer.high_quality_count(0.1), // threshold: >=10% improvement
        variance: buffer.variance(),
    };
}
198
+ /**
199
+ * Compute attention with Flash Attention (2.49x-7.47x faster)
200
+ */
201
/**
 * Compute attention using the Flash Attention kernel.
 *
 * @param {Float32Array} query
 * @param {Float32Array[]} keys
 * @param {Float32Array[]} values
 * @returns {Float32Array} Attention output.
 * @throws {Error} If flash attention was not enabled at initialization.
 */
export function computeFlashAttention(query, keys, values) {
    if (flashAttention === null) {
        throw new Error('Flash attention not initialized');
    }
    return flashAttention.computeRaw(query, keys, values);
}
207
+ /**
208
+ * Compute MoE routing
209
+ */
210
/**
 * Compute attention through the mixture-of-experts router.
 *
 * @param {Float32Array} query
 * @param {Float32Array[]} keys
 * @param {Float32Array[]} values
 * @returns {Float32Array} Attention output.
 * @throws {Error} If MoE attention was not enabled at initialization.
 */
export function computeMoEAttention(query, keys, values) {
    if (moeAttention === null) {
        throw new Error('MoE attention not initialized');
    }
    return moeAttention.computeRaw(query, keys, values);
}
216
+ /**
217
+ * Compute hyperbolic attention (for hierarchical patterns)
218
+ */
219
/**
 * Compute hyperbolic attention (intended for hierarchical patterns).
 *
 * @param {Float32Array} query
 * @param {Float32Array[]} keys
 * @param {Float32Array[]} values
 * @returns {Float32Array} Attention output.
 * @throws {Error} If hyperbolic attention was not enabled at initialization.
 */
export function computeHyperbolicAttention(query, keys, values) {
    if (hyperbolicAttention === null) {
        throw new Error('Hyperbolic attention not initialized');
    }
    return hyperbolicAttention.computeRaw(query, keys, values);
}
225
+ /**
226
+ * Compute contrastive loss for training
227
+ */
228
/**
 * Compute InfoNCE contrastive loss and its gradient w.r.t. the anchor.
 *
 * @param {Float32Array} anchor
 * @param {Float32Array[]} positives
 * @param {Float32Array[]} negatives
 * @returns {{loss: number, gradient: Float32Array}}
 * @throws {Error} If the loss module was not initialized.
 */
export function computeContrastiveLoss(anchor, positives, negatives) {
    if (contrastiveLoss === null) {
        throw new Error('Contrastive loss not initialized');
    }
    const loss = contrastiveLoss.compute(anchor, positives, negatives);
    const gradient = contrastiveLoss.backward(anchor, positives, negatives);
    return { loss, gradient };
}
236
+ /**
237
+ * Optimizer step
238
+ */
239
/**
 * Apply one AdamW optimizer step.
 *
 * @param {Float32Array} params - Current parameters.
 * @param {Float32Array} gradients - Gradients for this step.
 * @returns {Float32Array} Updated parameters.
 * @throws {Error} If the optimizer was not initialized.
 */
export function optimizerStep(params, gradients) {
    if (optimizer === null) {
        throw new Error('Optimizer not initialized');
    }
    return optimizer.step(params, gradients);
}
245
+ /**
246
+ * Get curriculum difficulty for current step
247
+ */
248
/**
 * Get the curriculum difficulty for a training step.
 * Without a configured curriculum scheduler, always returns full difficulty.
 *
 * @param {number} step - Training step index.
 * @returns {number} Difficulty in [0, 1]; 1.0 when no curriculum is active.
 */
export function getCurriculumDifficulty(step) {
    return curriculum
        ? curriculum.getDifficulty(step)
        : 1.0; // Full difficulty if no curriculum
}
254
+ /**
255
+ * Mine hard negatives for better training
256
+ */
257
/**
 * Select hard negative candidates relative to an anchor.
 *
 * @param {Float32Array} anchor
 * @param {Float32Array[]} candidates
 * @returns {number[]} Indices of the mined negatives.
 * @throws {Error} If the miner was not initialized (it is best-effort).
 */
export function mineHardNegatives(anchor, candidates) {
    if (hardMiner === null) {
        throw new Error('Hard negative miner not initialized');
    }
    return hardMiner.mine(anchor, candidates);
}
263
+ /**
264
+ * Benchmark the training system
265
+ */
266
/**
 * Benchmark the attention mechanisms and cache the result for
 * getTrainingStats().
 *
 * @param {number} [dim] - Vector dimension (default 256).
 * @param {number} [iterations] - Iteration count (default 1000).
 * @returns {Promise<object[]>} Benchmark results.
 */
export async function benchmarkTraining(dim, iterations) {
    const { benchmarkAttention } = await import('@ruvector/attention');
    lastBenchmark = benchmarkAttention(dim || 256, 100, iterations || 1000);
    return lastBenchmark;
}
271
+ /**
272
+ * Get training statistics
273
+ */
274
/**
 * Snapshot of the training system's counters and per-component statistics.
 * Optional sections are present only when the matching component exists
 * (and, for trajectories, is non-empty).
 *
 * @returns {object} Stats object; see the .d.ts for the full shape.
 */
export function getTrainingStats() {
    const snapshot = {
        initialized,
        totalAdaptations,
        totalForwards,
    };
    if (microLoRA) {
        snapshot.microLoraStats = {
            paramCount: microLoRA.param_count(),
            adaptCount: microLoRA.adapt_count(),
            forwardCount: microLoRA.forward_count(),
            deltaNorm: microLoRA.delta_norm(),
        };
    }
    if (scopedLoRA) {
        snapshot.scopedLoraStats = {
            totalAdaptCount: scopedLoRA.total_adapt_count(),
            totalForwardCount: scopedLoRA.total_forward_count(),
        };
    }
    if (trajectoryBuffer && !trajectoryBuffer.is_empty()) {
        snapshot.trajectoryStats = getTrajectoryStats();
    }
    if (lastBenchmark) {
        snapshot.lastBenchmark = lastBenchmark;
    }
    return snapshot;
}
302
+ /**
303
+ * Reset the training system
304
+ */
305
/**
 * Reset learned state and counters without releasing WASM resources.
 * Components that were never created are skipped.
 */
export function resetTraining() {
    microLoRA?.reset();
    scopedLoRA?.reset_all();
    trajectoryBuffer?.reset();
    totalAdaptations = 0;
    totalForwards = 0;
}
315
+ /**
316
+ * Export trained weights
317
+ */
318
/**
 * Export a summary of the trained MicroLoRA weights.
 *
 * @returns {object|null} `{dim, deltaNorm, adaptCount, trajectoryStats}`,
 *   or null when the system was never initialized.
 */
export function exportWeights() {
    if (!initialized || !microLoRA) {
        return null;
    }
    const lora = microLoRA;
    return {
        dim: lora.dim(),
        deltaNorm: lora.delta_norm(),
        adaptCount: lora.adapt_count(),
        trajectoryStats: getTrajectoryStats(),
    };
}
329
+ /**
330
+ * Cleanup resources
331
+ */
332
/**
 * Release all WASM-backed resources and return the module to its
 * pre-initialization state. Safe to call when nothing was initialized.
 */
export function cleanup() {
    // Only the WASM-backed objects own native memory and need free();
    // the attention/optimizer handles are dropped by nulling the references.
    for (const wasmResource of [microLoRA, scopedLoRA, trajectoryBuffer]) {
        wasmResource?.free();
    }
    microLoRA = null;
    scopedLoRA = null;
    trajectoryBuffer = null;
    flashAttention = null;
    moeAttention = null;
    hyperbolicAttention = null;
    optimizer = null;
    contrastiveLoss = null;
    curriculum = null;
    hardMiner = null;
    initialized = false;
    totalAdaptations = 0;
    totalForwards = 0;
    lastBenchmark = null;
}
357
+ //# sourceMappingURL=ruvector-training.js.map