agentic-qe 3.8.10 → 3.8.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119):
  1. package/.claude/skills/skills-manifest.json +1 -1
  2. package/CHANGELOG.md +40 -0
  3. package/dist/cli/bundle.js +1345 -1003
  4. package/dist/cli/command-registry.js +5 -1
  5. package/dist/cli/commands/pipeline.d.ts +16 -0
  6. package/dist/cli/commands/pipeline.js +314 -0
  7. package/dist/cli/commands/ruvector-commands.js +17 -0
  8. package/dist/cli/commands/token-usage.js +24 -1
  9. package/dist/cli/handlers/heartbeat-handler.d.ts +26 -0
  10. package/dist/cli/handlers/heartbeat-handler.js +382 -0
  11. package/dist/cli/handlers/index.d.ts +2 -0
  12. package/dist/cli/handlers/index.js +2 -0
  13. package/dist/cli/handlers/routing-handler.d.ts +22 -0
  14. package/dist/cli/handlers/routing-handler.js +227 -0
  15. package/dist/cli/index.js +2 -0
  16. package/dist/coordination/deterministic-actions.d.ts +36 -0
  17. package/dist/coordination/deterministic-actions.js +257 -0
  18. package/dist/coordination/workflow-orchestrator.d.ts +18 -1
  19. package/dist/coordination/workflow-orchestrator.js +113 -3
  20. package/dist/coordination/workflow-types.d.ts +19 -1
  21. package/dist/coordination/workflow-types.js +3 -0
  22. package/dist/coordination/yaml-pipeline-loader.d.ts +1 -0
  23. package/dist/coordination/yaml-pipeline-loader.js +34 -0
  24. package/dist/domains/code-intelligence/coordinator-gnn.d.ts +21 -0
  25. package/dist/domains/code-intelligence/coordinator-gnn.js +102 -0
  26. package/dist/domains/contract-testing/coordinator.js +13 -0
  27. package/dist/domains/coverage-analysis/coordinator.js +5 -0
  28. package/dist/domains/defect-intelligence/coordinator.d.ts +1 -0
  29. package/dist/domains/defect-intelligence/coordinator.js +43 -0
  30. package/dist/domains/quality-assessment/coordinator.js +26 -0
  31. package/dist/domains/test-generation/coordinator.js +14 -0
  32. package/dist/integrations/agentic-flow/reasoning-bank/experience-replay.d.ts +11 -0
  33. package/dist/integrations/agentic-flow/reasoning-bank/experience-replay.js +44 -1
  34. package/dist/integrations/rl-suite/algorithms/eprop.d.ts +79 -0
  35. package/dist/integrations/rl-suite/algorithms/eprop.js +284 -0
  36. package/dist/integrations/rl-suite/algorithms/index.d.ts +2 -1
  37. package/dist/integrations/rl-suite/algorithms/index.js +2 -1
  38. package/dist/integrations/rl-suite/index.d.ts +2 -2
  39. package/dist/integrations/rl-suite/index.js +2 -2
  40. package/dist/integrations/rl-suite/interfaces.d.ts +3 -3
  41. package/dist/integrations/rl-suite/interfaces.js +1 -1
  42. package/dist/integrations/rl-suite/orchestrator.d.ts +2 -2
  43. package/dist/integrations/rl-suite/orchestrator.js +3 -2
  44. package/dist/integrations/rl-suite/reward-signals.d.ts +1 -1
  45. package/dist/integrations/rl-suite/reward-signals.js +1 -1
  46. package/dist/integrations/ruvector/coherence-gate-cohomology.d.ts +41 -0
  47. package/dist/integrations/ruvector/coherence-gate-cohomology.js +47 -0
  48. package/dist/integrations/ruvector/coherence-gate-core.d.ts +200 -0
  49. package/dist/integrations/ruvector/coherence-gate-core.js +294 -0
  50. package/dist/integrations/ruvector/coherence-gate-energy.d.ts +136 -0
  51. package/dist/integrations/ruvector/coherence-gate-energy.js +373 -0
  52. package/dist/integrations/ruvector/coherence-gate-vector.d.ts +38 -0
  53. package/dist/integrations/ruvector/coherence-gate-vector.js +76 -0
  54. package/dist/integrations/ruvector/coherence-gate.d.ts +10 -311
  55. package/dist/integrations/ruvector/coherence-gate.js +10 -652
  56. package/dist/integrations/ruvector/cold-tier-trainer.d.ts +103 -0
  57. package/dist/integrations/ruvector/cold-tier-trainer.js +377 -0
  58. package/dist/integrations/ruvector/cusum-detector.d.ts +70 -0
  59. package/dist/integrations/ruvector/cusum-detector.js +142 -0
  60. package/dist/integrations/ruvector/delta-tracker.d.ts +122 -0
  61. package/dist/integrations/ruvector/delta-tracker.js +311 -0
  62. package/dist/integrations/ruvector/domain-transfer.d.ts +79 -1
  63. package/dist/integrations/ruvector/domain-transfer.js +158 -2
  64. package/dist/integrations/ruvector/eprop-learner.d.ts +135 -0
  65. package/dist/integrations/ruvector/eprop-learner.js +351 -0
  66. package/dist/integrations/ruvector/feature-flags.d.ts +177 -0
  67. package/dist/integrations/ruvector/feature-flags.js +145 -0
  68. package/dist/integrations/ruvector/graphmae-encoder.d.ts +88 -0
  69. package/dist/integrations/ruvector/graphmae-encoder.js +360 -0
  70. package/dist/integrations/ruvector/hdc-fingerprint.d.ts +127 -0
  71. package/dist/integrations/ruvector/hdc-fingerprint.js +222 -0
  72. package/dist/integrations/ruvector/hopfield-memory.d.ts +97 -0
  73. package/dist/integrations/ruvector/hopfield-memory.js +238 -0
  74. package/dist/integrations/ruvector/index.d.ts +13 -2
  75. package/dist/integrations/ruvector/index.js +46 -2
  76. package/dist/integrations/ruvector/mincut-wrapper.d.ts +7 -0
  77. package/dist/integrations/ruvector/mincut-wrapper.js +54 -2
  78. package/dist/integrations/ruvector/reservoir-replay.d.ts +172 -0
  79. package/dist/integrations/ruvector/reservoir-replay.js +335 -0
  80. package/dist/integrations/ruvector/solver-adapter.d.ts +93 -0
  81. package/dist/integrations/ruvector/solver-adapter.js +299 -0
  82. package/dist/integrations/ruvector/sona-persistence.d.ts +33 -0
  83. package/dist/integrations/ruvector/sona-persistence.js +47 -0
  84. package/dist/integrations/ruvector/spectral-sparsifier.d.ts +154 -0
  85. package/dist/integrations/ruvector/spectral-sparsifier.js +389 -0
  86. package/dist/integrations/ruvector/temporal-causality.d.ts +63 -0
  87. package/dist/integrations/ruvector/temporal-causality.js +317 -0
  88. package/dist/learning/pattern-promotion.d.ts +63 -0
  89. package/dist/learning/pattern-promotion.js +235 -1
  90. package/dist/learning/pattern-store.d.ts +2 -0
  91. package/dist/learning/pattern-store.js +187 -1
  92. package/dist/learning/sqlite-persistence.d.ts +2 -0
  93. package/dist/learning/sqlite-persistence.js +4 -0
  94. package/dist/mcp/bundle.js +477 -380
  95. package/dist/mcp/handlers/heartbeat-handlers.d.ts +67 -0
  96. package/dist/mcp/handlers/heartbeat-handlers.js +180 -0
  97. package/dist/mcp/handlers/index.d.ts +2 -1
  98. package/dist/mcp/handlers/index.js +5 -1
  99. package/dist/mcp/handlers/task-handlers.d.ts +28 -0
  100. package/dist/mcp/handlers/task-handlers.js +39 -0
  101. package/dist/mcp/protocol-server.js +45 -1
  102. package/dist/mcp/server.js +41 -1
  103. package/dist/optimization/index.d.ts +2 -0
  104. package/dist/optimization/index.js +1 -0
  105. package/dist/optimization/session-cache.d.ts +80 -0
  106. package/dist/optimization/session-cache.js +227 -0
  107. package/dist/optimization/token-optimizer-service.d.ts +10 -0
  108. package/dist/optimization/token-optimizer-service.js +51 -0
  109. package/dist/routing/economic-routing.d.ts +126 -0
  110. package/dist/routing/economic-routing.js +290 -0
  111. package/dist/routing/index.d.ts +2 -0
  112. package/dist/routing/index.js +2 -0
  113. package/dist/routing/routing-feedback.d.ts +29 -0
  114. package/dist/routing/routing-feedback.js +75 -0
  115. package/dist/shared/utils/index.d.ts +1 -0
  116. package/dist/shared/utils/index.js +1 -0
  117. package/dist/shared/utils/xorshift128.d.ts +24 -0
  118. package/dist/shared/utils/xorshift128.js +50 -0
  119. package/package.json +1 -1
@@ -0,0 +1,103 @@
1
+ /**
2
+ * R6: Cold-Tier GNN Training
3
+ *
4
+ * LRU-cached mini-batch GNN trainer for graphs exceeding a configurable
5
+ * memory budget (hotsetSize). Uses an LRU eviction cache to limit the
6
+ * number of node features held in memory simultaneously.
7
+ *
8
+ * When the graph fits within hotsetSize, trains fully in-memory (fast path).
9
+ * When the graph exceeds hotsetSize, streams nodes through the LRU cache
10
+ * with eviction, producing embeddings only for the hot-set nodes.
11
+ *
12
+ * TypeScript implementation. Future: NAPI upgrade via @ruvector/gnn cold_tier
13
+ * for true block-aligned disk I/O with mmap-backed node storage.
14
+ *
15
+ * @module integrations/ruvector/cold-tier-trainer
16
+ */
17
+ export interface ColdTierConfig {
18
+ /** Maximum nodes kept in memory (hot set). Default: 10000 */
19
+ hotsetSize: number;
20
+ /** Number of training epochs. Default: 10 */
21
+ epochs: number;
22
+ /** Learning rate. Default: 0.01 */
23
+ learningRate: number;
24
+ /** Hidden dimension for GNN layers. Default: 64 */
25
+ hiddenDim: number;
26
+ }
27
+ export interface ColdTierGraph {
28
+ nodeCount: number;
29
+ featureDim: number;
30
+ getNode(id: number): Float32Array | null;
31
+ getNeighbors(id: number): number[];
32
+ nodeIds(): Iterable<number>;
33
+ }
34
+ export interface TrainingResult {
35
+ loss: number;
36
+ lossHistory: number[];
37
+ embeddings: Map<number, Float32Array>;
38
+ peakMemoryNodes: number;
39
+ usedInMemoryMode: boolean;
40
+ }
41
+ export interface CacheStats {
42
+ hits: number;
43
+ misses: number;
44
+ evictions: number;
45
+ currentSize: number;
46
+ }
47
+ /** Simple in-memory graph for testing and small graphs. */
48
+ export declare class InMemoryGraph implements ColdTierGraph {
49
+ private features;
50
+ private adjacency;
51
+ constructor(features: Map<number, Float32Array>, adjacency: Map<number, number[]>);
52
+ get nodeCount(): number;
53
+ get featureDim(): number;
54
+ getNode(id: number): Float32Array | null;
55
+ getNeighbors(id: number): number[];
56
+ nodeIds(): Iterable<number>;
57
+ }
58
+ /**
59
+ * File-backed graph for larger-than-RAM training.
60
+ * Writes node features to a temp file in block-aligned format,
61
+ * reads them back lazily via getNode().
62
+ */
63
+ export declare class FileBackedGraph implements ColdTierGraph {
64
+ private readonly featureFile;
65
+ private readonly nodeOffsets;
66
+ private readonly adjacency;
67
+ private readonly _nodeCount;
68
+ private readonly _featureDim;
69
+ private readonly bytesPerNode;
70
+ constructor(features: Map<number, Float32Array>, adjacency: Map<number, number[]>, storagePath?: string);
71
+ get nodeCount(): number;
72
+ get featureDim(): number;
73
+ getNode(id: number): Float32Array | null;
74
+ getNeighbors(id: number): number[];
75
+ nodeIds(): Iterable<number>;
76
+ /** Clean up temp file */
77
+ dispose(): void;
78
+ }
79
+ /** LRU-cached GNN trainer with mean-aggregation message passing. */
80
+ export declare class ColdTierTrainer {
81
+ private readonly config;
82
+ private cache;
83
+ private weights;
84
+ private bias;
85
+ constructor(config?: Partial<ColdTierConfig>);
86
+ /** Train a GNN on the given graph. Throws if useColdTierGNN flag is off. */
87
+ train(graph: ColdTierGraph): TrainingResult;
88
+ getCacheStats(): CacheStats;
89
+ reset(): void;
90
+ /** Mean-aggregate self + neighbor features into a pre-allocated buffer. */
91
+ private aggregate;
92
+ /** Forward pass: aggregated * W + bias with ReLU. */
93
+ private linearRelu;
94
+ private trainNode;
95
+ /** Forward-only pass for embedding extraction. */
96
+ private forward;
97
+ private fetchNode;
98
+ private ensureCached;
99
+ private shuffle;
100
+ }
101
+ /** Create a ColdTierTrainer with the given configuration. */
102
+ export declare function createColdTierTrainer(config?: Partial<ColdTierConfig>): ColdTierTrainer;
103
+ //# sourceMappingURL=cold-tier-trainer.d.ts.map
@@ -0,0 +1,377 @@
1
+ /**
2
+ * R6: Cold-Tier GNN Training
3
+ *
4
+ * LRU-cached mini-batch GNN trainer for graphs exceeding a configurable
5
+ * memory budget (hotsetSize). Uses an LRU eviction cache to limit the
6
+ * number of node features held in memory simultaneously.
7
+ *
8
+ * When the graph fits within hotsetSize, trains fully in-memory (fast path).
9
+ * When the graph exceeds hotsetSize, streams nodes through the LRU cache
10
+ * with eviction, producing embeddings only for the hot-set nodes.
11
+ *
12
+ * TypeScript implementation. Future: NAPI upgrade via @ruvector/gnn cold_tier
13
+ * for true block-aligned disk I/O with mmap-backed node storage.
14
+ *
15
+ * @module integrations/ruvector/cold-tier-trainer
16
+ */
17
+ import { getRuVectorFeatureFlags } from './feature-flags.js';
18
+ import { Xorshift128 } from '../../shared/utils/xorshift128.js';
19
+ // ============================================================================
20
+ // Constants
21
+ // ============================================================================
22
+ const DEFAULT_CONFIG = {
23
+ hotsetSize: 10000,
24
+ epochs: 10,
25
+ learningRate: 0.01,
26
+ hiddenDim: 64,
27
+ };
28
+ // ============================================================================
29
+ // Internal: LRU Cache
30
+ // ============================================================================
31
+ class LruNodeCache {
32
+ capacity;
33
+ entries = new Map();
34
+ accessCounter = 0;
35
+ _hits = 0;
36
+ _misses = 0;
37
+ _evictions = 0;
38
+ _peakSize = 0;
39
+ constructor(capacity) {
40
+ this.capacity = capacity;
41
+ }
42
+ get(id) {
43
+ const entry = this.entries.get(id);
44
+ if (entry) {
45
+ this._hits++;
46
+ entry.lastAccess = ++this.accessCounter;
47
+ return entry.features;
48
+ }
49
+ this._misses++;
50
+ return null;
51
+ }
52
+ put(id, features) {
53
+ const existing = this.entries.get(id);
54
+ if (existing) {
55
+ existing.features = features;
56
+ existing.lastAccess = ++this.accessCounter;
57
+ return;
58
+ }
59
+ if (this.entries.size >= this.capacity)
60
+ this.evictLru();
61
+ this.entries.set(id, { features, lastAccess: ++this.accessCounter });
62
+ if (this.entries.size > this._peakSize)
63
+ this._peakSize = this.entries.size;
64
+ }
65
+ get peakSize() { return this._peakSize; }
66
+ get stats() {
67
+ return { hits: this._hits, misses: this._misses, evictions: this._evictions, currentSize: this.entries.size };
68
+ }
69
+ cachedIds() {
70
+ return Array.from(this.entries.keys());
71
+ }
72
+ clear() {
73
+ this.entries.clear();
74
+ this.accessCounter = 0;
75
+ this._hits = this._misses = this._evictions = this._peakSize = 0;
76
+ }
77
+ evictLru() {
78
+ let oldestAccess = Infinity;
79
+ let oldestId = -1;
80
+ for (const [id, entry] of this.entries) {
81
+ if (entry.lastAccess < oldestAccess) {
82
+ oldestAccess = entry.lastAccess;
83
+ oldestId = id;
84
+ }
85
+ }
86
+ if (oldestId >= 0) {
87
+ this.entries.delete(oldestId);
88
+ this._evictions++;
89
+ }
90
+ }
91
+ }
92
+ // ============================================================================
93
+ // InMemoryGraph
94
+ // ============================================================================
95
+ /** Simple in-memory graph for testing and small graphs. */
96
+ export class InMemoryGraph {
97
+ features;
98
+ adjacency;
99
+ constructor(features, adjacency) {
100
+ this.features = features;
101
+ this.adjacency = adjacency;
102
+ }
103
+ get nodeCount() { return this.features.size; }
104
+ get featureDim() {
105
+ const first = this.features.values().next();
106
+ return first.done ? 0 : first.value.length;
107
+ }
108
+ getNode(id) { return this.features.get(id) ?? null; }
109
+ getNeighbors(id) { return this.adjacency.get(id) ?? []; }
110
+ *nodeIds() { yield* this.features.keys(); }
111
+ }
112
+ // ============================================================================
113
+ // FileBackedGraph
114
+ // ============================================================================
115
+ /**
116
+ * File-backed graph for larger-than-RAM training.
117
+ * Writes node features to a temp file in block-aligned format,
118
+ * reads them back lazily via getNode().
119
+ */
120
+ export class FileBackedGraph {
121
+ featureFile;
122
+ nodeOffsets;
123
+ adjacency;
124
+ _nodeCount;
125
+ _featureDim;
126
+ bytesPerNode;
127
+ constructor(features, adjacency, storagePath) {
128
+ const fs = require('fs');
129
+ const os = require('os');
130
+ const path = require('path');
131
+ this._nodeCount = features.size;
132
+ this._featureDim = features.values().next().value?.length ?? 0;
133
+ this.bytesPerNode = this._featureDim * 4; // Float32 = 4 bytes
134
+ this.adjacency = adjacency;
135
+ this.nodeOffsets = new Map();
136
+ // Write features to a temp file with sequential block-aligned layout
137
+ this.featureFile = storagePath ?? path.join(os.tmpdir(), `coldtier-${Date.now()}-${Math.random().toString(36).slice(2)}.bin`);
138
+ const buffer = Buffer.alloc(this._nodeCount * this.bytesPerNode);
139
+ let offset = 0;
140
+ for (const [id, feat] of features) {
141
+ this.nodeOffsets.set(id, offset);
142
+ for (let i = 0; i < feat.length; i++) {
143
+ buffer.writeFloatLE(feat[i], offset + i * 4);
144
+ }
145
+ offset += this.bytesPerNode;
146
+ }
147
+ fs.writeFileSync(this.featureFile, buffer);
148
+ }
149
+ get nodeCount() { return this._nodeCount; }
150
+ get featureDim() { return this._featureDim; }
151
+ getNode(id) {
152
+ const offset = this.nodeOffsets.get(id);
153
+ if (offset === undefined)
154
+ return null;
155
+ const fs = require('fs');
156
+ const buf = Buffer.alloc(this.bytesPerNode);
157
+ const fd = fs.openSync(this.featureFile, 'r');
158
+ fs.readSync(fd, buf, 0, this.bytesPerNode, offset);
159
+ fs.closeSync(fd);
160
+ const result = new Float32Array(this._featureDim);
161
+ for (let i = 0; i < this._featureDim; i++) {
162
+ result[i] = buf.readFloatLE(i * 4);
163
+ }
164
+ return result;
165
+ }
166
+ getNeighbors(id) { return this.adjacency.get(id) ?? []; }
167
+ *nodeIds() { yield* this.nodeOffsets.keys(); }
168
+ /** Clean up temp file */
169
+ dispose() {
170
+ try {
171
+ const fs = require('fs');
172
+ if (fs.existsSync(this.featureFile))
173
+ fs.unlinkSync(this.featureFile);
174
+ }
175
+ catch { /* ignore cleanup failures */ }
176
+ }
177
+ }
178
+ // ============================================================================
179
+ // ColdTierTrainer
180
+ // ============================================================================
181
+ /** LRU-cached GNN trainer with mean-aggregation message passing. */
182
+ export class ColdTierTrainer {
183
+ config;
184
+ cache;
185
+ weights = null;
186
+ bias = null;
187
+ constructor(config) {
188
+ this.config = { ...DEFAULT_CONFIG, ...config };
189
+ if (this.config.hotsetSize <= 0) {
190
+ throw new Error(`hotsetSize must be positive, got ${this.config.hotsetSize}`);
191
+ }
192
+ this.cache = new LruNodeCache(this.config.hotsetSize);
193
+ }
194
+ // --------------------------------------------------------------------------
195
+ // Public API
196
+ // --------------------------------------------------------------------------
197
+ /** Train a GNN on the given graph. Throws if useColdTierGNN flag is off. */
198
+ train(graph) {
199
+ if (!getRuVectorFeatureFlags().useColdTierGNN) {
200
+ throw new Error('Cold-Tier GNN training is disabled (useColdTierGNN = false)');
201
+ }
202
+ if (graph.nodeCount === 0) {
203
+ return { loss: 0, lossHistory: [], embeddings: new Map(), peakMemoryNodes: 0, usedInMemoryMode: true };
204
+ }
205
+ this.cache.clear();
206
+ const inMemory = graph.nodeCount <= this.config.hotsetSize;
207
+ const nodeIds = [...graph.nodeIds()];
208
+ const rng = new Xorshift128(42);
209
+ // Xavier/Glorot weight init
210
+ const { featureDim } = graph;
211
+ const { hiddenDim } = this.config;
212
+ const scale = Math.sqrt(2.0 / (featureDim + hiddenDim));
213
+ this.weights = new Float32Array(featureDim * hiddenDim);
214
+ for (let i = 0; i < this.weights.length; i++)
215
+ this.weights[i] = rng.nextGaussian() * scale;
216
+ this.bias = new Float32Array(hiddenDim);
217
+ // Preload all nodes for in-memory mode
218
+ if (inMemory) {
219
+ for (const id of nodeIds) {
220
+ const f = graph.getNode(id);
221
+ if (f)
222
+ this.cache.put(id, f);
223
+ }
224
+ }
225
+ const lossHistory = [];
226
+ for (let epoch = 0; epoch < this.config.epochs; epoch++) {
227
+ this.shuffle(nodeIds, rng);
228
+ let epochLoss = 0;
229
+ let count = 0;
230
+ for (const id of nodeIds) {
231
+ if (!inMemory)
232
+ this.ensureCached(id, graph);
233
+ epochLoss += this.trainNode(id, graph);
234
+ count++;
235
+ }
236
+ lossHistory.push(count > 0 ? epochLoss / count : 0);
237
+ }
238
+ // Collect embeddings
239
+ const embIds = inMemory ? nodeIds : this.cache.cachedIds();
240
+ const embeddings = new Map();
241
+ for (const id of embIds) {
242
+ const emb = this.forward(id, graph);
243
+ if (emb)
244
+ embeddings.set(id, emb);
245
+ }
246
+ return {
247
+ loss: lossHistory.length > 0 ? lossHistory[lossHistory.length - 1] : 0,
248
+ lossHistory,
249
+ embeddings,
250
+ peakMemoryNodes: this.cache.peakSize,
251
+ usedInMemoryMode: inMemory,
252
+ };
253
+ }
254
+ getCacheStats() { return this.cache.stats; }
255
+ reset() {
256
+ this.cache.clear();
257
+ this.weights = null;
258
+ this.bias = null;
259
+ }
260
+ // --------------------------------------------------------------------------
261
+ // Internal: Aggregation (shared by training and forward)
262
+ // --------------------------------------------------------------------------
263
+ /** Mean-aggregate self + neighbor features into a pre-allocated buffer. */
264
+ aggregate(id, graph, out) {
265
+ const self = this.fetchNode(id, graph);
266
+ if (!self)
267
+ return null;
268
+ const dim = graph.featureDim;
269
+ let count = 1;
270
+ for (let d = 0; d < dim; d++)
271
+ out[d] = self[d];
272
+ for (const nid of graph.getNeighbors(id)) {
273
+ const nf = this.fetchNode(nid, graph);
274
+ if (nf) {
275
+ for (let d = 0; d < dim; d++)
276
+ out[d] += nf[d];
277
+ count++;
278
+ }
279
+ }
280
+ for (let d = 0; d < dim; d++)
281
+ out[d] /= count;
282
+ return self;
283
+ }
284
+ /** Forward pass: aggregated * W + bias with ReLU. */
285
+ linearRelu(agg, featureDim, hiddenDim) {
286
+ const h = new Float32Array(hiddenDim);
287
+ for (let j = 0; j < hiddenDim; j++) {
288
+ let s = this.bias[j];
289
+ for (let i = 0; i < featureDim; i++)
290
+ s += agg[i] * this.weights[i * hiddenDim + j];
291
+ h[j] = Math.max(0, s);
292
+ }
293
+ return h;
294
+ }
295
+ // --------------------------------------------------------------------------
296
+ // Internal: Training step
297
+ // --------------------------------------------------------------------------
298
+ trainNode(id, graph) {
299
+ if (!this.weights || !this.bias)
300
+ return 0;
301
+ const { featureDim } = graph;
302
+ const { hiddenDim, learningRate: lr } = this.config;
303
+ const agg = new Float32Array(featureDim);
304
+ const self = this.aggregate(id, graph, agg);
305
+ if (!self)
306
+ return 0;
307
+ const hidden = this.linearRelu(agg, featureDim, hiddenDim);
308
+ // MSE loss: hidden vs truncated input features
309
+ const cmpDim = Math.min(hiddenDim, featureDim);
310
+ let loss = 0;
311
+ for (let d = 0; d < cmpDim; d++) {
312
+ const diff = hidden[d] - self[d];
313
+ loss += diff * diff;
314
+ }
315
+ loss /= cmpDim;
316
+ // SGD weight update
317
+ for (let j = 0; j < cmpDim; j++) {
318
+ const grad = (2 * (hidden[j] - self[j]) / cmpDim) * (hidden[j] > 0 ? 1 : 0);
319
+ this.bias[j] -= lr * grad;
320
+ for (let i = 0; i < featureDim; i++) {
321
+ this.weights[i * hiddenDim + j] -= lr * grad * agg[i];
322
+ }
323
+ }
324
+ return loss;
325
+ }
326
+ /** Forward-only pass for embedding extraction. */
327
+ forward(id, graph) {
328
+ if (!this.weights || !this.bias)
329
+ return null;
330
+ const agg = new Float32Array(graph.featureDim);
331
+ if (!this.aggregate(id, graph, agg))
332
+ return null;
333
+ return this.linearRelu(agg, graph.featureDim, this.config.hiddenDim);
334
+ }
335
+ // --------------------------------------------------------------------------
336
+ // Internal: Cache helpers
337
+ // --------------------------------------------------------------------------
338
+ fetchNode(id, graph) {
339
+ const cached = this.cache.get(id);
340
+ if (cached)
341
+ return cached;
342
+ const features = graph.getNode(id);
343
+ if (features)
344
+ this.cache.put(id, features);
345
+ return features;
346
+ }
347
+ ensureCached(id, graph) {
348
+ if (this.cache.get(id))
349
+ return;
350
+ const f = graph.getNode(id);
351
+ if (f)
352
+ this.cache.put(id, f);
353
+ }
354
+ shuffle(arr, rng) {
355
+ for (let i = arr.length - 1; i > 0; i--) {
356
+ // Use rejection sampling to avoid modulo bias
357
+ const range = i + 1;
358
+ const limit = 0x100000000 - (0x100000000 % range);
359
+ let r;
360
+ do {
361
+ r = rng.next();
362
+ } while (r >= limit);
363
+ const j = r % range;
364
+ const tmp = arr[i];
365
+ arr[i] = arr[j];
366
+ arr[j] = tmp;
367
+ }
368
+ }
369
+ }
370
+ // ============================================================================
371
+ // Factory
372
+ // ============================================================================
373
+ /** Create a ColdTierTrainer with the given configuration. */
374
+ export function createColdTierTrainer(config) {
375
+ return new ColdTierTrainer(config);
376
+ }
377
+ //# sourceMappingURL=cold-tier-trainer.js.map
@@ -0,0 +1,70 @@
1
+ /**
2
+ * CUSUM Drift Detector (R2, Phase 5)
3
+ *
4
+ * Two-sided Cumulative Sum (CUSUM) detector for monitoring coherence
5
+ * gate energy drift. Each gate type maintains independent state.
6
+ *
7
+ * Algorithm:
8
+ * S+(n) = max(0, S+(n-1) + (x_n - mu - slack))
9
+ * S-(n) = max(0, S-(n-1) + (-x_n + mu - slack))
10
+ * Drift detected when S+ > threshold OR S- > threshold
11
+ *
12
+ * mu is the running mean of the first N samples (warmup period).
13
+ *
14
+ * @module integrations/ruvector/cusum-detector
15
+ * @see ADR-087-ruvector-advanced-capabilities.md
16
+ */
17
+ /** Configuration for the CUSUM detector */
18
+ export interface CusumConfig {
19
+ /** Detection threshold (default: 5.0) */
20
+ threshold: number;
21
+ /** Allowable slack/drift (default: 0.5) */
22
+ slack: number;
23
+ /** Reset cumulative sum after alarm (default: true) */
24
+ resetOnAlarm: boolean;
25
+ /** Number of samples for warmup period to estimate mu (default: 20) */
26
+ warmupSamples: number;
27
+ }
28
+ /** Result of a CUSUM update */
29
+ export interface CusumResult {
30
+ /** Whether drift was detected */
31
+ driftDetected: boolean;
32
+ /** Current cumulative sum (max of positive and negative) */
33
+ cumulativeSum: number;
34
+ /** Direction of detected drift */
35
+ direction: 'positive' | 'negative' | 'none';
36
+ /** Number of samples since last reset */
37
+ samplesSinceReset: number;
38
+ }
39
+ /** Gate types that can be independently monitored */
40
+ export type GateType = 'retrieve' | 'write' | 'learn' | 'act';
41
+ /**
42
+ * Two-sided CUSUM detector for drift monitoring.
43
+ *
44
+ * Tracks both positive and negative cumulative sums per gate type.
45
+ * During the warmup period, samples are collected to estimate the
46
+ * running mean (mu). After warmup, deviations from mu are accumulated
47
+ * and compared against the threshold.
48
+ */
49
+ export declare class CusumDetector {
50
+ private readonly config;
51
+ private readonly states;
52
+ constructor(config?: Partial<CusumConfig>);
53
+ /**
54
+ * Update the CUSUM state for a gate type with a new sample value.
55
+ *
56
+ * During warmup, collects samples to estimate mu.
57
+ * After warmup, applies the two-sided CUSUM algorithm.
58
+ */
59
+ update(gateType: GateType, value: number): CusumResult;
60
+ /**
61
+ * Reset the CUSUM state for a specific gate type, or all gates.
62
+ */
63
+ reset(gateType?: GateType): void;
64
+ /**
65
+ * Get the current CUSUM state for a gate type without updating.
66
+ */
67
+ getState(gateType: GateType): CusumResult;
68
+ private getOrCreateState;
69
+ }
70
+ //# sourceMappingURL=cusum-detector.d.ts.map
@@ -0,0 +1,142 @@
1
+ /**
2
+ * CUSUM Drift Detector (R2, Phase 5)
3
+ *
4
+ * Two-sided Cumulative Sum (CUSUM) detector for monitoring coherence
5
+ * gate energy drift. Each gate type maintains independent state.
6
+ *
7
+ * Algorithm:
8
+ * S+(n) = max(0, S+(n-1) + (x_n - mu - slack))
9
+ * S-(n) = max(0, S-(n-1) + (-x_n + mu - slack))
10
+ * Drift detected when S+ > threshold OR S- > threshold
11
+ *
12
+ * mu is the running mean of the first N samples (warmup period).
13
+ *
14
+ * @module integrations/ruvector/cusum-detector
15
+ * @see ADR-087-ruvector-advanced-capabilities.md
16
+ */
17
+ const DEFAULT_CONFIG = {
18
+ threshold: 5.0,
19
+ slack: 0.5,
20
+ resetOnAlarm: true,
21
+ warmupSamples: 20,
22
+ };
23
+ /**
24
+ * Two-sided CUSUM detector for drift monitoring.
25
+ *
26
+ * Tracks both positive and negative cumulative sums per gate type.
27
+ * During the warmup period, samples are collected to estimate the
28
+ * running mean (mu). After warmup, deviations from mu are accumulated
29
+ * and compared against the threshold.
30
+ */
31
+ export class CusumDetector {
32
+ config;
33
+ states = new Map();
34
+ constructor(config = {}) {
35
+ this.config = { ...DEFAULT_CONFIG, ...config };
36
+ }
37
+ /**
38
+ * Update the CUSUM state for a gate type with a new sample value.
39
+ *
40
+ * During warmup, collects samples to estimate mu.
41
+ * After warmup, applies the two-sided CUSUM algorithm.
42
+ */
43
+ update(gateType, value) {
44
+ const state = this.getOrCreateState(gateType);
45
+ state.samplesSinceReset++;
46
+ // Warmup phase: collect samples to estimate mu
47
+ if (state.mu === null) {
48
+ state.warmupSum += value;
49
+ state.warmupCount++;
50
+ if (state.warmupCount >= this.config.warmupSamples) {
51
+ state.mu = state.warmupSum / state.warmupCount;
52
+ }
53
+ return {
54
+ driftDetected: false,
55
+ cumulativeSum: 0,
56
+ direction: 'none',
57
+ samplesSinceReset: state.samplesSinceReset,
58
+ };
59
+ }
60
+ // Two-sided CUSUM update
61
+ state.sPlus = Math.max(0, state.sPlus + (value - state.mu - this.config.slack));
62
+ state.sMinus = Math.max(0, state.sMinus + (-value + state.mu - this.config.slack));
63
+ const cumulativeSum = Math.max(state.sPlus, state.sMinus);
64
+ let driftDetected = false;
65
+ let direction = 'none';
66
+ if (state.sPlus > this.config.threshold) {
67
+ driftDetected = true;
68
+ direction = 'positive';
69
+ }
70
+ else if (state.sMinus > this.config.threshold) {
71
+ driftDetected = true;
72
+ direction = 'negative';
73
+ }
74
+ const result = {
75
+ driftDetected,
76
+ cumulativeSum,
77
+ direction,
78
+ samplesSinceReset: state.samplesSinceReset,
79
+ };
80
+ if (driftDetected && this.config.resetOnAlarm) {
81
+ state.sPlus = 0;
82
+ state.sMinus = 0;
83
+ state.samplesSinceReset = 0;
84
+ }
85
+ return result;
86
+ }
87
+ /**
88
+ * Reset the CUSUM state for a specific gate type, or all gates.
89
+ */
90
+ reset(gateType) {
91
+ if (gateType) {
92
+ this.states.delete(gateType);
93
+ }
94
+ else {
95
+ this.states.clear();
96
+ }
97
+ }
98
+ /**
99
+ * Get the current CUSUM state for a gate type without updating.
100
+ */
101
+ getState(gateType) {
102
+ const state = this.states.get(gateType);
103
+ if (!state) {
104
+ return {
105
+ driftDetected: false,
106
+ cumulativeSum: 0,
107
+ direction: 'none',
108
+ samplesSinceReset: 0,
109
+ };
110
+ }
111
+ const cumulativeSum = Math.max(state.sPlus, state.sMinus);
112
+ let direction = 'none';
113
+ if (state.sPlus > this.config.threshold) {
114
+ direction = 'positive';
115
+ }
116
+ else if (state.sMinus > this.config.threshold) {
117
+ direction = 'negative';
118
+ }
119
+ return {
120
+ driftDetected: state.sPlus > this.config.threshold || state.sMinus > this.config.threshold,
121
+ cumulativeSum,
122
+ direction,
123
+ samplesSinceReset: state.samplesSinceReset,
124
+ };
125
+ }
126
+ getOrCreateState(gateType) {
127
+ let state = this.states.get(gateType);
128
+ if (!state) {
129
+ state = {
130
+ sPlus: 0,
131
+ sMinus: 0,
132
+ samplesSinceReset: 0,
133
+ warmupSum: 0,
134
+ warmupCount: 0,
135
+ mu: null,
136
+ };
137
+ this.states.set(gateType, state);
138
+ }
139
+ return state;
140
+ }
141
+ }
142
+ //# sourceMappingURL=cusum-detector.js.map