claude-flow 3.5.21 → 3.5.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. package/.claude/helpers/hook-handler.cjs +4 -2
  2. package/README.md +9 -7
  3. package/package.json +1 -1
  4. package/v3/@claude-flow/cli/README.md +9 -7
  5. package/v3/@claude-flow/cli/dist/src/commands/hooks.js +698 -55
  6. package/v3/@claude-flow/cli/dist/src/commands/init.js +3 -1
  7. package/v3/@claude-flow/cli/dist/src/commands/neural.js +11 -5
  8. package/v3/@claude-flow/cli/dist/src/index.d.ts +1 -1
  9. package/v3/@claude-flow/cli/dist/src/index.js +2 -0
  10. package/v3/@claude-flow/cli/dist/src/mcp-tools/coordination-tools.js +191 -12
  11. package/v3/@claude-flow/cli/dist/src/mcp-tools/hive-mind-tools.js +224 -23
  12. package/v3/@claude-flow/cli/dist/src/mcp-tools/memory-tools.js +1 -0
  13. package/v3/@claude-flow/cli/dist/src/memory/ewc-consolidation.d.ts +24 -0
  14. package/v3/@claude-flow/cli/dist/src/memory/ewc-consolidation.js +59 -0
  15. package/v3/@claude-flow/cli/dist/src/memory/intelligence.d.ts +53 -0
  16. package/v3/@claude-flow/cli/dist/src/memory/intelligence.js +225 -0
  17. package/v3/@claude-flow/cli/dist/src/memory/memory-initializer.d.ts +7 -0
  18. package/v3/@claude-flow/cli/dist/src/memory/memory-initializer.js +27 -1
  19. package/v3/@claude-flow/cli/dist/src/ruvector/index.d.ts +4 -0
  20. package/v3/@claude-flow/cli/dist/src/ruvector/index.js +12 -0
  21. package/v3/@claude-flow/cli/dist/src/services/ruvector-training.d.ts +9 -1
  22. package/v3/@claude-flow/cli/dist/src/services/ruvector-training.js +223 -39
  23. package/v3/@claude-flow/cli/dist/src/services/worker-daemon.d.ts +4 -0
  24. package/v3/@claude-flow/cli/dist/src/services/worker-daemon.js +33 -5
  25. package/v3/@claude-flow/cli/package.json +1 -1
@@ -34,34 +34,224 @@ let totalForwards = 0;
34
34
  let totalSonaLearns = 0;
35
35
  let totalSonaSearches = 0;
36
36
  let lastBenchmark = null;
37
+ // Backend tracking
38
+ let activeBackend = 'js-fallback';
37
39
  /**
38
- * Initialize the RuVector training system
40
+ * Get which backend is active for training
41
+ */
42
+ export function getActiveBackend() {
43
+ return activeBackend;
44
+ }
45
+ /**
46
+ * Pure-JS fallback implementations for when WASM is unavailable.
47
+ * These provide the same API surface with basic linear algebra.
48
+ */
49
+ class JsMicroLoRA {
50
+ _dim;
51
+ _alpha;
52
+ _lr;
53
+ _adaptCount = 0n;
54
+ _forwardCount = 0n;
55
+ _deltaNorm = 0;
56
+ _A; // Low-rank A (rank x dim)
57
+ _B; // Low-rank B (dim x rank)
58
+ RANK = 2;
59
+ constructor(dim, alpha, lr) {
60
+ this._dim = dim;
61
+ this._alpha = alpha;
62
+ this._lr = lr;
63
+ this._A = new Float32Array(this.RANK * dim);
64
+ this._B = new Float32Array(dim * this.RANK);
65
+ // Xavier initialization
66
+ const scale = Math.sqrt(2 / (dim + this.RANK));
67
+ for (let i = 0; i < this._A.length; i++)
68
+ this._A[i] = (Math.random() - 0.5) * scale;
69
+ for (let i = 0; i < this._B.length; i++)
70
+ this._B[i] = (Math.random() - 0.5) * scale;
71
+ }
72
+ adapt_array(gradient) {
73
+ // Simple gradient update on low-rank matrices
74
+ let norm = 0;
75
+ for (let i = 0; i < Math.min(gradient.length, this._A.length); i++) {
76
+ const delta = -this._lr * gradient[i % gradient.length] * this._alpha;
77
+ this._A[i] += delta;
78
+ norm += delta * delta;
79
+ }
80
+ this._deltaNorm = Math.sqrt(norm);
81
+ this._adaptCount++;
82
+ }
83
+ adapt_count() { return this._adaptCount; }
84
+ param_count() { return this._A.length + this._B.length; }
85
+ forward_array(input) {
86
+ const output = new Float32Array(this._dim);
87
+ // y = x + alpha * B @ A @ x (simplified low-rank)
88
+ for (let i = 0; i < this._dim; i++) {
89
+ output[i] = input[i];
90
+ let sum = 0;
91
+ for (let r = 0; r < this.RANK; r++) {
92
+ let dot = 0;
93
+ for (let j = 0; j < this._dim; j++) {
94
+ dot += this._A[r * this._dim + j] * input[j];
95
+ }
96
+ sum += this._B[i * this.RANK + r] * dot;
97
+ }
98
+ output[i] += this._alpha * sum;
99
+ }
100
+ this._forwardCount++;
101
+ return output;
102
+ }
103
+ forward_count() { return this._forwardCount; }
104
+ adapt_with_reward(improvement) {
105
+ const scale = improvement * this._lr * this._alpha;
106
+ let norm = 0;
107
+ for (let i = 0; i < this._A.length; i++) {
108
+ const delta = scale * (Math.random() - 0.5);
109
+ this._A[i] += delta;
110
+ norm += delta * delta;
111
+ }
112
+ this._deltaNorm = Math.sqrt(norm);
113
+ this._adaptCount++;
114
+ }
115
+ delta_norm() { return this._deltaNorm; }
116
+ dim() { return this._dim; }
117
+ reset() {
118
+ this._A.fill(0);
119
+ this._B.fill(0);
120
+ this._adaptCount = 0n;
121
+ this._forwardCount = 0n;
122
+ this._deltaNorm = 0;
123
+ }
124
+ free() { }
125
+ }
126
+ class JsScopedLoRA {
127
+ adapters = new Map();
128
+ _dim;
129
+ _alpha;
130
+ _lr;
131
+ _fallback = false;
132
+ constructor(dim, alpha, lr) {
133
+ this._dim = dim;
134
+ this._alpha = alpha;
135
+ this._lr = lr;
136
+ }
137
+ getAdapter(opType) {
138
+ if (!this.adapters.has(opType)) {
139
+ if (this._fallback && opType > 0 && this.adapters.has(0)) {
140
+ return this.adapters.get(0);
141
+ }
142
+ this.adapters.set(opType, new JsMicroLoRA(this._dim, this._alpha, this._lr));
143
+ }
144
+ return this.adapters.get(opType);
145
+ }
146
+ adapt_array(opType, gradient) { this.getAdapter(opType).adapt_array(gradient); }
147
+ adapt_count(opType) { return this.getAdapter(opType).adapt_count(); }
148
+ forward_array(opType, input) { return this.getAdapter(opType).forward_array(input); }
149
+ forward_count(opType) { return this.getAdapter(opType).forward_count(); }
150
+ adapt_with_reward(opType, improvement) { this.getAdapter(opType).adapt_with_reward(improvement); }
151
+ delta_norm(opType) { return this.getAdapter(opType).delta_norm(); }
152
+ set_category_fallback(enabled) { this._fallback = enabled; }
153
+ total_adapt_count() {
154
+ let total = 0n;
155
+ for (const a of this.adapters.values())
156
+ total += a.adapt_count();
157
+ return total;
158
+ }
159
+ total_forward_count() {
160
+ let total = 0n;
161
+ for (const a of this.adapters.values())
162
+ total += a.forward_count();
163
+ return total;
164
+ }
165
+ reset_all() { this.adapters.clear(); }
166
+ reset_scope(opType) { this.adapters.delete(opType); }
167
+ free() { this.adapters.clear(); }
168
+ }
169
+ class JsTrajectoryBuffer {
170
+ entries = [];
171
+ capacity;
172
+ constructor(capacity, _dim) {
173
+ this.capacity = capacity;
174
+ }
175
+ record(_embedding, _opType, _attType, executionMs, baselineMs) {
176
+ const improvement = baselineMs > 0 ? (baselineMs - executionMs) / baselineMs : 0;
177
+ if (this.entries.length >= this.capacity)
178
+ this.entries.shift();
179
+ this.entries.push({ improvement });
180
+ }
181
+ is_empty() { return this.entries.length === 0; }
182
+ total_count() { return BigInt(this.entries.length); }
183
+ success_rate() {
184
+ if (this.entries.length === 0)
185
+ return 0;
186
+ return this.entries.filter(e => e.improvement > 0).length / this.entries.length;
187
+ }
188
+ mean_improvement() {
189
+ if (this.entries.length === 0)
190
+ return 0;
191
+ return this.entries.reduce((s, e) => s + e.improvement, 0) / this.entries.length;
192
+ }
193
+ best_improvement() {
194
+ if (this.entries.length === 0)
195
+ return 0;
196
+ return Math.max(...this.entries.map(e => e.improvement));
197
+ }
198
+ high_quality_count(threshold) {
199
+ return this.entries.filter(e => e.improvement > threshold).length;
200
+ }
201
+ variance() {
202
+ if (this.entries.length < 2)
203
+ return 0;
204
+ const mean = this.mean_improvement();
205
+ return this.entries.reduce((s, e) => s + (e.improvement - mean) ** 2, 0) / (this.entries.length - 1);
206
+ }
207
+ reset() { this.entries = []; }
208
+ free() { this.entries = []; }
209
+ }
210
+ /**
211
+ * Initialize the RuVector training system.
212
+ * Attempts to load @ruvector/learning-wasm for WASM-accelerated training.
213
+ * Falls back to a pure-JS implementation if WASM is unavailable.
39
214
  */
40
215
  export async function initializeTraining(config = {}) {
41
216
  const features = [];
42
217
  const dim = Math.min(config.dim || 256, 256); // Max 256 for WASM
43
218
  const lr = config.learningRate || 0.01;
44
219
  const alpha = config.alpha || 0.1;
220
+ // --- Attempt WASM backend first ---
221
+ let wasmLoaded = false;
45
222
  try {
46
- // Initialize MicroLoRA with direct WASM loading (Node.js compatible)
47
223
  const fs = await import('fs');
48
224
  const { createRequire } = await import('module');
49
225
  const require = createRequire(import.meta.url);
50
- // Load WASM file directly instead of using fetch
51
226
  const wasmPath = require.resolve('@ruvector/learning-wasm/ruvector_learning_wasm_bg.wasm');
52
227
  const wasmBuffer = fs.readFileSync(wasmPath);
53
228
  const learningWasm = await import('@ruvector/learning-wasm');
54
229
  learningWasm.initSync({ module: wasmBuffer });
55
230
  microLoRA = new learningWasm.WasmMicroLoRA(dim, alpha, lr);
56
- features.push(`MicroLoRA (${dim}-dim, <1μs adaptation)`);
57
- // Initialize ScopedLoRA for per-operator learning
231
+ features.push(`MicroLoRA/WASM (${dim}-dim, <1μs adaptation)`);
58
232
  scopedLoRA = new learningWasm.WasmScopedLoRA(dim, alpha, lr);
59
233
  scopedLoRA.set_category_fallback(true);
60
- features.push('ScopedLoRA (17 operators)');
61
- // Initialize trajectory buffer
234
+ features.push('ScopedLoRA/WASM (17 operators)');
62
235
  trajectoryBuffer = new learningWasm.WasmTrajectoryBuffer(config.trajectoryCapacity || 10000, dim);
63
- features.push('TrajectoryBuffer');
64
- // Initialize attention mechanisms
236
+ features.push('TrajectoryBuffer/WASM');
237
+ activeBackend = 'wasm';
238
+ wasmLoaded = true;
239
+ }
240
+ catch (wasmError) {
241
+ // WASM not available - fall back to JS implementation
242
+ const reason = wasmError instanceof Error ? wasmError.message : String(wasmError);
243
+ console.warn(`[ruvector] WASM backend unavailable (${reason}), using JS fallback`);
244
+ microLoRA = new JsMicroLoRA(dim, alpha, lr);
245
+ features.push(`MicroLoRA/JS (${dim}-dim, JS fallback)`);
246
+ scopedLoRA = new JsScopedLoRA(dim, alpha, lr);
247
+ scopedLoRA.set_category_fallback(true);
248
+ features.push('ScopedLoRA/JS (17 operators)');
249
+ trajectoryBuffer = new JsTrajectoryBuffer(config.trajectoryCapacity || 10000, dim);
250
+ features.push('TrajectoryBuffer/JS');
251
+ activeBackend = 'js-fallback';
252
+ }
253
+ // --- Attention mechanisms (optional, independent of WASM) ---
254
+ try {
65
255
  const attention = await import('@ruvector/attention');
66
256
  if (config.useFlashAttention !== false) {
67
257
  flashAttention = new attention.FlashAttention(dim, 64);
@@ -75,17 +265,14 @@ export async function initializeTraining(config = {}) {
75
265
  hyperbolicAttention = new attention.HyperbolicAttention(dim, 1.0);
76
266
  features.push('HyperbolicAttention');
77
267
  }
78
- // Initialize optimizer and loss
79
268
  optimizer = new attention.AdamWOptimizer(lr, 0.9, 0.999, 1e-8, 0.01);
80
269
  features.push('AdamW Optimizer');
81
270
  contrastiveLoss = new attention.InfoNceLoss(0.07);
82
271
  features.push('InfoNCE Loss');
83
- // Curriculum scheduler
84
272
  if (config.totalSteps) {
85
273
  curriculum = new attention.CurriculumScheduler(config.totalSteps, config.warmupSteps || Math.floor(config.totalSteps * 0.1));
86
274
  features.push('Curriculum Learning');
87
275
  }
88
- // Hard negative mining - use string for MiningStrategy enum due to NAPI binding quirk
89
276
  try {
90
277
  hardMiner = new attention.HardNegativeMiner(5, 'semi_hard');
91
278
  features.push('Hard Negative Mining');
@@ -93,36 +280,31 @@ export async function initializeTraining(config = {}) {
93
280
  catch {
94
281
  // Mining not available, continue without it
95
282
  }
96
- // Initialize SONA (optional, backward compatible)
97
- if (config.useSona !== false) {
98
- try {
99
- const sona = await import('@ruvector/sona');
100
- const sonaRank = config.sonaRank || 4;
101
- // SonaEngine constructor: (dim, rank, alpha, learningRate) - TypeScript types are wrong
102
- // @ts-expect-error - SonaEngine accepts 4 positional args but types say 1
103
- sonaEngine = new sona.SonaEngine(dim, sonaRank, alpha, lr);
104
- sonaAvailable = true;
105
- features.push(`SONA (${dim}-dim, rank-${sonaRank}, 624k learn/s)`);
106
- }
107
- catch (sonaError) {
108
- // SONA not available, continue without it (backward compatible)
109
- sonaAvailable = false;
110
- // Only log if explicitly requested
111
- if (config.useSona === true) {
112
- console.warn('SONA requested but not available:', sonaError);
113
- }
283
+ }
284
+ catch (attentionError) {
285
+ // @ruvector/attention not available - attention features skipped
286
+ const reason = attentionError instanceof Error ? attentionError.message : String(attentionError);
287
+ console.warn(`[ruvector] @ruvector/attention unavailable (${reason}), attention features disabled`);
288
+ }
289
+ // --- SONA (optional, backward compatible) ---
290
+ if (config.useSona !== false) {
291
+ try {
292
+ const sona = await import('@ruvector/sona');
293
+ const sonaRank = config.sonaRank || 4;
294
+ // @ts-expect-error - SonaEngine accepts 4 positional args but types say 1
295
+ sonaEngine = new sona.SonaEngine(dim, sonaRank, alpha, lr);
296
+ sonaAvailable = true;
297
+ features.push(`SONA (${dim}-dim, rank-${sonaRank}, 624k learn/s)`);
298
+ }
299
+ catch (sonaError) {
300
+ sonaAvailable = false;
301
+ if (config.useSona === true) {
302
+ console.warn('SONA requested but not available:', sonaError);
114
303
  }
115
304
  }
116
- initialized = true;
117
- return { success: true, features };
118
- }
119
- catch (error) {
120
- return {
121
- success: false,
122
- features,
123
- error: error instanceof Error ? error.message : String(error),
124
- };
125
305
  }
306
+ initialized = true;
307
+ return { success: true, features, backend: activeBackend };
126
308
  }
127
309
  /**
128
310
  * Operator types for scoped LoRA (0-16)
@@ -396,6 +578,7 @@ export function sonaFlush() {
396
578
  export function getTrainingStats() {
397
579
  const stats = {
398
580
  initialized,
581
+ backend: activeBackend,
399
582
  totalAdaptations,
400
583
  totalForwards,
401
584
  };
@@ -443,6 +626,7 @@ export function resetTraining() {
443
626
  totalForwards = 0;
444
627
  totalSonaLearns = 0;
445
628
  totalSonaSearches = 0;
629
+ activeBackend = 'js-fallback';
446
630
  }
447
631
  /**
448
632
  * Export trained weights
@@ -122,6 +122,10 @@ export declare class WorkerDaemon extends EventEmitter {
122
122
  private executeWorker;
123
123
  /**
124
124
  * Run a function with timeout (P1 fix)
125
+ * @param fn - The async function to execute
126
+ * @param timeoutMs - Timeout in milliseconds
127
+ * @param timeoutMessage - Error message on timeout
128
+ * @param onTimeout - Optional cleanup callback invoked when timeout fires (#1117: kills orphan processes)
125
129
  */
126
130
  private runWithTimeout;
127
131
  /**
@@ -10,7 +10,7 @@
10
10
  * - testgaps: Test coverage analysis (20 min interval)
11
11
  */
12
12
  import { EventEmitter } from 'events';
13
- import { existsSync, mkdirSync, writeFileSync, readFileSync } from 'fs';
13
+ import { existsSync, mkdirSync, writeFileSync, readFileSync, appendFileSync } from 'fs';
14
14
  import { join } from 'path';
15
15
  import { HeadlessWorkerExecutor, isHeadlessWorker, } from './headless-worker-executor.js';
16
16
  // Default worker configurations with improved intervals (P0 fix: map 5min -> 15min)
@@ -328,7 +328,13 @@ export class WorkerDaemon extends EventEmitter {
328
328
  this.log('info', `Starting worker: ${workerConfig.type} (${this.runningWorkers.size}/${this.config.maxConcurrent} concurrent)`);
329
329
  try {
330
330
  // Execute worker logic with timeout (P1 fix)
331
- const output = await this.runWithTimeout(() => this.runWorkerLogic(workerConfig), this.config.workerTimeoutMs, `Worker ${workerConfig.type} timed out after ${this.config.workerTimeoutMs / 1000}s`);
331
+ // Pass cleanup callback to kill orphan child processes on timeout (#1117)
332
+ const output = await this.runWithTimeout(() => this.runWorkerLogic(workerConfig), this.config.workerTimeoutMs, `Worker ${workerConfig.type} timed out after ${this.config.workerTimeoutMs / 1000}s`, () => {
333
+ // On timeout, cancel any headless execution to prevent orphan processes
334
+ if (this.headlessExecutor) {
335
+ this.headlessExecutor.cancelAll();
336
+ }
337
+ });
332
338
  const durationMs = Date.now() - startTime;
333
339
  // Update state
334
340
  state.runCount++;
@@ -376,18 +382,41 @@ export class WorkerDaemon extends EventEmitter {
376
382
  }
377
383
  /**
378
384
  * Run a function with timeout (P1 fix)
385
+ * @param fn - The async function to execute
386
+ * @param timeoutMs - Timeout in milliseconds
387
+ * @param timeoutMessage - Error message on timeout
388
+ * @param onTimeout - Optional cleanup callback invoked when timeout fires (#1117: kills orphan processes)
379
389
  */
380
- async runWithTimeout(fn, timeoutMs, timeoutMessage) {
390
+ async runWithTimeout(fn, timeoutMs, timeoutMessage, onTimeout) {
381
391
  return new Promise((resolve, reject) => {
392
+ let settled = false;
382
393
  const timer = setTimeout(() => {
394
+ if (settled)
395
+ return;
396
+ settled = true;
397
+ // Kill orphan child processes before rejecting (#1117)
398
+ if (onTimeout) {
399
+ try {
400
+ onTimeout();
401
+ }
402
+ catch {
403
+ // Ignore cleanup errors
404
+ }
405
+ }
383
406
  reject(new Error(timeoutMessage));
384
407
  }, timeoutMs);
385
408
  fn()
386
409
  .then((result) => {
410
+ if (settled)
411
+ return;
412
+ settled = true;
387
413
  clearTimeout(timer);
388
414
  resolve(result);
389
415
  })
390
416
  .catch((error) => {
417
+ if (settled)
418
+ return;
419
+ settled = true;
391
420
  clearTimeout(timer);
392
421
  reject(error);
393
422
  });
@@ -714,8 +743,7 @@ export class WorkerDaemon extends EventEmitter {
714
743
  // Also write to log file
715
744
  try {
716
745
  const logFile = join(this.config.logDir, 'daemon.log');
717
- const fs = require('fs');
718
- fs.appendFileSync(logFile, logMessage + '\n');
746
+ appendFileSync(logFile, logMessage + '\n');
719
747
  }
720
748
  catch {
721
749
  // Ignore log write errors
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@claude-flow/cli",
3
- "version": "3.5.21",
3
+ "version": "3.5.23",
4
4
  "type": "module",
5
5
  "description": "Ruflo CLI - Enterprise AI agent orchestration with 60+ specialized agents, swarm coordination, MCP server, self-learning hooks, and vector memory for Claude Code",
6
6
  "main": "dist/src/index.js",