ruvector 0.2.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/cli.js +688 -598
  2. package/bin/mcp-server.js +272 -1
  3. package/package.json +7 -7
package/bin/cli.js CHANGED
@@ -45,79 +45,59 @@ function requireRuvector() {
45
45
  }
46
46
  }
47
47
 
48
- // Lazy load GNN (optional - loaded on first use, not at startup)
49
- // Saves ~6ms startup time by deferring require('@ruvector/gnn')
50
- let _gnnModule = undefined; // undefined = not yet attempted, null = failed, object = loaded
48
+ // Import GNN (optional - graceful fallback if not available)
51
49
  let RuvectorLayer, TensorCompress, differentiableSearch, getCompressionLevel, hierarchicalForward;
52
50
  let gnnAvailable = false;
53
-
54
- function loadGnn() {
55
- if (_gnnModule !== undefined) return _gnnModule;
56
- try {
57
- const gnn = require('@ruvector/gnn');
58
- RuvectorLayer = gnn.RuvectorLayer;
59
- TensorCompress = gnn.TensorCompress;
60
- differentiableSearch = gnn.differentiableSearch;
61
- getCompressionLevel = gnn.getCompressionLevel;
62
- hierarchicalForward = gnn.hierarchicalForward;
63
- _gnnModule = gnn;
64
- gnnAvailable = true;
65
- return gnn;
66
- } catch (e) {
67
- _gnnModule = null;
68
- gnnAvailable = false;
69
- return null;
70
- }
51
+ try {
52
+ const gnn = require('@ruvector/gnn');
53
+ RuvectorLayer = gnn.RuvectorLayer;
54
+ TensorCompress = gnn.TensorCompress;
55
+ differentiableSearch = gnn.differentiableSearch;
56
+ getCompressionLevel = gnn.getCompressionLevel;
57
+ hierarchicalForward = gnn.hierarchicalForward;
58
+ gnnAvailable = true;
59
+ } catch (e) {
60
+ // GNN not available - commands will show helpful message
71
61
  }
72
62
 
73
- // Lazy load Attention (optional - loaded on first use, not at startup)
74
- // Saves ~5ms startup time by deferring require('@ruvector/attention')
75
- let _attentionModule = undefined; // undefined = not yet attempted
63
+ // Import Attention (optional - graceful fallback if not available)
76
64
  let DotProductAttention, MultiHeadAttention, HyperbolicAttention, FlashAttention, LinearAttention, MoEAttention;
77
65
  let GraphRoPeAttention, EdgeFeaturedAttention, DualSpaceAttention, LocalGlobalAttention;
78
66
  let benchmarkAttention, computeAttentionAsync, batchAttentionCompute, parallelAttentionCompute;
79
67
  let expMap, logMap, mobiusAddition, poincareDistance, projectToPoincareBall;
80
68
  let attentionInfo, attentionVersion;
81
69
  let attentionAvailable = false;
82
-
83
- function loadAttention() {
84
- if (_attentionModule !== undefined) return _attentionModule;
85
- try {
86
- const attention = require('@ruvector/attention');
87
- // Core mechanisms
88
- DotProductAttention = attention.DotProductAttention;
89
- MultiHeadAttention = attention.MultiHeadAttention;
90
- HyperbolicAttention = attention.HyperbolicAttention;
91
- FlashAttention = attention.FlashAttention;
92
- LinearAttention = attention.LinearAttention;
93
- MoEAttention = attention.MoEAttention;
94
- // Graph attention
95
- GraphRoPeAttention = attention.GraphRoPeAttention;
96
- EdgeFeaturedAttention = attention.EdgeFeaturedAttention;
97
- DualSpaceAttention = attention.DualSpaceAttention;
98
- LocalGlobalAttention = attention.LocalGlobalAttention;
99
- // Utilities
100
- benchmarkAttention = attention.benchmarkAttention;
101
- computeAttentionAsync = attention.computeAttentionAsync;
102
- batchAttentionCompute = attention.batchAttentionCompute;
103
- parallelAttentionCompute = attention.parallelAttentionCompute;
104
- // Hyperbolic math
105
- expMap = attention.expMap;
106
- logMap = attention.logMap;
107
- mobiusAddition = attention.mobiusAddition;
108
- poincareDistance = attention.poincareDistance;
109
- projectToPoincareBall = attention.projectToPoincareBall;
110
- // Meta
111
- attentionInfo = attention.info;
112
- attentionVersion = attention.version;
113
- _attentionModule = attention;
114
- attentionAvailable = true;
115
- return attention;
116
- } catch (e) {
117
- _attentionModule = null;
118
- attentionAvailable = false;
119
- return null;
120
- }
70
+ try {
71
+ const attention = require('@ruvector/attention');
72
+ // Core mechanisms
73
+ DotProductAttention = attention.DotProductAttention;
74
+ MultiHeadAttention = attention.MultiHeadAttention;
75
+ HyperbolicAttention = attention.HyperbolicAttention;
76
+ FlashAttention = attention.FlashAttention;
77
+ LinearAttention = attention.LinearAttention;
78
+ MoEAttention = attention.MoEAttention;
79
+ // Graph attention
80
+ GraphRoPeAttention = attention.GraphRoPeAttention;
81
+ EdgeFeaturedAttention = attention.EdgeFeaturedAttention;
82
+ DualSpaceAttention = attention.DualSpaceAttention;
83
+ LocalGlobalAttention = attention.LocalGlobalAttention;
84
+ // Utilities
85
+ benchmarkAttention = attention.benchmarkAttention;
86
+ computeAttentionAsync = attention.computeAttentionAsync;
87
+ batchAttentionCompute = attention.batchAttentionCompute;
88
+ parallelAttentionCompute = attention.parallelAttentionCompute;
89
+ // Hyperbolic math
90
+ expMap = attention.expMap;
91
+ logMap = attention.logMap;
92
+ mobiusAddition = attention.mobiusAddition;
93
+ poincareDistance = attention.poincareDistance;
94
+ projectToPoincareBall = attention.projectToPoincareBall;
95
+ // Meta
96
+ attentionInfo = attention.info;
97
+ attentionVersion = attention.version;
98
+ attentionAvailable = true;
99
+ } catch (e) {
100
+ // Attention not available - commands will show helpful message
121
101
  }
122
102
 
123
103
  const program = new Command();
@@ -384,10 +364,6 @@ program
384
364
  .command('info')
385
365
  .description('Show ruvector information')
386
366
  .action(() => {
387
- // Trigger lazy load of optional modules for availability check
388
- loadGnn();
389
- loadAttention();
390
-
391
367
  console.log(chalk.cyan('\nruvector Information'));
392
368
  console.log(chalk.white(` CLI Version: ${chalk.yellow(packageJson.version)}`));
393
369
 
@@ -424,9 +400,6 @@ program
424
400
  .action(async (packages, options) => {
425
401
  const { execSync } = require('child_process');
426
402
 
427
- // Trigger lazy load to check availability
428
- loadGnn();
429
-
430
403
  // Available optional packages - all ruvector npm packages
431
404
  const availablePackages = {
432
405
  // Core packages
@@ -714,9 +687,8 @@ program
714
687
  // GNN Commands
715
688
  // =============================================================================
716
689
 
717
- // Helper to check GNN availability (triggers lazy load)
690
+ // Helper to check GNN availability
718
691
  function requireGnn() {
719
- loadGnn();
720
692
  if (!gnnAvailable) {
721
693
  console.error(chalk.red('Error: GNN module not available.'));
722
694
  console.error(chalk.yellow('Install it with: npm install @ruvector/gnn'));
@@ -912,7 +884,6 @@ gnnCmd
912
884
  .command('info')
913
885
  .description('Show GNN module information')
914
886
  .action(() => {
915
- loadGnn();
916
887
  if (!gnnAvailable) {
917
888
  console.log(chalk.yellow('\nGNN Module: Not installed'));
918
889
  console.log(chalk.white('Install with: npm install @ruvector/gnn'));
@@ -942,9 +913,8 @@ gnnCmd
942
913
  // Attention Commands
943
914
  // =============================================================================
944
915
 
945
- // Helper to require attention module (triggers lazy load)
916
+ // Helper to require attention module
946
917
  function requireAttention() {
947
- loadAttention();
948
918
  if (!attentionAvailable) {
949
919
  console.error(chalk.red('Error: @ruvector/attention is not installed'));
950
920
  console.error(chalk.yellow('Install it with: npm install @ruvector/attention'));
@@ -1272,7 +1242,6 @@ attentionCmd
1272
1242
  .command('info')
1273
1243
  .description('Show attention module information')
1274
1244
  .action(() => {
1275
- loadAttention();
1276
1245
  if (!attentionAvailable) {
1277
1246
  console.log(chalk.yellow('\nAttention Module: Not installed'));
1278
1247
  console.log(chalk.white('Install with: npm install @ruvector/attention'));
@@ -1318,7 +1287,6 @@ attentionCmd
1318
1287
  .description('List all available attention mechanisms')
1319
1288
  .option('-v, --verbose', 'Show detailed information')
1320
1289
  .action((options) => {
1321
- loadAttention();
1322
1290
  console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
1323
1291
  console.log(chalk.cyan(' Available Attention Mechanisms'));
1324
1292
  console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
@@ -1375,10 +1343,6 @@ program
1375
1343
  .action(async (options) => {
1376
1344
  const { execSync } = require('child_process');
1377
1345
 
1378
- // Trigger lazy load of optional modules for availability check
1379
- loadGnn();
1380
- loadAttention();
1381
-
1382
1346
  console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
1383
1347
  console.log(chalk.cyan(' RuVector Doctor'));
1384
1348
  console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
@@ -2567,7 +2531,6 @@ program
2567
2531
  }
2568
2532
 
2569
2533
  if (options.gnn) {
2570
- loadGnn();
2571
2534
  if (!gnnAvailable) {
2572
2535
  console.log(chalk.yellow(' @ruvector/gnn not installed.'));
2573
2536
  console.log(chalk.white(' Install with: npm install @ruvector/gnn'));
@@ -7400,20 +7363,232 @@ mcpCmd.command('info')
7400
7363
  console.log();
7401
7364
  });
7402
7365
 
7366
+ // ============================================================================
7367
+ // MCP tools subcommand
7368
+ // ============================================================================
7369
+
7370
+ mcpCmd.command('tools')
7371
+ .description('List all MCP tools organized by group')
7372
+ .option('--group <name>', 'Filter by group (hooks, workers, rvf, rvlite, brain, edge, identity)')
7373
+ .option('--json', 'Output as JSON')
7374
+ .action((opts) => {
7375
+ const toolGroups = {
7376
+ 'hooks-core': [
7377
+ { name: 'hooks_stats', args: '(none)', desc: 'Get intelligence statistics' },
7378
+ { name: 'hooks_route', args: 'task, file?', desc: 'Route task to best agent' },
7379
+ { name: 'hooks_remember', args: 'content, type?', desc: 'Store context in vector memory' },
7380
+ { name: 'hooks_recall', args: 'query, limit?', desc: 'Search vector memory' },
7381
+ { name: 'hooks_init', args: 'project_path?, force?', desc: 'Initialize hooks in project' },
7382
+ { name: 'hooks_pretrain', args: 'scan_path?, patterns?', desc: 'Pretrain from repository' },
7383
+ { name: 'hooks_build_agents', args: 'project_path?', desc: 'Generate agent configs' },
7384
+ { name: 'hooks_verify', args: '(none)', desc: 'Verify hooks configuration' },
7385
+ { name: 'hooks_doctor', args: 'fix?', desc: 'Diagnose setup issues' },
7386
+ { name: 'hooks_export', args: 'format?', desc: 'Export intelligence data' },
7387
+ ],
7388
+ 'hooks-trajectory': [
7389
+ { name: 'hooks_trajectory_start', args: 'task, context?', desc: 'Start learning trajectory' },
7390
+ { name: 'hooks_trajectory_step', args: 'trajectory_id, action, result', desc: 'Record trajectory step' },
7391
+ { name: 'hooks_trajectory_end', args: 'trajectory_id, outcome, score?', desc: 'End trajectory with outcome' },
7392
+ ],
7393
+ 'hooks-coedit': [
7394
+ { name: 'hooks_pre_edit', args: 'file, changes', desc: 'Pre-edit analysis' },
7395
+ { name: 'hooks_post_edit', args: 'file, changes, result', desc: 'Post-edit learning' },
7396
+ { name: 'hooks_pre_command', args: 'command, args?', desc: 'Pre-command analysis' },
7397
+ { name: 'hooks_post_command', args: 'command, exit_code, output?', desc: 'Post-command learning' },
7398
+ { name: 'hooks_pre_task', args: 'task, context?', desc: 'Pre-task routing' },
7399
+ { name: 'hooks_post_task', args: 'task, result, duration?', desc: 'Post-task learning' },
7400
+ ],
7401
+ 'hooks-errors': [
7402
+ { name: 'hooks_error_learn', args: 'error, context?', desc: 'Learn from errors' },
7403
+ { name: 'hooks_error_patterns', args: 'limit?', desc: 'Get learned error patterns' },
7404
+ { name: 'hooks_error_suggest', args: 'error', desc: 'Suggest fix for error' },
7405
+ ],
7406
+ 'hooks-analysis': [
7407
+ { name: 'hooks_complexity', args: 'file', desc: 'Analyze code complexity' },
7408
+ { name: 'hooks_dependencies', args: 'file', desc: 'Analyze dependencies' },
7409
+ { name: 'hooks_security_scan', args: 'file', desc: 'Security vulnerability scan' },
7410
+ { name: 'hooks_test_coverage', args: 'file', desc: 'Estimate test coverage' },
7411
+ { name: 'hooks_dead_code', args: 'file', desc: 'Detect dead code' },
7412
+ { name: 'hooks_duplicate_code', args: 'file', desc: 'Find duplicate code' },
7413
+ ],
7414
+ 'hooks-learning': [
7415
+ { name: 'hooks_pattern_store', args: 'pattern, category, confidence?', desc: 'Store a learned pattern' },
7416
+ { name: 'hooks_pattern_search', args: 'query, category?, limit?', desc: 'Search patterns' },
7417
+ { name: 'hooks_attention', args: 'query, context', desc: 'Attention-weighted relevance' },
7418
+ ],
7419
+ 'hooks-compress': [
7420
+ { name: 'hooks_compress_context', args: 'content, max_tokens?', desc: 'Compress context' },
7421
+ { name: 'hooks_compress_code', args: 'code, language?', desc: 'Compress code representation' },
7422
+ { name: 'hooks_compress_diff', args: 'diff', desc: 'Compress diff' },
7423
+ ],
7424
+ 'hooks-events': [
7425
+ { name: 'hooks_session_start', args: '(none)', desc: 'Signal session start' },
7426
+ { name: 'hooks_session_end', args: 'summary?', desc: 'Signal session end' },
7427
+ { name: 'hooks_notify', args: 'message, level?', desc: 'Send notification' },
7428
+ { name: 'hooks_transfer', args: 'target, data', desc: 'Transfer context' },
7429
+ ],
7430
+ 'hooks-model': [
7431
+ { name: 'hooks_model_route', args: 'task, complexity?', desc: 'Route to optimal model tier' },
7432
+ { name: 'hooks_model_outcome', args: 'model, task, success, tokens?', desc: 'Record model outcome' },
7433
+ { name: 'hooks_model_stats', args: '(none)', desc: 'Get model routing stats' },
7434
+ ],
7435
+ 'workers': [
7436
+ { name: 'workers_list', args: '(none)', desc: 'List available workers' },
7437
+ { name: 'workers_status', args: 'worker_id?', desc: 'Get worker status' },
7438
+ { name: 'workers_dispatch', args: 'worker, task, args?', desc: 'Dispatch task to worker' },
7439
+ { name: 'workers_cancel', args: 'job_id', desc: 'Cancel running job' },
7440
+ { name: 'workers_detect', args: 'file', desc: 'Auto-detect applicable workers' },
7441
+ { name: 'workers_complexity', args: 'file', desc: 'Worker: complexity analysis' },
7442
+ { name: 'workers_dependencies', args: 'file', desc: 'Worker: dependency analysis' },
7443
+ { name: 'workers_security', args: 'file', desc: 'Worker: security scan' },
7444
+ { name: 'workers_coverage', args: 'file', desc: 'Worker: test coverage' },
7445
+ { name: 'workers_dead_code', args: 'file', desc: 'Worker: dead code detection' },
7446
+ { name: 'workers_duplicates', args: 'file', desc: 'Worker: duplicate detection' },
7447
+ { name: 'workers_performance', args: 'file', desc: 'Worker: performance analysis' },
7448
+ ],
7449
+ 'rvf': [
7450
+ { name: 'rvf_create', args: 'path, dimension?, metric?', desc: 'Create new .rvf vector store' },
7451
+ { name: 'rvf_open', args: 'path', desc: 'Open existing .rvf store' },
7452
+ { name: 'rvf_ingest', args: 'path, vectors, ids?, metadata?', desc: 'Insert vectors' },
7453
+ { name: 'rvf_query', args: 'path, vector, k?, filter?', desc: 'Query nearest neighbors' },
7454
+ { name: 'rvf_delete', args: 'path, ids', desc: 'Delete vectors by ID' },
7455
+ { name: 'rvf_status', args: 'path', desc: 'Get store status' },
7456
+ { name: 'rvf_compact', args: 'path', desc: 'Compact store' },
7457
+ { name: 'rvf_derive', args: 'parent_path, child_path', desc: 'COW-branch to child store' },
7458
+ { name: 'rvf_segments', args: 'path', desc: 'List file segments' },
7459
+ { name: 'rvf_examples', args: '(none)', desc: 'List example .rvf files' },
7460
+ ],
7461
+ 'rvlite': [
7462
+ { name: 'rvlite_sql', args: 'query, db_path?', desc: 'Execute SQL query' },
7463
+ { name: 'rvlite_cypher', args: 'query, db_path?', desc: 'Execute Cypher graph query' },
7464
+ { name: 'rvlite_sparql', args: 'query, db_path?', desc: 'Execute SPARQL RDF query' },
7465
+ ],
7466
+ 'brain': [
7467
+ { name: 'brain_search', args: 'query, category?, limit?', desc: 'Semantic search shared brain' },
7468
+ { name: 'brain_share', args: 'title, content, category, tags?, code_snippet?', desc: 'Share knowledge' },
7469
+ { name: 'brain_get', args: 'id', desc: 'Retrieve memory by ID' },
7470
+ { name: 'brain_vote', args: 'id, direction', desc: 'Quality vote (up/down)' },
7471
+ { name: 'brain_list', args: 'category?, limit?', desc: 'List recent memories' },
7472
+ { name: 'brain_delete', args: 'id', desc: 'Delete own contribution' },
7473
+ { name: 'brain_status', args: '(none)', desc: 'System health' },
7474
+ { name: 'brain_drift', args: 'domain?', desc: 'Check knowledge drift' },
7475
+ { name: 'brain_partition', args: 'domain?, min_cluster_size?', desc: 'Knowledge topology' },
7476
+ { name: 'brain_transfer', args: 'source_domain, target_domain', desc: 'Cross-domain transfer' },
7477
+ { name: 'brain_sync', args: 'direction?', desc: 'LoRA weight sync' },
7478
+ ],
7479
+ 'edge': [
7480
+ { name: 'edge_status', args: '(none)', desc: 'Network status' },
7481
+ { name: 'edge_join', args: 'contribution?', desc: 'Join compute network' },
7482
+ { name: 'edge_balance', args: '(none)', desc: 'Check rUv balance' },
7483
+ { name: 'edge_tasks', args: 'limit?', desc: 'List compute tasks' },
7484
+ ],
7485
+ 'identity': [
7486
+ { name: 'identity_generate', args: '(none)', desc: 'Generate new pi key' },
7487
+ { name: 'identity_show', args: '(none)', desc: 'Show current identity' },
7488
+ ],
7489
+ };
7490
+
7491
+ if (opts.json) {
7492
+ const output = {};
7493
+ Object.entries(toolGroups).forEach(([group, tools]) => {
7494
+ if (!opts.group || group === opts.group || group.startsWith(opts.group)) {
7495
+ output[group] = tools;
7496
+ }
7497
+ });
7498
+ console.log(JSON.stringify(output, null, 2));
7499
+ return;
7500
+ }
7501
+
7502
+ console.log(chalk.bold.cyan('\nRuVector MCP Tools\n'));
7503
+ let total = 0;
7504
+ Object.entries(toolGroups).forEach(([group, tools]) => {
7505
+ if (opts.group && group !== opts.group && !group.startsWith(opts.group)) return;
7506
+ console.log(chalk.bold.yellow(` ${group} (${tools.length}):`));
7507
+ tools.forEach(t => {
7508
+ console.log(` ${chalk.green(t.name.padEnd(28))} ${chalk.dim(t.args.padEnd(40))} ${t.desc}`);
7509
+ });
7510
+ console.log();
7511
+ total += tools.length;
7512
+ });
7513
+ console.log(chalk.bold(`Total: ${total} MCP tools\n`));
7514
+ });
7515
+
7516
+ // ============================================================================
7517
+ // MCP test subcommand
7518
+ // ============================================================================
7519
+
7520
+ mcpCmd.command('test')
7521
+ .description('Test MCP server setup and tool registration')
7522
+ .action(() => {
7523
+ console.log(chalk.bold.cyan('\nMCP Server Test Results'));
7524
+ console.log(chalk.dim('-'.repeat(40)));
7525
+
7526
+ const mcpServerPath = path.join(__dirname, 'mcp-server.js');
7527
+ if (fs.existsSync(mcpServerPath)) {
7528
+ console.log(` ${chalk.green('PASS')} mcp-server.js exists`);
7529
+ } else {
7530
+ console.log(` ${chalk.red('FAIL')} mcp-server.js not found`);
7531
+ process.exit(1);
7532
+ }
7533
+
7534
+ try {
7535
+ const { execSync } = require('child_process');
7536
+ execSync(`node -c ${mcpServerPath}`, { stdio: 'pipe' });
7537
+ console.log(` ${chalk.green('PASS')} mcp-server.js syntax valid`);
7538
+ } catch {
7539
+ console.log(` ${chalk.red('FAIL')} mcp-server.js has syntax errors`);
7540
+ process.exit(1);
7541
+ }
7542
+
7543
+ try {
7544
+ require('@modelcontextprotocol/sdk/server/index.js');
7545
+ console.log(` ${chalk.green('PASS')} @modelcontextprotocol/sdk installed`);
7546
+ } catch {
7547
+ console.log(` ${chalk.red('FAIL')} @modelcontextprotocol/sdk not installed`);
7548
+ process.exit(1);
7549
+ }
7550
+
7551
+ try {
7552
+ const src = fs.readFileSync(mcpServerPath, 'utf8');
7553
+ const toolsStart = src.indexOf('const TOOLS = [');
7554
+ const toolsSection = toolsStart >= 0 ? src.slice(toolsStart) : src;
7555
+ const toolDefs = toolsSection.match(/name:\s*'([a-z][a-z0-9_]*)'\s*,\s*\n\s*description:/g) || [];
7556
+ const toolNames = toolDefs.map(m => m.match(/name:\s*'([a-z][a-z0-9_]*)'/)[1]);
7557
+ const groups = {};
7558
+ toolNames.forEach(n => {
7559
+ const g = n.split('_')[0];
7560
+ groups[g] = (groups[g] || 0) + 1;
7561
+ });
7562
+
7563
+ Object.entries(groups).sort((a, b) => b[1] - a[1]).forEach(([group, count]) => {
7564
+ console.log(` ${chalk.green('PASS')} ${group}: ${count} tools`);
7565
+ });
7566
+ console.log(chalk.bold(`\n Total: ${toolNames.length} tools registered`));
7567
+ } catch (e) {
7568
+ console.log(` ${chalk.yellow('WARN')} Could not parse tool count: ${e.message}`);
7569
+ }
7570
+
7571
+ try {
7572
+ const src = fs.readFileSync(mcpServerPath, 'utf8');
7573
+ const verMatch = src.match(/version:\s*'([^']+)'/);
7574
+ if (verMatch) {
7575
+ const pkg = require(path.join(__dirname, '..', 'package.json'));
7576
+ const match = verMatch[1] === pkg.version;
7577
+ console.log(` ${match ? chalk.green('PASS') : chalk.yellow('WARN')} Server version: ${verMatch[1]}${match ? '' : ` (package: ${pkg.version})`}`);
7578
+ }
7579
+ } catch {}
7580
+
7581
+ console.log(chalk.bold.green('\n All checks passed.\n'));
7582
+ console.log(chalk.dim(' Setup: claude mcp add ruvector npx ruvector mcp start\n'));
7583
+ });
7584
+
7403
7585
  // ============================================================================
7404
7586
  // Brain Commands — Shared intelligence via @ruvector/pi-brain (lazy-loaded)
7405
7587
  // ============================================================================
7406
7588
 
7407
- let _piBrainClient = null;
7408
- async function getPiBrainClient(opts = {}) {
7409
- if (_piBrainClient) return _piBrainClient;
7589
+ async function requirePiBrain() {
7410
7590
  try {
7411
- const piBrain = require('@ruvector/pi-brain');
7412
- const PiBrainClient = piBrain.PiBrainClient || piBrain.default;
7413
- const url = opts.url || process.env.BRAIN_URL || 'https://pi.ruv.io';
7414
- const key = opts.key || process.env.PI || '';
7415
- _piBrainClient = new PiBrainClient({ url, key });
7416
- return _piBrainClient;
7591
+ return require('@ruvector/pi-brain');
7417
7592
  } catch {
7418
7593
  console.error(chalk.red('Brain commands require @ruvector/pi-brain'));
7419
7594
  console.error(chalk.yellow(' npm install @ruvector/pi-brain'));
@@ -7421,114 +7596,90 @@ async function getPiBrainClient(opts = {}) {
7421
7596
  }
7422
7597
  }
7423
7598
 
7424
- const brainCmd = program.command('brain').description('Shared intelligence — search, share, vote on collective knowledge');
7599
+ function getBrainConfig(opts) {
7600
+ return {
7601
+ url: opts.url || process.env.BRAIN_URL || 'https://pi.ruv.io',
7602
+ key: opts.key || process.env.PI
7603
+ };
7604
+ }
7605
+
7606
+ const brainCmd = program.command('brain').description('Shared intelligence — search, share, and manage collective knowledge');
7425
7607
 
7426
7608
  brainCmd.command('search <query>')
7427
- .description('Semantic search across shared knowledge')
7428
- .option('-l, --limit <n>', 'Max results', '10')
7609
+ .description('Semantic search across shared brain knowledge')
7429
7610
  .option('-c, --category <cat>', 'Filter by category')
7611
+ .option('-l, --limit <n>', 'Max results', '10')
7430
7612
  .option('--url <url>', 'Brain server URL')
7431
- .option('--key <key>', 'PI key')
7432
- .option('--json', 'JSON output')
7613
+ .option('--key <key>', 'Pi key')
7614
+ .option('--json', 'Output as JSON')
7433
7615
  .action(async (query, opts) => {
7434
- const spinner = ora(`Searching brain for "${query}"...`);
7435
- spinner.start();
7616
+ const piBrain = await requirePiBrain();
7617
+ const config = getBrainConfig(opts);
7436
7618
  try {
7437
- const client = await getPiBrainClient(opts);
7438
- const results = await client.search(query, { limit: parseInt(opts.limit), category: opts.category });
7439
- spinner.stop();
7440
- if (opts.json || !process.stdout.isTTY) {
7441
- console.log(JSON.stringify(results, null, 2));
7442
- } else {
7443
- const items = results.memories || results.results || results || [];
7444
- if (items.length === 0) {
7445
- console.log(chalk.yellow('No results found.'));
7446
- } else {
7447
- items.forEach((m, i) => {
7448
- console.log(chalk.bold.cyan(`${i + 1}. ${m.title || m.id}`));
7449
- if (m.category) console.log(chalk.dim(` Category: ${m.category}`));
7450
- if (m.quality_score != null) console.log(chalk.dim(` Quality: ${(m.quality_score * 100).toFixed(0)}%`));
7451
- if (m.content) console.log(` ${m.content.slice(0, 120)}${m.content.length > 120 ? '...' : ''}`);
7452
- console.log();
7453
- });
7454
- }
7455
- }
7456
- } catch (e) {
7457
- spinner.stop();
7458
- console.error(chalk.red('Search failed:'), e.message);
7459
- process.exit(1);
7460
- }
7619
+ const client = new piBrain.PiBrainClient(config);
7620
+ const results = await client.search(query, { category: opts.category, limit: parseInt(opts.limit) });
7621
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(results, null, 2)); return; }
7622
+ console.log(chalk.bold.cyan(`\nBrain Search: "${query}"\n`));
7623
+ if (!results.length) { console.log(chalk.dim(' No results found.\n')); return; }
7624
+ results.forEach((r, i) => {
7625
+ console.log(` ${chalk.yellow(i + 1 + '.')} ${chalk.bold(r.title || r.id)}`);
7626
+ if (r.category) console.log(` ${chalk.dim('Category:')} ${r.category}`);
7627
+ if (r.score) console.log(` ${chalk.dim('Score:')} ${r.score.toFixed(3)}`);
7628
+ console.log();
7629
+ });
7630
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7461
7631
  });
7462
7632
 
7463
7633
  brainCmd.command('share <title>')
7464
7634
  .description('Share knowledge with the collective brain')
7465
- .option('-c, --category <cat>', 'Category (pattern, solution, architecture, convention, security, performance, tooling)', 'pattern')
7635
+ .requiredOption('-c, --category <cat>', 'Category (pattern, solution, architecture, convention, security, performance, tooling)')
7466
7636
  .option('-t, --tags <tags>', 'Comma-separated tags')
7467
- .option('--content <text>', 'Content body (or pipe via stdin)')
7637
+ .option('--content <text>', 'Content body')
7638
+ .option('--code <snippet>', 'Code snippet')
7468
7639
  .option('--url <url>', 'Brain server URL')
7469
- .option('--key <key>', 'PI key')
7640
+ .option('--key <key>', 'Pi key')
7470
7641
  .action(async (title, opts) => {
7471
- const spinner = ora('Sharing with brain...');
7472
- spinner.start();
7642
+ const piBrain = await requirePiBrain();
7643
+ const config = getBrainConfig(opts);
7473
7644
  try {
7474
- const client = await getPiBrainClient(opts);
7475
- let content = opts.content || '';
7476
- if (!content && !process.stdin.isTTY) {
7477
- const chunks = [];
7478
- for await (const chunk of process.stdin) chunks.push(chunk);
7479
- content = Buffer.concat(chunks).toString('utf8');
7480
- }
7481
- if (!content) { spinner.stop(); console.error(chalk.red('Provide content via --content or stdin')); process.exit(1); }
7482
- const tags = opts.tags ? opts.tags.split(',').map(t => t.trim()) : [];
7483
- const result = await client.share({ title, content, category: opts.category, tags });
7484
- spinner.succeed(chalk.green(`Shared: ${result.id || 'ok'}`));
7485
- } catch (e) {
7486
- spinner.fail(chalk.red('Share failed: ' + e.message));
7487
- process.exit(1);
7488
- }
7645
+ const client = new piBrain.PiBrainClient(config);
7646
+ const result = await client.share({ title, content: opts.content || title, category: opts.category, tags: opts.tags ? opts.tags.split(',').map(t => t.trim()) : [], code_snippet: opts.code });
7647
+ console.log(chalk.green(`Shared: ${result.id || 'OK'}`));
7648
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7489
7649
  });
7490
7650
 
7491
7651
  brainCmd.command('get <id>')
7492
- .description('Retrieve a specific memory with provenance')
7652
+ .description('Retrieve a specific memory by ID')
7493
7653
  .option('--url <url>', 'Brain server URL')
7494
- .option('--key <key>', 'PI key')
7495
- .option('--json', 'JSON output')
7654
+ .option('--key <key>', 'Pi key')
7655
+ .option('--json', 'Output as JSON')
7496
7656
  .action(async (id, opts) => {
7657
+ const piBrain = await requirePiBrain();
7658
+ const config = getBrainConfig(opts);
7497
7659
  try {
7498
- const client = await getPiBrainClient(opts);
7660
+ const client = new piBrain.PiBrainClient(config);
7499
7661
  const result = await client.get(id);
7500
- if (opts.json || !process.stdout.isTTY) {
7501
- console.log(JSON.stringify(result, null, 2));
7502
- } else {
7503
- console.log(chalk.bold.cyan(result.title || result.id));
7504
- if (result.category) console.log(chalk.dim(`Category: ${result.category}`));
7505
- if (result.content) console.log(`\n${result.content}`);
7506
- if (result.tags && result.tags.length) console.log(chalk.dim(`\nTags: ${result.tags.join(', ')}`));
7507
- if (result.contributor_pseudonym) console.log(chalk.dim(`Contributor: ${result.contributor_pseudonym}`));
7508
- }
7509
- } catch (e) {
7510
- console.error(chalk.red('Get failed:'), e.message);
7511
- process.exit(1);
7512
- }
7662
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
7663
+ console.log(chalk.bold.cyan(`\nMemory: ${id}\n`));
7664
+ if (result.title) console.log(` ${chalk.bold('Title:')} ${result.title}`);
7665
+ if (result.content) console.log(` ${chalk.bold('Content:')} ${result.content}`);
7666
+ if (result.category) console.log(` ${chalk.bold('Category:')} ${result.category}`);
7667
+ console.log();
7668
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7513
7669
  });
7514
7670
 
7515
7671
  brainCmd.command('vote <id> <direction>')
7516
- .description('Vote on knowledge quality (up or down)')
7672
+ .description('Quality vote on a memory (up or down)')
7517
7673
  .option('--url <url>', 'Brain server URL')
7518
- .option('--key <key>', 'PI key')
7674
+ .option('--key <key>', 'Pi key')
7519
7675
  .action(async (id, direction, opts) => {
7520
- if (!['up', 'down'].includes(direction)) {
7521
- console.error(chalk.red('Direction must be "up" or "down"'));
7522
- process.exit(1);
7523
- }
7676
+ const piBrain = await requirePiBrain();
7677
+ const config = getBrainConfig(opts);
7524
7678
  try {
7525
- const client = await getPiBrainClient(opts);
7679
+ const client = new piBrain.PiBrainClient(config);
7526
7680
  await client.vote(id, direction);
7527
7681
  console.log(chalk.green(`Voted ${direction} on ${id}`));
7528
- } catch (e) {
7529
- console.error(chalk.red('Vote failed:'), e.message);
7530
- process.exit(1);
7531
- }
7682
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7532
7683
  });
7533
7684
 
7534
7685
  brainCmd.command('list')
@@ -7536,594 +7687,533 @@ brainCmd.command('list')
7536
7687
  .option('-c, --category <cat>', 'Filter by category')
7537
7688
  .option('-l, --limit <n>', 'Max results', '20')
7538
7689
  .option('--url <url>', 'Brain server URL')
7539
- .option('--key <key>', 'PI key')
7540
- .option('--json', 'JSON output')
7690
+ .option('--key <key>', 'Pi key')
7691
+ .option('--json', 'Output as JSON')
7541
7692
  .action(async (opts) => {
7693
+ const piBrain = await requirePiBrain();
7694
+ const config = getBrainConfig(opts);
7542
7695
  try {
7543
- const client = await getPiBrainClient(opts);
7696
+ const client = new piBrain.PiBrainClient(config);
7544
7697
  const results = await client.list({ category: opts.category, limit: parseInt(opts.limit) });
7545
- const items = results.memories || results || [];
7546
- if (opts.json || !process.stdout.isTTY) {
7547
- console.log(JSON.stringify(items, null, 2));
7548
- } else {
7549
- if (items.length === 0) { console.log(chalk.yellow('No memories found.')); return; }
7550
- items.forEach((m, i) => {
7551
- const quality = m.quality_score != null ? chalk.dim(` [${(m.quality_score * 100).toFixed(0)}%]`) : '';
7552
- console.log(`${chalk.bold(i + 1 + '.')} ${m.title || m.id}${quality} ${chalk.dim(m.category || '')}`);
7553
- });
7554
- }
7555
- } catch (e) {
7556
- console.error(chalk.red('List failed:'), e.message);
7557
- process.exit(1);
7558
- }
7698
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(results, null, 2)); return; }
7699
+ console.log(chalk.bold.cyan('\nShared Brain Memories\n'));
7700
+ if (!results.length) { console.log(chalk.dim(' No memories found.\n')); return; }
7701
+ results.forEach((r, i) => {
7702
+ console.log(` ${chalk.yellow(i + 1 + '.')} ${chalk.bold(r.title || r.id)} ${chalk.dim(`[${r.category || 'unknown'}]`)}`);
7703
+ });
7704
+ console.log();
7705
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7559
7706
  });
7560
7707
 
7561
7708
  brainCmd.command('delete <id>')
7562
7709
  .description('Delete your own contribution')
7563
7710
  .option('--url <url>', 'Brain server URL')
7564
- .option('--key <key>', 'PI key')
7711
+ .option('--key <key>', 'Pi key')
7565
7712
  .action(async (id, opts) => {
7713
+ const piBrain = await requirePiBrain();
7714
+ const config = getBrainConfig(opts);
7566
7715
  try {
7567
- const client = await getPiBrainClient(opts);
7716
+ const client = new piBrain.PiBrainClient(config);
7568
7717
  await client.delete(id);
7569
- console.log(chalk.green(`Deleted ${id}`));
7570
- } catch (e) {
7571
- console.error(chalk.red('Delete failed:'), e.message);
7572
- process.exit(1);
7573
- }
7718
+ console.log(chalk.green(`Deleted: ${id}`));
7719
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7574
7720
  });
7575
7721
 
7576
7722
  brainCmd.command('status')
7577
- .description('Show brain system health and statistics')
7723
+ .description('Show shared brain system health')
7578
7724
  .option('--url <url>', 'Brain server URL')
7579
- .option('--key <key>', 'PI key')
7580
- .option('--json', 'JSON output')
7725
+ .option('--key <key>', 'Pi key')
7726
+ .option('--json', 'Output as JSON')
7581
7727
  .action(async (opts) => {
7728
+ const piBrain = await requirePiBrain();
7729
+ const config = getBrainConfig(opts);
7582
7730
  try {
7583
- const client = await getPiBrainClient(opts);
7731
+ const client = new piBrain.PiBrainClient(config);
7584
7732
  const status = await client.status();
7585
- if (opts.json || !process.stdout.isTTY) {
7586
- console.log(JSON.stringify(status, null, 2));
7587
- } else {
7588
- console.log(chalk.bold.cyan('\nBrain Status'));
7589
- console.log(chalk.dim('-'.repeat(40)));
7590
- Object.entries(status).forEach(([k, v]) => {
7591
- console.log(` ${chalk.bold(k)}: ${typeof v === 'object' ? JSON.stringify(v) : v}`);
7592
- });
7593
- console.log();
7594
- }
7595
- } catch (e) {
7596
- console.error(chalk.red('Status failed:'), e.message);
7597
- process.exit(1);
7598
- }
7733
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(status, null, 2)); return; }
7734
+ console.log(chalk.bold.cyan('\nBrain Status\n'));
7735
+ Object.entries(status).forEach(([k, v]) => {
7736
+ console.log(` ${chalk.bold(k + ':')} ${v}`);
7737
+ });
7738
+ console.log();
7739
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7599
7740
  });
7600
7741
 
7601
7742
  brainCmd.command('drift')
7602
- .description('Check knowledge drift between local and shared')
7743
+ .description('Check if shared knowledge has drifted')
7603
7744
  .option('-d, --domain <domain>', 'Domain to check')
7604
7745
  .option('--url <url>', 'Brain server URL')
7605
- .option('--key <key>', 'PI key')
7606
- .option('--json', 'JSON output')
7746
+ .option('--key <key>', 'Pi key')
7747
+ .option('--json', 'Output as JSON')
7607
7748
  .action(async (opts) => {
7749
+ const piBrain = await requirePiBrain();
7750
+ const config = getBrainConfig(opts);
7608
7751
  try {
7609
- const client = await getPiBrainClient(opts);
7752
+ const client = new piBrain.PiBrainClient(config);
7610
7753
  const report = await client.drift({ domain: opts.domain });
7611
- if (opts.json || !process.stdout.isTTY) {
7612
- console.log(JSON.stringify(report, null, 2));
7613
- } else {
7614
- console.log(chalk.bold.cyan('\nDrift Report'));
7615
- console.log(chalk.dim('-'.repeat(40)));
7616
- console.log(` CV: ${report.cv || 'N/A'}`);
7617
- console.log(` Drifting: ${report.is_drifting ? chalk.red('YES') : chalk.green('NO')}`);
7618
- if (report.suggested_action) console.log(` Action: ${report.suggested_action}`);
7619
- console.log();
7620
- }
7621
- } catch (e) {
7622
- console.error(chalk.red('Drift check failed:'), e.message);
7623
- process.exit(1);
7624
- }
7754
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(report, null, 2)); return; }
7755
+ console.log(chalk.bold.cyan('\nDrift Report\n'));
7756
+ console.log(` ${chalk.bold('Drifting:')} ${report.is_drifting ? chalk.red('Yes') : chalk.green('No')}`);
7757
+ if (report.cv) console.log(` ${chalk.bold('CV:')} ${report.cv}`);
7758
+ console.log();
7759
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7625
7760
  });
7626
7761
 
7627
7762
  brainCmd.command('partition')
7628
- .description('Get knowledge topology via mincut partitioning')
7763
+ .description('Get knowledge partitioned by mincut topology')
7629
7764
  .option('-d, --domain <domain>', 'Domain to partition')
7630
- .option('--min-cluster <n>', 'Minimum cluster size', '3')
7765
+ .option('--min-size <n>', 'Minimum cluster size', '3')
7631
7766
  .option('--url <url>', 'Brain server URL')
7632
- .option('--key <key>', 'PI key')
7633
- .option('--json', 'JSON output')
7767
+ .option('--key <key>', 'Pi key')
7768
+ .option('--json', 'Output as JSON')
7634
7769
  .action(async (opts) => {
7770
+ const piBrain = await requirePiBrain();
7771
+ const config = getBrainConfig(opts);
7635
7772
  try {
7636
- const client = await getPiBrainClient(opts);
7637
- const result = await client.partition({ domain: opts.domain, min_cluster_size: parseInt(opts.minCluster) });
7638
- if (opts.json || !process.stdout.isTTY) {
7639
- console.log(JSON.stringify(result, null, 2));
7640
- } else {
7641
- const clusters = result.clusters || [];
7642
- console.log(chalk.bold.cyan(`\nKnowledge Partitions: ${clusters.length} clusters`));
7643
- console.log(chalk.dim('-'.repeat(40)));
7644
- clusters.forEach((c, i) => {
7645
- console.log(` ${chalk.bold('Cluster ' + (i + 1))}: ${c.size || (c.members && c.members.length) || '?'} memories`);
7646
- if (c.label) console.log(` Label: ${c.label}`);
7647
- if (c.edge_strength != null) console.log(chalk.dim(` Edge strength: ${c.edge_strength.toFixed(3)}`));
7773
+ const client = new piBrain.PiBrainClient(config);
7774
+ const result = await client.partition({ domain: opts.domain, min_cluster_size: parseInt(opts.minSize) });
7775
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
7776
+ console.log(chalk.bold.cyan('\nKnowledge Partitions\n'));
7777
+ if (result.clusters) {
7778
+ result.clusters.forEach((c, i) => {
7779
+ console.log(` ${chalk.yellow('Cluster ' + (i + 1) + ':')} ${c.size || 'unknown'} entries`);
7648
7780
  });
7649
- console.log();
7650
7781
  }
7651
- } catch (e) {
7652
- console.error(chalk.red('Partition failed:'), e.message);
7653
- process.exit(1);
7654
- }
7782
+ console.log();
7783
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7655
7784
  });
7656
7785
 
7657
7786
  brainCmd.command('transfer <source> <target>')
7658
- .description('Transfer learned priors between domains')
7787
+ .description('Apply learned priors from one domain to another')
7659
7788
  .option('--url <url>', 'Brain server URL')
7660
- .option('--key <key>', 'PI key')
7661
- .option('--json', 'JSON output')
7789
+ .option('--key <key>', 'Pi key')
7790
+ .option('--json', 'Output as JSON')
7662
7791
  .action(async (source, target, opts) => {
7663
- const spinner = ora(`Transferring ${source} -> ${target}...`);
7664
- spinner.start();
7792
+ const piBrain = await requirePiBrain();
7793
+ const config = getBrainConfig(opts);
7665
7794
  try {
7666
- const client = await getPiBrainClient(opts);
7795
+ const client = new piBrain.PiBrainClient(config);
7667
7796
  const result = await client.transfer(source, target);
7668
- spinner.stop();
7669
- if (opts.json || !process.stdout.isTTY) {
7670
- console.log(JSON.stringify(result, null, 2));
7671
- } else {
7672
- console.log(chalk.green(`Transfer complete: ${source} -> ${target}`));
7673
- if (result.acceleration_factor) console.log(` Acceleration: ${result.acceleration_factor.toFixed(2)}x`);
7674
- if (result.improved_target != null) console.log(` Target improved: ${result.improved_target ? 'yes' : 'no'}`);
7675
- }
7676
- } catch (e) {
7677
- spinner.stop();
7678
- console.error(chalk.red('Transfer failed:'), e.message);
7679
- process.exit(1);
7680
- }
7797
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
7798
+ console.log(chalk.green(`Transfer ${source} -> ${target}: ${result.status || 'OK'}`));
7799
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7681
7800
  });
7682
7801
 
7683
7802
  brainCmd.command('sync [direction]')
7684
- .description('Sync LoRA weights (pull, push, or both)')
7803
+ .description('Synchronize LoRA weights (pull, push, or both)')
7685
7804
  .option('--url <url>', 'Brain server URL')
7686
- .option('--key <key>', 'PI key')
7805
+ .option('--key <key>', 'Pi key')
7687
7806
  .action(async (direction, opts) => {
7688
- const dir = direction || 'both';
7689
- const spinner = ora(`LoRA sync (${dir})...`);
7690
- spinner.start();
7807
+ const piBrain = await requirePiBrain();
7808
+ const config = getBrainConfig(opts);
7691
7809
  try {
7692
- const client = await getPiBrainClient(opts);
7693
- const result = await client.loraSync({ direction: dir });
7694
- spinner.succeed(chalk.green(`LoRA sync ${dir}: ${result.status || 'ok'}`));
7695
- } catch (e) {
7696
- spinner.fail(chalk.red('Sync failed: ' + e.message));
7697
- process.exit(1);
7698
- }
7810
+ const client = new piBrain.PiBrainClient(config);
7811
+ const result = await client.sync(direction || 'both');
7812
+ console.log(chalk.green(`Sync ${direction || 'both'}: ${result.status || 'OK'}`));
7813
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7699
7814
  });
7700
7815
 
7701
7816
  // ============================================================================
7702
- // Edge Commands — Distributed compute via edge-net (native fetch)
7817
+ // Edge Commands — Distributed compute via @ruvector/edge-net
7703
7818
  // ============================================================================
7704
7819
 
7705
- const EDGE_GENESIS = 'https://edge-net-genesis-875130704813.us-central1.run.app';
7706
- const EDGE_RELAY = 'https://edge-net-relay-875130704813.us-central1.run.app';
7707
- const EDGE_DASHBOARD = 'https://edge-net-dashboard-875130704813.us-central1.run.app';
7820
+ const edgeCmd = program.command('edge').description('Distributed P2P compute network — status, join, balance, tasks');
7708
7821
 
7709
- const edgeCmd = program.command('edge').description('Edge-net distributed compute network');
7822
+ const EDGE_GENESIS = 'https://edge-net-genesis-875130704813.us-central1.run.app';
7710
7823
 
7711
7824
  edgeCmd.command('status')
7712
- .description('Query network status (genesis, relay, nodes)')
7713
- .option('--json', 'JSON output')
7825
+ .description('Show edge compute network status')
7826
+ .option('--json', 'Output as JSON')
7714
7827
  .action(async (opts) => {
7715
- const spinner = ora('Querying edge network...');
7716
- spinner.start();
7717
7828
  try {
7718
- const res = await fetch(`${EDGE_GENESIS}/api/status`);
7719
- if (!res.ok) throw new Error(`HTTP ${res.status}`);
7720
- const data = await res.json();
7721
- spinner.stop();
7722
- if (opts.json || !process.stdout.isTTY) {
7723
- console.log(JSON.stringify(data, null, 2));
7724
- } else {
7725
- console.log(chalk.bold.cyan('\nEdge Network Status'));
7726
- console.log(chalk.dim('-'.repeat(40)));
7727
- if (data.nodes != null) console.log(` Nodes: ${data.nodes}`);
7728
- if (data.total_compute != null) console.log(` Compute: ${data.total_compute}`);
7729
- if (data.ruv_supply != null) console.log(` rUv Supply: ${data.ruv_supply}`);
7730
- if (data.phase) console.log(` Phase: ${data.phase}`);
7731
- console.log();
7732
- }
7733
- } catch (e) {
7734
- spinner.stop();
7735
- console.error(chalk.red('Edge status failed:'), e.message);
7736
- console.error(chalk.dim(' Genesis URL: ' + EDGE_GENESIS));
7737
- }
7829
+ const resp = await fetch(`${EDGE_GENESIS}/status`);
7830
+ const data = await resp.json();
7831
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(data, null, 2)); return; }
7832
+ console.log(chalk.bold.cyan('\nEdge Network Status\n'));
7833
+ Object.entries(data).forEach(([k, v]) => console.log(` ${chalk.bold(k + ':')} ${v}`));
7834
+ console.log();
7835
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7738
7836
  });
7739
7837
 
7740
7838
  edgeCmd.command('join')
7741
- .description('Join as a compute node')
7742
- .option('--contribution <n>', 'Contribution factor (0.0 - 1.0)', '0.3')
7743
- .option('--key <key>', 'PI key')
7839
+ .description('Join the edge compute network as a compute node')
7840
+ .option('--contribution <level>', 'Contribution level 0.0-1.0', '0.3')
7744
7841
  .action(async (opts) => {
7745
- const key = opts.key || process.env.PI || '';
7746
- if (!key) {
7747
- console.error(chalk.red('PI key required. Set PI env var or use --key'));
7748
- console.error(chalk.yellow(' Generate one: npx ruvector identity generate'));
7749
- process.exit(1);
7750
- }
7751
- console.log(chalk.cyan(`Joining edge network (contribution=${opts.contribution})...`));
7752
- console.log(chalk.dim('This is a long-running process. Press Ctrl+C to leave.\n'));
7842
+ const piKey = process.env.PI;
7843
+ if (!piKey) { console.error(chalk.red('Set PI environment variable first. Run: npx ruvector identity generate')); process.exit(1); }
7753
7844
  try {
7754
- const res = await fetch(`${EDGE_RELAY}/api/join`, {
7845
+ const resp = await fetch(`${EDGE_GENESIS}/join`, {
7755
7846
  method: 'POST',
7756
- headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${key}` },
7757
- body: JSON.stringify({ contribution: parseFloat(opts.contribution) })
7847
+ headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${piKey}` },
7848
+ body: JSON.stringify({ contribution: parseFloat(opts.contribution), pi_key: piKey })
7758
7849
  });
7759
- if (!res.ok) throw new Error(`HTTP ${res.status}: ${await res.text()}`);
7760
- const data = await res.json();
7761
- console.log(chalk.green('Joined network.'), data.node_id ? `Node: ${data.node_id}` : '');
7762
- } catch (e) {
7763
- console.error(chalk.red('Join failed:'), e.message);
7764
- process.exit(1);
7765
- }
7850
+ const data = await resp.json();
7851
+ console.log(chalk.green(`Joined edge network: ${data.node_id || 'OK'}`));
7852
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7766
7853
  });
7767
7854
 
7768
7855
  edgeCmd.command('balance')
7769
- .description('Check rUv balance for current identity')
7770
- .option('--key <key>', 'PI key')
7771
- .option('--json', 'JSON output')
7856
+ .description('Check rUv credit balance')
7857
+ .option('--json', 'Output as JSON')
7772
7858
  .action(async (opts) => {
7773
- const key = opts.key || process.env.PI || '';
7774
- if (!key) { console.error(chalk.red('PI key required')); process.exit(1); }
7859
+ const piKey = process.env.PI;
7860
+ if (!piKey) { console.error(chalk.red('Set PI environment variable first.')); process.exit(1); }
7775
7861
  try {
7776
- const crypto = require('crypto');
7777
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7778
- const res = await fetch(`${EDGE_GENESIS}/api/balance/${pseudonym}`);
7779
- if (!res.ok) throw new Error(`HTTP ${res.status}`);
7780
- const data = await res.json();
7781
- if (opts.json || !process.stdout.isTTY) {
7782
- console.log(JSON.stringify(data, null, 2));
7783
- } else {
7784
- console.log(chalk.bold.cyan(`\nrUv Balance: ${data.balance != null ? data.balance : 'N/A'}`));
7785
- if (data.earned != null) console.log(chalk.dim(` Earned: ${data.earned}`));
7786
- if (data.spent != null) console.log(chalk.dim(` Spent: ${data.spent}`));
7787
- console.log();
7788
- }
7789
- } catch (e) {
7790
- console.error(chalk.red('Balance check failed:'), e.message);
7791
- }
7862
+ const resp = await fetch(`${EDGE_GENESIS}/balance/${piKey}`, { headers: { 'Authorization': `Bearer ${piKey}` } });
7863
+ const data = await resp.json();
7864
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(data, null, 2)); return; }
7865
+ console.log(chalk.bold.cyan('\nrUv Balance\n'));
7866
+ console.log(` ${chalk.bold('Balance:')} ${data.balance || 0} rUv`);
7867
+ console.log();
7868
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7792
7869
  });
7793
7870
 
7794
7871
  edgeCmd.command('tasks')
7795
7872
  .description('List available distributed compute tasks')
7796
- .option('--json', 'JSON output')
7873
+ .option('-l, --limit <n>', 'Max tasks', '20')
7874
+ .option('--json', 'Output as JSON')
7797
7875
  .action(async (opts) => {
7798
7876
  try {
7799
- const res = await fetch(`${EDGE_GENESIS}/api/tasks`);
7800
- if (!res.ok) throw new Error(`HTTP ${res.status}`);
7801
- const data = await res.json();
7802
- const tasks = data.tasks || data || [];
7803
- if (opts.json || !process.stdout.isTTY) {
7804
- console.log(JSON.stringify(tasks, null, 2));
7805
- } else {
7806
- if (tasks.length === 0) { console.log(chalk.yellow('No tasks available.')); return; }
7807
- console.log(chalk.bold.cyan(`\n${tasks.length} available tasks\n`));
7808
- tasks.forEach((t, i) => {
7809
- console.log(` ${chalk.bold(i + 1 + '.')} ${t.type || t.id} -- ${t.reward || '?'} rUv`);
7810
- });
7811
- console.log();
7812
- }
7813
- } catch (e) {
7814
- console.error(chalk.red('Tasks query failed:'), e.message);
7815
- }
7877
+ const resp = await fetch(`${EDGE_GENESIS}/tasks?limit=${opts.limit}`);
7878
+ const data = await resp.json();
7879
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(data, null, 2)); return; }
7880
+ console.log(chalk.bold.cyan('\nEdge Compute Tasks\n'));
7881
+ const tasks = Array.isArray(data) ? data : data.tasks || [];
7882
+ if (!tasks.length) { console.log(chalk.dim(' No tasks available.\n')); return; }
7883
+ tasks.forEach((t, i) => console.log(` ${chalk.yellow(i + 1 + '.')} ${t.name || t.id} ${chalk.dim(`[${t.status || 'pending'}]`)}`));
7884
+ console.log();
7885
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7816
7886
  });
7817
7887
 
7818
7888
  edgeCmd.command('dashboard')
7819
7889
  .description('Open edge-net dashboard in browser')
7820
7890
  .action(() => {
7821
- const url = EDGE_DASHBOARD;
7822
- console.log(chalk.cyan(`Opening: ${url}`));
7823
- const { exec } = require('child_process');
7824
- const cmd = process.platform === 'darwin' ? 'open' : process.platform === 'win32' ? 'start' : 'xdg-open';
7825
- exec(`${cmd} ${url}`, () => {});
7891
+ const url = 'https://edge-net-dashboard-875130704813.us-central1.run.app';
7892
+ console.log(chalk.cyan(`Dashboard: ${url}`));
7893
+ try {
7894
+ const { execSync } = require('child_process');
7895
+ const cmd = process.platform === 'darwin' ? 'open' : process.platform === 'win32' ? 'start' : 'xdg-open';
7896
+ execSync(`${cmd} ${url}`, { stdio: 'ignore' });
7897
+ } catch { console.log(chalk.dim(' Open the URL above in your browser.')); }
7826
7898
  });
7827
7899
 
7828
7900
  // ============================================================================
7829
- // Identity Commands — pi key management (Node.js crypto)
7901
+ // Identity Commands — Pi key management
7830
7902
  // ============================================================================
7831
7903
 
7832
- const identityCmd = program.command('identity').description('Pi key management for brain, edge, and MCP identity');
7904
+ const identityCmd = program.command('identity').description('Pi key identity management generate, show, export, import');
7833
7905
 
7834
7906
  identityCmd.command('generate')
7835
7907
  .description('Generate a new pi key')
7836
7908
  .action(() => {
7837
7909
  const crypto = require('crypto');
7838
7910
  const key = crypto.randomBytes(32).toString('hex');
7839
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7840
- console.log(chalk.bold.cyan('\nNew Pi Key Generated\n'));
7841
- console.log(chalk.bold('Key: ') + chalk.yellow(key));
7842
- console.log(chalk.bold('Pseudonym: ') + chalk.green(pseudonym));
7843
- console.log(chalk.dim('\nStore this key securely. Set it as:'));
7844
- console.log(chalk.dim(' export PI=' + key));
7845
- console.log(chalk.dim(' # or add to .env file'));
7911
+ const hash = crypto.createHash('shake256', { outputLength: 16 });
7912
+ hash.update(key);
7913
+ const pseudonym = hash.digest('hex');
7914
+ console.log(chalk.bold.cyan('\nNew Pi Identity Generated\n'));
7915
+ console.log(` ${chalk.bold('Pi Key:')} ${chalk.yellow(key)}`);
7916
+ console.log(` ${chalk.bold('Pseudonym:')} ${chalk.green(pseudonym)}`);
7846
7917
  console.log();
7918
+ console.log(chalk.dim(' Store securely. Set PI env var to use:'));
7919
+ console.log(chalk.cyan(` export PI=${key}\n`));
7847
7920
  });
7848
7921
 
7849
7922
  identityCmd.command('show')
7850
- .description('Display current identity derived from PI key')
7851
- .option('--key <key>', 'PI key (default: PI env var)')
7923
+ .description('Show current pi key pseudonym and derived identities')
7924
+ .option('--json', 'Output as JSON')
7852
7925
  .action((opts) => {
7853
- const key = opts.key || process.env.PI || '';
7854
- if (!key) {
7855
- console.error(chalk.red('No PI key found. Set PI env var or use --key'));
7856
- console.error(chalk.yellow(' Generate one: npx ruvector identity generate'));
7926
+ const piKey = process.env.PI;
7927
+ if (!piKey) {
7928
+ console.error(chalk.red('No PI environment variable set.'));
7929
+ console.error(chalk.yellow(' Run: npx ruvector identity generate'));
7857
7930
  process.exit(1);
7858
7931
  }
7859
7932
  const crypto = require('crypto');
7860
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7861
- const mcpToken = crypto.createHmac('sha256', key).update('mcp').digest('hex').slice(0, 32);
7933
+ const hash = crypto.createHash('shake256', { outputLength: 16 });
7934
+ hash.update(piKey);
7935
+ const pseudonym = hash.digest('hex');
7936
+ const mcpToken = crypto.createHmac('sha256', piKey).update('mcp').digest('hex').slice(0, 32);
7937
+ if (opts.json || !process.stdout.isTTY) {
7938
+ console.log(JSON.stringify({ pseudonym, mcp_token: mcpToken, key_prefix: piKey.slice(0, 8) + '...' }, null, 2));
7939
+ return;
7940
+ }
7862
7941
  console.log(chalk.bold.cyan('\nPi Identity\n'));
7863
- console.log(chalk.bold('Brain Pseudonym: ') + chalk.green(pseudonym));
7864
- console.log(chalk.bold('MCP Token: ') + chalk.dim(mcpToken));
7865
- console.log(chalk.bold('Key (first 8): ') + chalk.dim(key.slice(0, 8) + '...'));
7942
+ console.log(` ${chalk.bold('Key:')} ${piKey.slice(0, 8)}...${piKey.slice(-8)}`);
7943
+ console.log(` ${chalk.bold('Pseudonym:')} ${chalk.green(pseudonym)}`);
7944
+ console.log(` ${chalk.bold('MCP Token:')} ${chalk.dim(mcpToken)}`);
7866
7945
  console.log();
7867
7946
  });
7868
7947
 
7869
- identityCmd.command('export')
7870
- .description('Export key to encrypted file')
7871
- .option('-o, --output <path>', 'Output file', 'pi-key.enc')
7872
- .option('--key <key>', 'PI key')
7873
- .action((opts) => {
7874
- const key = opts.key || process.env.PI || '';
7875
- if (!key) { console.error(chalk.red('No PI key found.')); process.exit(1); }
7948
+ identityCmd.command('export <file>')
7949
+ .description('Export pi key to encrypted file')
7950
+ .action((file) => {
7951
+ const piKey = process.env.PI;
7952
+ if (!piKey) { console.error(chalk.red('No PI environment variable set.')); process.exit(1); }
7876
7953
  const crypto = require('crypto');
7877
- const password = crypto.randomBytes(16).toString('hex');
7954
+ const passphrase = crypto.randomBytes(16).toString('hex');
7955
+ const key = crypto.scryptSync(passphrase, 'ruvector-pi', 32);
7878
7956
  const iv = crypto.randomBytes(16);
7879
- const cipher = crypto.createCipheriv('aes-256-gcm', crypto.scryptSync(password, 'ruvector', 32), iv);
7880
- let encrypted = cipher.update(key, 'utf8', 'hex');
7957
+ const cipher = crypto.createCipheriv('aes-256-gcm', key, iv);
7958
+ let encrypted = cipher.update(piKey, 'utf8', 'hex');
7881
7959
  encrypted += cipher.final('hex');
7882
- const tag = cipher.getAuthTag().toString('hex');
7883
- const data = JSON.stringify({ iv: iv.toString('hex'), tag, data: encrypted, v: 1 });
7884
- fs.writeFileSync(opts.output, data);
7885
- console.log(chalk.green(`Key exported to ${opts.output}`));
7886
- console.log(chalk.bold('Passphrase: ') + chalk.yellow(password));
7887
- console.log(chalk.dim('Store this passphrase separately from the export file.'));
7960
+ const tag = cipher.getAuthTag();
7961
+ const data = { iv: iv.toString('hex'), tag: tag.toString('hex'), data: encrypted };
7962
+ fs.writeFileSync(file, JSON.stringify(data));
7963
+ console.log(chalk.green(`Exported to ${file}`));
7964
+ console.log(chalk.bold(`Passphrase: ${chalk.yellow(passphrase)}`));
7965
+ console.log(chalk.dim(' Store passphrase separately from the export file.\n'));
7888
7966
  });
7889
7967
 
7890
7968
  identityCmd.command('import <file>')
7891
- .description('Import key from encrypted backup')
7892
- .option('-p, --passphrase <pass>', 'Decryption passphrase')
7969
+ .description('Import pi key from encrypted backup')
7970
+ .requiredOption('-p, --passphrase <pass>', 'Decryption passphrase')
7893
7971
  .action((file, opts) => {
7894
- if (!opts.passphrase) { console.error(chalk.red('Passphrase required (--passphrase)')); process.exit(1); }
7895
7972
  try {
7896
7973
  const crypto = require('crypto');
7897
7974
  const raw = JSON.parse(fs.readFileSync(file, 'utf8'));
7898
- const decipher = crypto.createDecipheriv('aes-256-gcm', crypto.scryptSync(opts.passphrase, 'ruvector', 32), Buffer.from(raw.iv, 'hex'));
7975
+ const key = crypto.scryptSync(opts.passphrase, 'ruvector-pi', 32);
7976
+ const decipher = crypto.createDecipheriv('aes-256-gcm', key, Buffer.from(raw.iv, 'hex'));
7899
7977
  decipher.setAuthTag(Buffer.from(raw.tag, 'hex'));
7900
- let key = decipher.update(raw.data, 'hex', 'utf8');
7901
- key += decipher.final('utf8');
7902
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7978
+ let decrypted = decipher.update(raw.data, 'hex', 'utf8');
7979
+ decrypted += decipher.final('utf8');
7903
7980
  console.log(chalk.green('Key imported successfully.'));
7904
- console.log(chalk.bold('Pseudonym: ') + chalk.green(pseudonym));
7905
- console.log(chalk.dim(`\nSet it: export PI=${key}`));
7906
- } catch (e) {
7907
- console.error(chalk.red('Import failed:'), e.message);
7908
- process.exit(1);
7909
- }
7981
+ console.log(chalk.cyan(` export PI=${decrypted}\n`));
7982
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7910
7983
  });
7911
7984
 
7912
7985
  // ============================================================================
7913
- // LLM Commands — LLM orchestration via @ruvector/ruvllm (lazy-loaded)
7986
+ // LLM Commands — Text embeddings via @ruvector/ruvllm (lazy-loaded)
7914
7987
  // ============================================================================
7915
7988
 
7916
- const llmCmd = program.command('llm').description('LLM orchestration embeddings, models, benchmarks');
7989
+ const llmCmd = program.command('llm').description('LLM embeddings and inference via @ruvector/ruvllm');
7990
+
7991
+ function requireRuvllm() {
7992
+ try { return require('@ruvector/ruvllm'); } catch {
7993
+ console.error(chalk.red('LLM commands require @ruvector/ruvllm'));
7994
+ console.error(chalk.yellow(' npm install @ruvector/ruvllm'));
7995
+ process.exit(1);
7996
+ }
7997
+ }
7917
7998
 
7918
7999
  llmCmd.command('embed <text>')
7919
- .description('Generate embeddings via ruvllm')
7920
- .option('-m, --model <model>', 'Model name', 'default')
7921
- .option('--json', 'JSON output')
7922
- .action(async (text, opts) => {
8000
+ .description('Generate text embeddings')
8001
+ .option('-m, --model <model>', 'Model name')
8002
+ .option('--json', 'Output as JSON')
8003
+ .action((text, opts) => {
8004
+ const ruvllm = requireRuvllm();
7923
8005
  try {
7924
- const ruvllm = require('@ruvector/ruvllm');
7925
- const embed = ruvllm.embed || (ruvllm.default && ruvllm.default.embed);
7926
- if (!embed) throw new Error('ruvllm.embed not found');
7927
- const result = await embed(text, { model: opts.model });
7928
- if (opts.json || !process.stdout.isTTY) {
7929
- console.log(JSON.stringify(result));
7930
- } else {
7931
- const vec = result.embedding || result;
7932
- console.log(chalk.cyan(`Embedding (dim=${Array.isArray(vec) ? vec.length : '?'}):`));
7933
- if (Array.isArray(vec)) console.log(chalk.dim(` [${vec.slice(0, 8).map(v => v.toFixed(4)).join(', ')}${vec.length > 8 ? ', ...' : ''}]`));
7934
- }
7935
- } catch (e) {
7936
- if (e.code === 'MODULE_NOT_FOUND') {
7937
- console.error(chalk.red('LLM commands require @ruvector/ruvllm'));
7938
- console.error(chalk.yellow(' npm install @ruvector/ruvllm'));
7939
- } else {
7940
- console.error(chalk.red('Embed failed:'), e.message);
7941
- }
7942
- process.exit(1);
7943
- }
8006
+ const embedding = ruvllm.embed ? ruvllm.embed(text, opts.model) : ruvllm.generateEmbedding(text);
8007
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify({ embedding, dimension: embedding.length })); return; }
8008
+ console.log(chalk.bold.cyan('\nEmbedding Generated\n'));
8009
+ console.log(` ${chalk.bold('Dimension:')} ${embedding.length}`);
8010
+ console.log(` ${chalk.bold('Preview:')} [${embedding.slice(0, 5).map(v => v.toFixed(4)).join(', ')}...]`);
8011
+ console.log();
8012
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7944
8013
  });
7945
8014
 
7946
8015
  llmCmd.command('models')
7947
8016
  .description('List available LLM models')
7948
- .action(async () => {
8017
+ .action(() => {
8018
+ const ruvllm = requireRuvllm();
7949
8019
  try {
7950
- const ruvllm = require('@ruvector/ruvllm');
7951
- const models = ruvllm.listModels ? await ruvllm.listModels() : [];
7952
- if (models.length === 0) { console.log(chalk.yellow('No models found.')); return; }
7953
- console.log(chalk.bold.cyan('\nAvailable Models\n'));
7954
- models.forEach(m => {
7955
- console.log(` ${chalk.bold(m.name || m.id)} ${chalk.dim(m.provider || '')} ${chalk.dim(m.size || '')}`);
7956
- });
7957
- console.log();
7958
- } catch (e) {
7959
- if (e.code === 'MODULE_NOT_FOUND') {
7960
- console.error(chalk.red('Requires @ruvector/ruvllm: npm install @ruvector/ruvllm'));
8020
+ if (typeof ruvllm.listModels === 'function') {
8021
+ const models = ruvllm.listModels();
8022
+ models.forEach(m => console.log(` ${chalk.green(m.name || m)} ${chalk.dim(m.description || '')}`));
7961
8023
  } else {
7962
- console.error(chalk.red('Failed:'), e.message);
8024
+ console.log(chalk.dim(' Model listing requires @ruvector/ruvllm >=2.1.0'));
8025
+ console.log(chalk.dim(' Available: MiniLM-L6 (default embedding model)'));
7963
8026
  }
7964
- process.exit(1);
7965
- }
8027
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7966
8028
  });
7967
8029
 
7968
8030
  llmCmd.command('benchmark')
7969
8031
  .description('Benchmark LLM inference performance')
7970
- .option('-n, --iterations <n>', 'Iterations', '100')
7971
- .option('-m, --model <model>', 'Model name', 'default')
7972
- .action(async (opts) => {
7973
- const spinner = ora('Running LLM benchmark...');
7974
- spinner.start();
7975
- try {
7976
- const ruvllm = require('@ruvector/ruvllm');
7977
- const embed = ruvllm.embed || (ruvllm.default && ruvllm.default.embed);
7978
- if (!embed) throw new Error('ruvllm.embed not found');
7979
- const n = parseInt(opts.iterations);
8032
+ .option('-n, --iterations <n>', 'Number of iterations', '100')
8033
+ .action((opts) => {
8034
+ const ruvllm = requireRuvllm();
8035
+ const n = parseInt(opts.iterations);
8036
+ const text = 'The quick brown fox jumps over the lazy dog';
8037
+ const times = [];
8038
+ for (let i = 0; i < n; i++) {
7980
8039
  const start = performance.now();
7981
- for (let i = 0; i < n; i++) await embed(`benchmark text ${i}`, { model: opts.model });
7982
- const elapsed = performance.now() - start;
7983
- spinner.stop();
7984
- console.log(chalk.bold.cyan('\nLLM Benchmark Results'));
7985
- console.log(chalk.dim('-'.repeat(40)));
7986
- console.log(` Iterations: ${n}`);
7987
- console.log(` Total: ${(elapsed / 1000).toFixed(2)}s`);
7988
- console.log(` Avg: ${(elapsed / n).toFixed(2)}ms/embed`);
7989
- console.log(` Throughput: ${(n / (elapsed / 1000)).toFixed(1)} embeds/s`);
7990
- console.log();
7991
- } catch (e) {
7992
- spinner.stop();
7993
- if (e.code === 'MODULE_NOT_FOUND') {
7994
- console.error(chalk.red('Requires @ruvector/ruvllm'));
7995
- } else {
7996
- console.error(chalk.red('Benchmark failed:'), e.message);
7997
- }
7998
- process.exit(1);
7999
- }
8040
+ ruvllm.embed ? ruvllm.embed(text) : ruvllm.generateEmbedding(text);
8041
+ times.push(performance.now() - start);
8042
+ }
8043
+ times.sort((a, b) => a - b);
8044
+ console.log(chalk.bold.cyan('\nLLM Benchmark\n'));
8045
+ console.log(` ${chalk.bold('Iterations:')} ${n}`);
8046
+ console.log(` ${chalk.bold('P50:')} ${times[Math.floor(n * 0.5)].toFixed(2)}ms`);
8047
+ console.log(` ${chalk.bold('P95:')} ${times[Math.floor(n * 0.95)].toFixed(2)}ms`);
8048
+ console.log(` ${chalk.bold('P99:')} ${times[Math.floor(n * 0.99)].toFixed(2)}ms`);
8049
+ console.log(` ${chalk.bold('Mean:')} ${(times.reduce((a, b) => a + b, 0) / n).toFixed(2)}ms`);
8050
+ console.log();
8051
+ });
8052
+
8053
+ llmCmd.command('info')
8054
+ .description('Show RuvLLM module information')
8055
+ .action(() => {
8056
+ const ruvllm = requireRuvllm();
8057
+ console.log(chalk.bold.cyan('\nRuvLLM Info\n'));
8058
+ console.log(` ${chalk.bold('Version:')} ${typeof ruvllm.version === 'function' ? ruvllm.version() : ruvllm.version || 'unknown'}`);
8059
+ console.log(` ${chalk.bold('SIMD:')} ${ruvllm.simdEnabled ? 'enabled' : 'not detected'}`);
8060
+ console.log();
8000
8061
  });
8001
8062
 
8002
8063
  // ============================================================================
8003
- // SONA Commands — Self-Optimizing Neural Architecture
8064
+ // SONA Commands — Self-Optimizing Neural Architecture (lazy-loaded)
8004
8065
  // ============================================================================
8005
8066
 
8006
- const sonaCmd = program.command('sona').description('SONA self-optimizing neural architecture');
8067
+ const sonaCmd = program.command('sona').description('SONA adaptive learning — status, patterns, train, export');
8068
+
8069
+ function loadSona() {
8070
+ try { return require('@ruvector/sona'); } catch {
8071
+ console.error(chalk.red('SONA commands require @ruvector/sona'));
8072
+ console.error(chalk.yellow(' npm install @ruvector/sona'));
8073
+ process.exit(1);
8074
+ }
8075
+ }
8007
8076
 
8008
8077
  sonaCmd.command('status')
8009
- .description('Show SONA engine status')
8010
- .option('--json', 'JSON output')
8011
- .action(async (opts) => {
8078
+ .description('Show SONA learning engine status')
8079
+ .option('--json', 'Output as JSON')
8080
+ .action((opts) => {
8081
+ const sona = loadSona();
8012
8082
  try {
8013
- const sona = require('@ruvector/sona');
8014
- const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
8015
- const status = engine.status ? await engine.status() : { state: 'loaded' };
8016
- if (opts.json || !process.stdout.isTTY) {
8017
- console.log(JSON.stringify(status, null, 2));
8018
- } else {
8019
- console.log(chalk.bold.cyan('\nSONA Engine Status'));
8020
- console.log(chalk.dim('-'.repeat(40)));
8021
- Object.entries(status).forEach(([k, v]) => {
8022
- console.log(` ${chalk.bold(k)}: ${v}`);
8023
- });
8024
- console.log();
8025
- }
8026
- } catch (e) {
8027
- if (e.code === 'MODULE_NOT_FOUND') {
8028
- console.error(chalk.red('Requires @ruvector/sona: npm install @ruvector/sona'));
8029
- } else {
8030
- console.error(chalk.red('Failed:'), e.message);
8031
- }
8032
- process.exit(1);
8033
- }
8083
+ const engine = sona.SonaEngine ? new sona.SonaEngine() : new sona.SonaCoordinator();
8084
+ const status = engine.getStatus ? engine.getStatus() : { ready: true };
8085
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(status, null, 2)); return; }
8086
+ console.log(chalk.bold.cyan('\nSONA Status\n'));
8087
+ Object.entries(status).forEach(([k, v]) => console.log(` ${chalk.bold(k + ':')} ${v}`));
8088
+ console.log();
8089
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8034
8090
  });
8035
8091
 
8036
- sonaCmd.command('patterns')
8037
- .description('List learned SONA patterns')
8038
- .option('-l, --limit <n>', 'Max results', '20')
8039
- .option('--json', 'JSON output')
8040
- .action(async (opts) => {
8092
+ sonaCmd.command('patterns <query>')
8093
+ .description('Search learned patterns')
8094
+ .option('-t, --threshold <n>', 'Similarity threshold', '0.5')
8095
+ .option('--json', 'Output as JSON')
8096
+ .action((query, opts) => {
8097
+ const sona = loadSona();
8041
8098
  try {
8042
- const sona = require('@ruvector/sona');
8043
- const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
8044
- const patterns = engine.findPatterns ? await engine.findPatterns({ limit: parseInt(opts.limit) }) : [];
8045
- if (opts.json || !process.stdout.isTTY) {
8046
- console.log(JSON.stringify(patterns, null, 2));
8047
- } else {
8048
- if (patterns.length === 0) { console.log(chalk.yellow('No patterns found.')); return; }
8049
- console.log(chalk.bold.cyan(`\n${patterns.length} SONA Patterns\n`));
8050
- patterns.forEach((p, i) => {
8051
- console.log(` ${chalk.bold(i + 1 + '.')} ${p.name || p.type || p.id} ${chalk.dim(p.confidence ? `(${(p.confidence * 100).toFixed(0)}%)` : '')}`);
8052
- });
8053
- console.log();
8054
- }
8055
- } catch (e) {
8056
- if (e.code === 'MODULE_NOT_FOUND') {
8057
- console.error(chalk.red('Requires @ruvector/sona'));
8058
- } else {
8059
- console.error(chalk.red('Failed:'), e.message);
8060
- }
8061
- process.exit(1);
8062
- }
8099
+ const engine = sona.SonaEngine ? new sona.SonaEngine() : new sona.SonaCoordinator();
8100
+ const patterns = engine.findPatterns ? engine.findPatterns(query, { threshold: parseFloat(opts.threshold) }) : [];
8101
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(patterns, null, 2)); return; }
8102
+ console.log(chalk.bold.cyan('\nLearned Patterns\n'));
8103
+ if (!patterns.length) { console.log(chalk.dim(' No patterns found.\n')); return; }
8104
+ patterns.forEach((p, i) => console.log(` ${chalk.yellow(i + 1 + '.')} ${p.name || p.pattern || JSON.stringify(p).slice(0, 80)}`));
8105
+ console.log();
8106
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8063
8107
  });
8064
8108
 
8065
- sonaCmd.command('train')
8066
- .description('Start SONA training trajectory')
8067
- .option('-d, --data <path>', 'Training data path')
8068
- .option('--epochs <n>', 'Epochs', '10')
8069
- .action(async (opts) => {
8070
- const spinner = ora('Training SONA...');
8071
- spinner.start();
8109
+ sonaCmd.command('train <data>')
8110
+ .description('Record a training trajectory')
8111
+ .option('--outcome <outcome>', 'Outcome (success/failure)', 'success')
8112
+ .action((data, opts) => {
8113
+ const sona = loadSona();
8072
8114
  try {
8073
- const sona = require('@ruvector/sona');
8074
- const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
8075
- if (!engine.beginTrajectory) throw new Error('SONA engine does not support training');
8076
- const result = await engine.beginTrajectory({ data: opts.data, epochs: parseInt(opts.epochs) });
8077
- spinner.succeed(chalk.green('Training complete'));
8078
- if (result) console.log(chalk.dim(JSON.stringify(result, null, 2)));
8079
- } catch (e) {
8080
- spinner.fail(chalk.red('Training failed: ' + e.message));
8081
- process.exit(1);
8082
- }
8115
+ const engine = sona.SonaEngine ? new sona.SonaEngine() : new sona.SonaCoordinator();
8116
+ if (engine.recordTrajectory) { engine.recordTrajectory(data, opts.outcome); }
8117
+ else if (engine.train) { engine.train(data); }
8118
+ console.log(chalk.green('Training trajectory recorded.'));
8119
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8083
8120
  });
8084
8121
 
8085
8122
  sonaCmd.command('export')
8086
- .description('Export SONA model weights')
8087
- .option('-o, --output <path>', 'Output path', 'sona-weights.json')
8088
- .action(async (opts) => {
8123
+ .description('Export SONA learned weights to JSON')
8124
+ .option('-o, --output <file>', 'Output file', 'sona-weights.json')
8125
+ .action((opts) => {
8126
+ const sona = loadSona();
8089
8127
  try {
8090
- const sona = require('@ruvector/sona');
8091
- const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
8092
- const weights = engine.exportWeights ? await engine.exportWeights() : {};
8128
+ const engine = sona.SonaEngine ? new sona.SonaEngine() : new sona.SonaCoordinator();
8129
+ const weights = engine.exportWeights ? engine.exportWeights() : engine.export ? engine.export() : {};
8093
8130
  fs.writeFileSync(opts.output, JSON.stringify(weights, null, 2));
8094
8131
  console.log(chalk.green(`Exported to ${opts.output}`));
8095
- } catch (e) {
8096
- console.error(chalk.red('Export failed:'), e.message);
8097
- process.exit(1);
8098
- }
8132
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8099
8133
  });
8100
8134
 
8101
8135
  sonaCmd.command('stats')
8102
- .description('Show SONA learning statistics')
8103
- .option('--json', 'JSON output')
8104
- .action(async (opts) => {
8136
+ .description('Show detailed SONA learning statistics')
8137
+ .option('--json', 'Output as JSON')
8138
+ .action((opts) => {
8139
+ const sona = loadSona();
8105
8140
  try {
8106
- const sona = require('@ruvector/sona');
8107
- const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
8108
- const stats = engine.stats ? await engine.stats() : { trajectories: 0, patterns: 0 };
8109
- if (opts.json || !process.stdout.isTTY) {
8110
- console.log(JSON.stringify(stats, null, 2));
8111
- } else {
8112
- console.log(chalk.bold.cyan('\nSONA Statistics'));
8113
- console.log(chalk.dim('-'.repeat(40)));
8114
- Object.entries(stats).forEach(([k, v]) => {
8115
- console.log(` ${chalk.bold(k)}: ${v}`);
8116
- });
8117
- console.log();
8118
- }
8119
- } catch (e) {
8120
- if (e.code === 'MODULE_NOT_FOUND') {
8121
- console.error(chalk.red('Requires @ruvector/sona'));
8122
- } else {
8123
- console.error(chalk.red('Failed:'), e.message);
8124
- }
8125
- process.exit(1);
8126
- }
8141
+ const engine = sona.SonaEngine ? new sona.SonaEngine() : new sona.SonaCoordinator();
8142
+ const stats = engine.getStats ? engine.getStats() : engine.stats ? engine.stats() : {};
8143
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(stats, null, 2)); return; }
8144
+ console.log(chalk.bold.cyan('\nSONA Statistics\n'));
8145
+ Object.entries(stats).forEach(([k, v]) => console.log(` ${chalk.bold(k + ':')} ${v}`));
8146
+ console.log();
8147
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8148
+ });
8149
+
8150
+ sonaCmd.command('info')
8151
+ .description('Show SONA module availability')
8152
+ .action(() => {
8153
+ const sona = loadSona();
8154
+ console.log(chalk.bold.cyan('\nSONA Info\n'));
8155
+ console.log(` ${chalk.bold('Version:')} ${typeof sona.version === 'function' ? sona.version() : sona.version || 'unknown'}`);
8156
+ console.log(` ${chalk.bold('Engine:')} ${sona.SonaEngine ? 'Native' : 'JS Fallback'}`);
8157
+ console.log();
8158
+ });
8159
+
8160
+ // ============================================================================
8161
+ // Route Commands — Semantic routing via @ruvector/router (lazy-loaded)
8162
+ // ============================================================================
8163
+
8164
+ const routeCmd = program.command('route').description('Semantic routing — classify inputs to routes via HNSW + SIMD');
8165
+
8166
+ function requireRouter() {
8167
+ try { return require('@ruvector/router'); } catch {
8168
+ console.error(chalk.red('Route commands require @ruvector/router'));
8169
+ console.error(chalk.yellow(' npm install @ruvector/router'));
8170
+ process.exit(1);
8171
+ }
8172
+ }
8173
+
8174
+ routeCmd.command('classify <input>')
8175
+ .description('Classify input to a semantic route')
8176
+ .option('--json', 'Output as JSON')
8177
+ .action((input, opts) => {
8178
+ const router = requireRouter();
8179
+ try {
8180
+ const result = router.classify ? router.classify(input) : { route: 'default', confidence: 1.0 };
8181
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result)); return; }
8182
+ console.log(chalk.bold.cyan('\nRoute Classification\n'));
8183
+ console.log(` ${chalk.bold('Input:')} ${input}`);
8184
+ console.log(` ${chalk.bold('Route:')} ${chalk.green(result.route)}`);
8185
+ console.log(` ${chalk.bold('Confidence:')} ${result.confidence}`);
8186
+ console.log();
8187
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8188
+ });
8189
+
8190
+ routeCmd.command('benchmark')
8191
+ .description('Benchmark routing throughput')
8192
+ .option('-n, --iterations <n>', 'Number of iterations', '1000')
8193
+ .action((opts) => {
8194
+ const router = requireRouter();
8195
+ const n = parseInt(opts.iterations);
8196
+ const input = 'test input for routing benchmark';
8197
+ const start = performance.now();
8198
+ for (let i = 0; i < n; i++) {
8199
+ router.classify ? router.classify(input) : null;
8200
+ }
8201
+ const elapsed = performance.now() - start;
8202
+ console.log(chalk.bold.cyan('\nRoute Benchmark\n'));
8203
+ console.log(` ${chalk.bold('Iterations:')} ${n}`);
8204
+ console.log(` ${chalk.bold('Total:')} ${elapsed.toFixed(2)}ms`);
8205
+ console.log(` ${chalk.bold('Per-route:')} ${(elapsed / n).toFixed(3)}ms`);
8206
+ console.log(` ${chalk.bold('Throughput:')} ${Math.floor(n / (elapsed / 1000))}/sec`);
8207
+ console.log();
8208
+ });
8209
+
8210
+ routeCmd.command('info')
8211
+ .description('Show router module information')
8212
+ .action(() => {
8213
+ const router = requireRouter();
8214
+ console.log(chalk.bold.cyan('\nRouter Info\n'));
8215
+ console.log(` ${chalk.bold('Version:')} ${typeof router.version === 'function' ? router.version() : router.version || 'unknown'}`);
8216
+ console.log();
8127
8217
  });
8128
8218
 
8129
8219
  program.parse();