ruvector 0.1.99 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/cli.js CHANGED
@@ -4,11 +4,21 @@
4
4
  process.env.RUVECTOR_CLI = '1';
5
5
 
6
6
  const { Command } = require('commander');
7
- const chalk = require('chalk');
8
- const ora = require('ora');
7
+ const _chalk = require('chalk');
8
+ const chalk = _chalk.default || _chalk;
9
9
  const fs = require('fs');
10
10
  const path = require('path');
11
11
 
12
+ // Lazy load ora (spinner) - only needed for commands with progress indicators
13
+ let _oraModule = null;
14
+ function ora(text) {
15
+ if (_oraModule === null) {
16
+ const _ora = require('ora');
17
+ _oraModule = _ora.default || _ora;
18
+ }
19
+ return _oraModule(text);
20
+ }
21
+
12
22
  // Lazy load ruvector (only when needed, not for install/help commands)
13
23
  let VectorDB, getVersion, getImplementationType;
14
24
  let ruvectorLoaded = false;
@@ -35,59 +45,79 @@ function requireRuvector() {
35
45
  }
36
46
  }
37
47
 
38
- // Import GNN (optional - graceful fallback if not available)
48
+ // Lazy load GNN (optional - loaded on first use, not at startup)
49
+ // Saves ~6ms startup time by deferring require('@ruvector/gnn')
50
+ let _gnnModule = undefined; // undefined = not yet attempted, null = failed, object = loaded
39
51
  let RuvectorLayer, TensorCompress, differentiableSearch, getCompressionLevel, hierarchicalForward;
40
52
  let gnnAvailable = false;
41
- try {
42
- const gnn = require('@ruvector/gnn');
43
- RuvectorLayer = gnn.RuvectorLayer;
44
- TensorCompress = gnn.TensorCompress;
45
- differentiableSearch = gnn.differentiableSearch;
46
- getCompressionLevel = gnn.getCompressionLevel;
47
- hierarchicalForward = gnn.hierarchicalForward;
48
- gnnAvailable = true;
49
- } catch (e) {
50
- // GNN not available - commands will show helpful message
53
+
54
+ function loadGnn() {
55
+ if (_gnnModule !== undefined) return _gnnModule;
56
+ try {
57
+ const gnn = require('@ruvector/gnn');
58
+ RuvectorLayer = gnn.RuvectorLayer;
59
+ TensorCompress = gnn.TensorCompress;
60
+ differentiableSearch = gnn.differentiableSearch;
61
+ getCompressionLevel = gnn.getCompressionLevel;
62
+ hierarchicalForward = gnn.hierarchicalForward;
63
+ _gnnModule = gnn;
64
+ gnnAvailable = true;
65
+ return gnn;
66
+ } catch (e) {
67
+ _gnnModule = null;
68
+ gnnAvailable = false;
69
+ return null;
70
+ }
51
71
  }
52
72
 
53
- // Import Attention (optional - graceful fallback if not available)
73
+ // Lazy load Attention (optional - loaded on first use, not at startup)
74
+ // Saves ~5ms startup time by deferring require('@ruvector/attention')
75
+ let _attentionModule = undefined; // undefined = not yet attempted
54
76
  let DotProductAttention, MultiHeadAttention, HyperbolicAttention, FlashAttention, LinearAttention, MoEAttention;
55
77
  let GraphRoPeAttention, EdgeFeaturedAttention, DualSpaceAttention, LocalGlobalAttention;
56
78
  let benchmarkAttention, computeAttentionAsync, batchAttentionCompute, parallelAttentionCompute;
57
79
  let expMap, logMap, mobiusAddition, poincareDistance, projectToPoincareBall;
58
80
  let attentionInfo, attentionVersion;
59
81
  let attentionAvailable = false;
60
- try {
61
- const attention = require('@ruvector/attention');
62
- // Core mechanisms
63
- DotProductAttention = attention.DotProductAttention;
64
- MultiHeadAttention = attention.MultiHeadAttention;
65
- HyperbolicAttention = attention.HyperbolicAttention;
66
- FlashAttention = attention.FlashAttention;
67
- LinearAttention = attention.LinearAttention;
68
- MoEAttention = attention.MoEAttention;
69
- // Graph attention
70
- GraphRoPeAttention = attention.GraphRoPeAttention;
71
- EdgeFeaturedAttention = attention.EdgeFeaturedAttention;
72
- DualSpaceAttention = attention.DualSpaceAttention;
73
- LocalGlobalAttention = attention.LocalGlobalAttention;
74
- // Utilities
75
- benchmarkAttention = attention.benchmarkAttention;
76
- computeAttentionAsync = attention.computeAttentionAsync;
77
- batchAttentionCompute = attention.batchAttentionCompute;
78
- parallelAttentionCompute = attention.parallelAttentionCompute;
79
- // Hyperbolic math
80
- expMap = attention.expMap;
81
- logMap = attention.logMap;
82
- mobiusAddition = attention.mobiusAddition;
83
- poincareDistance = attention.poincareDistance;
84
- projectToPoincareBall = attention.projectToPoincareBall;
85
- // Meta
86
- attentionInfo = attention.info;
87
- attentionVersion = attention.version;
88
- attentionAvailable = true;
89
- } catch (e) {
90
- // Attention not available - commands will show helpful message
82
+
83
+ function loadAttention() {
84
+ if (_attentionModule !== undefined) return _attentionModule;
85
+ try {
86
+ const attention = require('@ruvector/attention');
87
+ // Core mechanisms
88
+ DotProductAttention = attention.DotProductAttention;
89
+ MultiHeadAttention = attention.MultiHeadAttention;
90
+ HyperbolicAttention = attention.HyperbolicAttention;
91
+ FlashAttention = attention.FlashAttention;
92
+ LinearAttention = attention.LinearAttention;
93
+ MoEAttention = attention.MoEAttention;
94
+ // Graph attention
95
+ GraphRoPeAttention = attention.GraphRoPeAttention;
96
+ EdgeFeaturedAttention = attention.EdgeFeaturedAttention;
97
+ DualSpaceAttention = attention.DualSpaceAttention;
98
+ LocalGlobalAttention = attention.LocalGlobalAttention;
99
+ // Utilities
100
+ benchmarkAttention = attention.benchmarkAttention;
101
+ computeAttentionAsync = attention.computeAttentionAsync;
102
+ batchAttentionCompute = attention.batchAttentionCompute;
103
+ parallelAttentionCompute = attention.parallelAttentionCompute;
104
+ // Hyperbolic math
105
+ expMap = attention.expMap;
106
+ logMap = attention.logMap;
107
+ mobiusAddition = attention.mobiusAddition;
108
+ poincareDistance = attention.poincareDistance;
109
+ projectToPoincareBall = attention.projectToPoincareBall;
110
+ // Meta
111
+ attentionInfo = attention.info;
112
+ attentionVersion = attention.version;
113
+ _attentionModule = attention;
114
+ attentionAvailable = true;
115
+ return attention;
116
+ } catch (e) {
117
+ _attentionModule = null;
118
+ attentionAvailable = false;
119
+ return null;
120
+ }
91
121
  }
92
122
 
93
123
  const program = new Command();
@@ -354,6 +384,10 @@ program
354
384
  .command('info')
355
385
  .description('Show ruvector information')
356
386
  .action(() => {
387
+ // Trigger lazy load of optional modules for availability check
388
+ loadGnn();
389
+ loadAttention();
390
+
357
391
  console.log(chalk.cyan('\nruvector Information'));
358
392
  console.log(chalk.white(` CLI Version: ${chalk.yellow(packageJson.version)}`));
359
393
 
@@ -390,6 +424,9 @@ program
390
424
  .action(async (packages, options) => {
391
425
  const { execSync } = require('child_process');
392
426
 
427
+ // Trigger lazy load to check availability
428
+ loadGnn();
429
+
393
430
  // Available optional packages - all ruvector npm packages
394
431
  const availablePackages = {
395
432
  // Core packages
@@ -677,8 +714,9 @@ program
677
714
  // GNN Commands
678
715
  // =============================================================================
679
716
 
680
- // Helper to check GNN availability
717
+ // Helper to check GNN availability (triggers lazy load)
681
718
  function requireGnn() {
719
+ loadGnn();
682
720
  if (!gnnAvailable) {
683
721
  console.error(chalk.red('Error: GNN module not available.'));
684
722
  console.error(chalk.yellow('Install it with: npm install @ruvector/gnn'));
@@ -874,6 +912,7 @@ gnnCmd
874
912
  .command('info')
875
913
  .description('Show GNN module information')
876
914
  .action(() => {
915
+ loadGnn();
877
916
  if (!gnnAvailable) {
878
917
  console.log(chalk.yellow('\nGNN Module: Not installed'));
879
918
  console.log(chalk.white('Install with: npm install @ruvector/gnn'));
@@ -903,8 +942,9 @@ gnnCmd
903
942
  // Attention Commands
904
943
  // =============================================================================
905
944
 
906
- // Helper to require attention module
945
+ // Helper to require attention module (triggers lazy load)
907
946
  function requireAttention() {
947
+ loadAttention();
908
948
  if (!attentionAvailable) {
909
949
  console.error(chalk.red('Error: @ruvector/attention is not installed'));
910
950
  console.error(chalk.yellow('Install it with: npm install @ruvector/attention'));
@@ -1232,6 +1272,7 @@ attentionCmd
1232
1272
  .command('info')
1233
1273
  .description('Show attention module information')
1234
1274
  .action(() => {
1275
+ loadAttention();
1235
1276
  if (!attentionAvailable) {
1236
1277
  console.log(chalk.yellow('\nAttention Module: Not installed'));
1237
1278
  console.log(chalk.white('Install with: npm install @ruvector/attention'));
@@ -1277,6 +1318,7 @@ attentionCmd
1277
1318
  .description('List all available attention mechanisms')
1278
1319
  .option('-v, --verbose', 'Show detailed information')
1279
1320
  .action((options) => {
1321
+ loadAttention();
1280
1322
  console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
1281
1323
  console.log(chalk.cyan(' Available Attention Mechanisms'));
1282
1324
  console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
@@ -1333,6 +1375,10 @@ program
1333
1375
  .action(async (options) => {
1334
1376
  const { execSync } = require('child_process');
1335
1377
 
1378
+ // Trigger lazy load of optional modules for availability check
1379
+ loadGnn();
1380
+ loadAttention();
1381
+
1336
1382
  console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
1337
1383
  console.log(chalk.cyan(' RuVector Doctor'));
1338
1384
  console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
@@ -2521,6 +2567,7 @@ program
2521
2567
  }
2522
2568
 
2523
2569
  if (options.gnn) {
2570
+ loadGnn();
2524
2571
  if (!gnnAvailable) {
2525
2572
  console.log(chalk.yellow(' @ruvector/gnn not installed.'));
2526
2573
  console.log(chalk.white(' Install with: npm install @ruvector/gnn'));
@@ -7353,4 +7400,730 @@ mcpCmd.command('info')
7353
7400
  console.log();
7354
7401
  });
7355
7402
 
7403
+ // ============================================================================
7404
+ // Brain Commands — Shared intelligence via @ruvector/pi-brain (lazy-loaded)
7405
+ // ============================================================================
7406
+
7407
+ let _piBrainClient = null;
7408
+ async function getPiBrainClient(opts = {}) {
7409
+ if (_piBrainClient) return _piBrainClient;
7410
+ try {
7411
+ const piBrain = require('@ruvector/pi-brain');
7412
+ const PiBrainClient = piBrain.PiBrainClient || piBrain.default;
7413
+ const url = opts.url || process.env.BRAIN_URL || 'https://pi.ruv.io';
7414
+ const key = opts.key || process.env.PI || '';
7415
+ _piBrainClient = new PiBrainClient({ url, key });
7416
+ return _piBrainClient;
7417
+ } catch {
7418
+ console.error(chalk.red('Brain commands require @ruvector/pi-brain'));
7419
+ console.error(chalk.yellow(' npm install @ruvector/pi-brain'));
7420
+ process.exit(1);
7421
+ }
7422
+ }
7423
+
7424
+ const brainCmd = program.command('brain').description('Shared intelligence — search, share, vote on collective knowledge');
7425
+
7426
+ brainCmd.command('search <query>')
7427
+ .description('Semantic search across shared knowledge')
7428
+ .option('-l, --limit <n>', 'Max results', '10')
7429
+ .option('-c, --category <cat>', 'Filter by category')
7430
+ .option('--url <url>', 'Brain server URL')
7431
+ .option('--key <key>', 'PI key')
7432
+ .option('--json', 'JSON output')
7433
+ .action(async (query, opts) => {
7434
+ const spinner = ora(`Searching brain for "${query}"...`);
7435
+ spinner.start();
7436
+ try {
7437
+ const client = await getPiBrainClient(opts);
7438
+ const results = await client.search(query, { limit: parseInt(opts.limit), category: opts.category });
7439
+ spinner.stop();
7440
+ if (opts.json || !process.stdout.isTTY) {
7441
+ console.log(JSON.stringify(results, null, 2));
7442
+ } else {
7443
+ const items = results.memories || results.results || results || [];
7444
+ if (items.length === 0) {
7445
+ console.log(chalk.yellow('No results found.'));
7446
+ } else {
7447
+ items.forEach((m, i) => {
7448
+ console.log(chalk.bold.cyan(`${i + 1}. ${m.title || m.id}`));
7449
+ if (m.category) console.log(chalk.dim(` Category: ${m.category}`));
7450
+ if (m.quality_score != null) console.log(chalk.dim(` Quality: ${(m.quality_score * 100).toFixed(0)}%`));
7451
+ if (m.content) console.log(` ${m.content.slice(0, 120)}${m.content.length > 120 ? '...' : ''}`);
7452
+ console.log();
7453
+ });
7454
+ }
7455
+ }
7456
+ } catch (e) {
7457
+ spinner.stop();
7458
+ console.error(chalk.red('Search failed:'), e.message);
7459
+ process.exit(1);
7460
+ }
7461
+ });
7462
+
7463
+ brainCmd.command('share <title>')
7464
+ .description('Share knowledge with the collective brain')
7465
+ .option('-c, --category <cat>', 'Category (pattern, solution, architecture, convention, security, performance, tooling)', 'pattern')
7466
+ .option('-t, --tags <tags>', 'Comma-separated tags')
7467
+ .option('--content <text>', 'Content body (or pipe via stdin)')
7468
+ .option('--url <url>', 'Brain server URL')
7469
+ .option('--key <key>', 'PI key')
7470
+ .action(async (title, opts) => {
7471
+ const spinner = ora('Sharing with brain...');
7472
+ spinner.start();
7473
+ try {
7474
+ const client = await getPiBrainClient(opts);
7475
+ let content = opts.content || '';
7476
+ if (!content && !process.stdin.isTTY) {
7477
+ const chunks = [];
7478
+ for await (const chunk of process.stdin) chunks.push(chunk);
7479
+ content = Buffer.concat(chunks).toString('utf8');
7480
+ }
7481
+ if (!content) { spinner.stop(); console.error(chalk.red('Provide content via --content or stdin')); process.exit(1); }
7482
+ const tags = opts.tags ? opts.tags.split(',').map(t => t.trim()) : [];
7483
+ const result = await client.share({ title, content, category: opts.category, tags });
7484
+ spinner.succeed(chalk.green(`Shared: ${result.id || 'ok'}`));
7485
+ } catch (e) {
7486
+ spinner.fail(chalk.red('Share failed: ' + e.message));
7487
+ process.exit(1);
7488
+ }
7489
+ });
7490
+
7491
+ brainCmd.command('get <id>')
7492
+ .description('Retrieve a specific memory with provenance')
7493
+ .option('--url <url>', 'Brain server URL')
7494
+ .option('--key <key>', 'PI key')
7495
+ .option('--json', 'JSON output')
7496
+ .action(async (id, opts) => {
7497
+ try {
7498
+ const client = await getPiBrainClient(opts);
7499
+ const result = await client.get(id);
7500
+ if (opts.json || !process.stdout.isTTY) {
7501
+ console.log(JSON.stringify(result, null, 2));
7502
+ } else {
7503
+ console.log(chalk.bold.cyan(result.title || result.id));
7504
+ if (result.category) console.log(chalk.dim(`Category: ${result.category}`));
7505
+ if (result.content) console.log(`\n${result.content}`);
7506
+ if (result.tags && result.tags.length) console.log(chalk.dim(`\nTags: ${result.tags.join(', ')}`));
7507
+ if (result.contributor_pseudonym) console.log(chalk.dim(`Contributor: ${result.contributor_pseudonym}`));
7508
+ }
7509
+ } catch (e) {
7510
+ console.error(chalk.red('Get failed:'), e.message);
7511
+ process.exit(1);
7512
+ }
7513
+ });
7514
+
7515
+ brainCmd.command('vote <id> <direction>')
7516
+ .description('Vote on knowledge quality (up or down)')
7517
+ .option('--url <url>', 'Brain server URL')
7518
+ .option('--key <key>', 'PI key')
7519
+ .action(async (id, direction, opts) => {
7520
+ if (!['up', 'down'].includes(direction)) {
7521
+ console.error(chalk.red('Direction must be "up" or "down"'));
7522
+ process.exit(1);
7523
+ }
7524
+ try {
7525
+ const client = await getPiBrainClient(opts);
7526
+ await client.vote(id, direction);
7527
+ console.log(chalk.green(`Voted ${direction} on ${id}`));
7528
+ } catch (e) {
7529
+ console.error(chalk.red('Vote failed:'), e.message);
7530
+ process.exit(1);
7531
+ }
7532
+ });
7533
+
7534
+ brainCmd.command('list')
7535
+ .description('List recent shared memories')
7536
+ .option('-c, --category <cat>', 'Filter by category')
7537
+ .option('-l, --limit <n>', 'Max results', '20')
7538
+ .option('--url <url>', 'Brain server URL')
7539
+ .option('--key <key>', 'PI key')
7540
+ .option('--json', 'JSON output')
7541
+ .action(async (opts) => {
7542
+ try {
7543
+ const client = await getPiBrainClient(opts);
7544
+ const results = await client.list({ category: opts.category, limit: parseInt(opts.limit) });
7545
+ const items = results.memories || results || [];
7546
+ if (opts.json || !process.stdout.isTTY) {
7547
+ console.log(JSON.stringify(items, null, 2));
7548
+ } else {
7549
+ if (items.length === 0) { console.log(chalk.yellow('No memories found.')); return; }
7550
+ items.forEach((m, i) => {
7551
+ const quality = m.quality_score != null ? chalk.dim(` [${(m.quality_score * 100).toFixed(0)}%]`) : '';
7552
+ console.log(`${chalk.bold(i + 1 + '.')} ${m.title || m.id}${quality} ${chalk.dim(m.category || '')}`);
7553
+ });
7554
+ }
7555
+ } catch (e) {
7556
+ console.error(chalk.red('List failed:'), e.message);
7557
+ process.exit(1);
7558
+ }
7559
+ });
7560
+
7561
+ brainCmd.command('delete <id>')
7562
+ .description('Delete your own contribution')
7563
+ .option('--url <url>', 'Brain server URL')
7564
+ .option('--key <key>', 'PI key')
7565
+ .action(async (id, opts) => {
7566
+ try {
7567
+ const client = await getPiBrainClient(opts);
7568
+ await client.delete(id);
7569
+ console.log(chalk.green(`Deleted ${id}`));
7570
+ } catch (e) {
7571
+ console.error(chalk.red('Delete failed:'), e.message);
7572
+ process.exit(1);
7573
+ }
7574
+ });
7575
+
7576
+ brainCmd.command('status')
7577
+ .description('Show brain system health and statistics')
7578
+ .option('--url <url>', 'Brain server URL')
7579
+ .option('--key <key>', 'PI key')
7580
+ .option('--json', 'JSON output')
7581
+ .action(async (opts) => {
7582
+ try {
7583
+ const client = await getPiBrainClient(opts);
7584
+ const status = await client.status();
7585
+ if (opts.json || !process.stdout.isTTY) {
7586
+ console.log(JSON.stringify(status, null, 2));
7587
+ } else {
7588
+ console.log(chalk.bold.cyan('\nBrain Status'));
7589
+ console.log(chalk.dim('-'.repeat(40)));
7590
+ Object.entries(status).forEach(([k, v]) => {
7591
+ console.log(` ${chalk.bold(k)}: ${typeof v === 'object' ? JSON.stringify(v) : v}`);
7592
+ });
7593
+ console.log();
7594
+ }
7595
+ } catch (e) {
7596
+ console.error(chalk.red('Status failed:'), e.message);
7597
+ process.exit(1);
7598
+ }
7599
+ });
7600
+
7601
+ brainCmd.command('drift')
7602
+ .description('Check knowledge drift between local and shared')
7603
+ .option('-d, --domain <domain>', 'Domain to check')
7604
+ .option('--url <url>', 'Brain server URL')
7605
+ .option('--key <key>', 'PI key')
7606
+ .option('--json', 'JSON output')
7607
+ .action(async (opts) => {
7608
+ try {
7609
+ const client = await getPiBrainClient(opts);
7610
+ const report = await client.drift({ domain: opts.domain });
7611
+ if (opts.json || !process.stdout.isTTY) {
7612
+ console.log(JSON.stringify(report, null, 2));
7613
+ } else {
7614
+ console.log(chalk.bold.cyan('\nDrift Report'));
7615
+ console.log(chalk.dim('-'.repeat(40)));
7616
+ console.log(` CV: ${report.cv || 'N/A'}`);
7617
+ console.log(` Drifting: ${report.is_drifting ? chalk.red('YES') : chalk.green('NO')}`);
7618
+ if (report.suggested_action) console.log(` Action: ${report.suggested_action}`);
7619
+ console.log();
7620
+ }
7621
+ } catch (e) {
7622
+ console.error(chalk.red('Drift check failed:'), e.message);
7623
+ process.exit(1);
7624
+ }
7625
+ });
7626
+
7627
+ brainCmd.command('partition')
7628
+ .description('Get knowledge topology via mincut partitioning')
7629
+ .option('-d, --domain <domain>', 'Domain to partition')
7630
+ .option('--min-cluster <n>', 'Minimum cluster size', '3')
7631
+ .option('--url <url>', 'Brain server URL')
7632
+ .option('--key <key>', 'PI key')
7633
+ .option('--json', 'JSON output')
7634
+ .action(async (opts) => {
7635
+ try {
7636
+ const client = await getPiBrainClient(opts);
7637
+ const result = await client.partition({ domain: opts.domain, min_cluster_size: parseInt(opts.minCluster) });
7638
+ if (opts.json || !process.stdout.isTTY) {
7639
+ console.log(JSON.stringify(result, null, 2));
7640
+ } else {
7641
+ const clusters = result.clusters || [];
7642
+ console.log(chalk.bold.cyan(`\nKnowledge Partitions: ${clusters.length} clusters`));
7643
+ console.log(chalk.dim('-'.repeat(40)));
7644
+ clusters.forEach((c, i) => {
7645
+ console.log(` ${chalk.bold('Cluster ' + (i + 1))}: ${c.size || (c.members && c.members.length) || '?'} memories`);
7646
+ if (c.label) console.log(` Label: ${c.label}`);
7647
+ if (c.edge_strength != null) console.log(chalk.dim(` Edge strength: ${c.edge_strength.toFixed(3)}`));
7648
+ });
7649
+ console.log();
7650
+ }
7651
+ } catch (e) {
7652
+ console.error(chalk.red('Partition failed:'), e.message);
7653
+ process.exit(1);
7654
+ }
7655
+ });
7656
+
7657
+ brainCmd.command('transfer <source> <target>')
7658
+ .description('Transfer learned priors between domains')
7659
+ .option('--url <url>', 'Brain server URL')
7660
+ .option('--key <key>', 'PI key')
7661
+ .option('--json', 'JSON output')
7662
+ .action(async (source, target, opts) => {
7663
+ const spinner = ora(`Transferring ${source} -> ${target}...`);
7664
+ spinner.start();
7665
+ try {
7666
+ const client = await getPiBrainClient(opts);
7667
+ const result = await client.transfer(source, target);
7668
+ spinner.stop();
7669
+ if (opts.json || !process.stdout.isTTY) {
7670
+ console.log(JSON.stringify(result, null, 2));
7671
+ } else {
7672
+ console.log(chalk.green(`Transfer complete: ${source} -> ${target}`));
7673
+ if (result.acceleration_factor) console.log(` Acceleration: ${result.acceleration_factor.toFixed(2)}x`);
7674
+ if (result.improved_target != null) console.log(` Target improved: ${result.improved_target ? 'yes' : 'no'}`);
7675
+ }
7676
+ } catch (e) {
7677
+ spinner.stop();
7678
+ console.error(chalk.red('Transfer failed:'), e.message);
7679
+ process.exit(1);
7680
+ }
7681
+ });
7682
+
7683
+ brainCmd.command('sync [direction]')
7684
+ .description('Sync LoRA weights (pull, push, or both)')
7685
+ .option('--url <url>', 'Brain server URL')
7686
+ .option('--key <key>', 'PI key')
7687
+ .action(async (direction, opts) => {
7688
+ const dir = direction || 'both';
7689
+ const spinner = ora(`LoRA sync (${dir})...`);
7690
+ spinner.start();
7691
+ try {
7692
+ const client = await getPiBrainClient(opts);
7693
+ const result = await client.loraSync({ direction: dir });
7694
+ spinner.succeed(chalk.green(`LoRA sync ${dir}: ${result.status || 'ok'}`));
7695
+ } catch (e) {
7696
+ spinner.fail(chalk.red('Sync failed: ' + e.message));
7697
+ process.exit(1);
7698
+ }
7699
+ });
7700
+
7701
+ // ============================================================================
7702
+ // Edge Commands — Distributed compute via edge-net (native fetch)
7703
+ // ============================================================================
7704
+
7705
+ const EDGE_GENESIS = 'https://edge-net-genesis-875130704813.us-central1.run.app';
7706
+ const EDGE_RELAY = 'https://edge-net-relay-875130704813.us-central1.run.app';
7707
+ const EDGE_DASHBOARD = 'https://edge-net-dashboard-875130704813.us-central1.run.app';
7708
+
7709
+ const edgeCmd = program.command('edge').description('Edge-net distributed compute network');
7710
+
7711
+ edgeCmd.command('status')
7712
+ .description('Query network status (genesis, relay, nodes)')
7713
+ .option('--json', 'JSON output')
7714
+ .action(async (opts) => {
7715
+ const spinner = ora('Querying edge network...');
7716
+ spinner.start();
7717
+ try {
7718
+ const res = await fetch(`${EDGE_GENESIS}/api/status`);
7719
+ if (!res.ok) throw new Error(`HTTP ${res.status}`);
7720
+ const data = await res.json();
7721
+ spinner.stop();
7722
+ if (opts.json || !process.stdout.isTTY) {
7723
+ console.log(JSON.stringify(data, null, 2));
7724
+ } else {
7725
+ console.log(chalk.bold.cyan('\nEdge Network Status'));
7726
+ console.log(chalk.dim('-'.repeat(40)));
7727
+ if (data.nodes != null) console.log(` Nodes: ${data.nodes}`);
7728
+ if (data.total_compute != null) console.log(` Compute: ${data.total_compute}`);
7729
+ if (data.ruv_supply != null) console.log(` rUv Supply: ${data.ruv_supply}`);
7730
+ if (data.phase) console.log(` Phase: ${data.phase}`);
7731
+ console.log();
7732
+ }
7733
+ } catch (e) {
7734
+ spinner.stop();
7735
+ console.error(chalk.red('Edge status failed:'), e.message);
7736
+ console.error(chalk.dim(' Genesis URL: ' + EDGE_GENESIS));
7737
+ }
7738
+ });
7739
+
7740
+ edgeCmd.command('join')
7741
+ .description('Join as a compute node')
7742
+ .option('--contribution <n>', 'Contribution factor (0.0 - 1.0)', '0.3')
7743
+ .option('--key <key>', 'PI key')
7744
+ .action(async (opts) => {
7745
+ const key = opts.key || process.env.PI || '';
7746
+ if (!key) {
7747
+ console.error(chalk.red('PI key required. Set PI env var or use --key'));
7748
+ console.error(chalk.yellow(' Generate one: npx ruvector identity generate'));
7749
+ process.exit(1);
7750
+ }
7751
+ console.log(chalk.cyan(`Joining edge network (contribution=${opts.contribution})...`));
7752
+ console.log(chalk.dim('This is a long-running process. Press Ctrl+C to leave.\n'));
7753
+ try {
7754
+ const res = await fetch(`${EDGE_RELAY}/api/join`, {
7755
+ method: 'POST',
7756
+ headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${key}` },
7757
+ body: JSON.stringify({ contribution: parseFloat(opts.contribution) })
7758
+ });
7759
+ if (!res.ok) throw new Error(`HTTP ${res.status}: ${await res.text()}`);
7760
+ const data = await res.json();
7761
+ console.log(chalk.green('Joined network.'), data.node_id ? `Node: ${data.node_id}` : '');
7762
+ } catch (e) {
7763
+ console.error(chalk.red('Join failed:'), e.message);
7764
+ process.exit(1);
7765
+ }
7766
+ });
7767
+
7768
+ edgeCmd.command('balance')
7769
+ .description('Check rUv balance for current identity')
7770
+ .option('--key <key>', 'PI key')
7771
+ .option('--json', 'JSON output')
7772
+ .action(async (opts) => {
7773
+ const key = opts.key || process.env.PI || '';
7774
+ if (!key) { console.error(chalk.red('PI key required')); process.exit(1); }
7775
+ try {
7776
+ const crypto = require('crypto');
7777
+ const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7778
+ const res = await fetch(`${EDGE_GENESIS}/api/balance/${pseudonym}`);
7779
+ if (!res.ok) throw new Error(`HTTP ${res.status}`);
7780
+ const data = await res.json();
7781
+ if (opts.json || !process.stdout.isTTY) {
7782
+ console.log(JSON.stringify(data, null, 2));
7783
+ } else {
7784
+ console.log(chalk.bold.cyan(`\nrUv Balance: ${data.balance != null ? data.balance : 'N/A'}`));
7785
+ if (data.earned != null) console.log(chalk.dim(` Earned: ${data.earned}`));
7786
+ if (data.spent != null) console.log(chalk.dim(` Spent: ${data.spent}`));
7787
+ console.log();
7788
+ }
7789
+ } catch (e) {
7790
+ console.error(chalk.red('Balance check failed:'), e.message);
7791
+ }
7792
+ });
7793
+
7794
+ edgeCmd.command('tasks')
7795
+ .description('List available distributed compute tasks')
7796
+ .option('--json', 'JSON output')
7797
+ .action(async (opts) => {
7798
+ try {
7799
+ const res = await fetch(`${EDGE_GENESIS}/api/tasks`);
7800
+ if (!res.ok) throw new Error(`HTTP ${res.status}`);
7801
+ const data = await res.json();
7802
+ const tasks = data.tasks || data || [];
7803
+ if (opts.json || !process.stdout.isTTY) {
7804
+ console.log(JSON.stringify(tasks, null, 2));
7805
+ } else {
7806
+ if (tasks.length === 0) { console.log(chalk.yellow('No tasks available.')); return; }
7807
+ console.log(chalk.bold.cyan(`\n${tasks.length} available tasks\n`));
7808
+ tasks.forEach((t, i) => {
7809
+ console.log(` ${chalk.bold(i + 1 + '.')} ${t.type || t.id} -- ${t.reward || '?'} rUv`);
7810
+ });
7811
+ console.log();
7812
+ }
7813
+ } catch (e) {
7814
+ console.error(chalk.red('Tasks query failed:'), e.message);
7815
+ }
7816
+ });
7817
+
7818
+ edgeCmd.command('dashboard')
7819
+ .description('Open edge-net dashboard in browser')
7820
+ .action(() => {
7821
+ const url = EDGE_DASHBOARD;
7822
+ console.log(chalk.cyan(`Opening: ${url}`));
7823
+ const { exec } = require('child_process');
7824
+ const cmd = process.platform === 'darwin' ? 'open' : process.platform === 'win32' ? 'start' : 'xdg-open';
7825
+ exec(`${cmd} ${url}`, () => {});
7826
+ });
7827
+
7828
+ // ============================================================================
7829
+ // Identity Commands — pi key management (Node.js crypto)
7830
+ // ============================================================================
7831
+
7832
+ const identityCmd = program.command('identity').description('Pi key management for brain, edge, and MCP identity');
7833
+
7834
+ identityCmd.command('generate')
7835
+ .description('Generate a new pi key')
7836
+ .action(() => {
7837
+ const crypto = require('crypto');
7838
+ const key = crypto.randomBytes(32).toString('hex');
7839
+ const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7840
+ console.log(chalk.bold.cyan('\nNew Pi Key Generated\n'));
7841
+ console.log(chalk.bold('Key: ') + chalk.yellow(key));
7842
+ console.log(chalk.bold('Pseudonym: ') + chalk.green(pseudonym));
7843
+ console.log(chalk.dim('\nStore this key securely. Set it as:'));
7844
+ console.log(chalk.dim(' export PI=' + key));
7845
+ console.log(chalk.dim(' # or add to .env file'));
7846
+ console.log();
7847
+ });
7848
+
7849
+ identityCmd.command('show')
7850
+ .description('Display current identity derived from PI key')
7851
+ .option('--key <key>', 'PI key (default: PI env var)')
7852
+ .action((opts) => {
7853
+ const key = opts.key || process.env.PI || '';
7854
+ if (!key) {
7855
+ console.error(chalk.red('No PI key found. Set PI env var or use --key'));
7856
+ console.error(chalk.yellow(' Generate one: npx ruvector identity generate'));
7857
+ process.exit(1);
7858
+ }
7859
+ const crypto = require('crypto');
7860
+ const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7861
+ const mcpToken = crypto.createHmac('sha256', key).update('mcp').digest('hex').slice(0, 32);
7862
+ console.log(chalk.bold.cyan('\nPi Identity\n'));
7863
+ console.log(chalk.bold('Brain Pseudonym: ') + chalk.green(pseudonym));
7864
+ console.log(chalk.bold('MCP Token: ') + chalk.dim(mcpToken));
7865
+ console.log(chalk.bold('Key (first 8): ') + chalk.dim(key.slice(0, 8) + '...'));
7866
+ console.log();
7867
+ });
7868
+
7869
// identity export — encrypt the PI key with AES-256-GCM under a freshly
// generated random passphrase and write it out as a JSON envelope.
identityCmd.command('export')
  .description('Export key to encrypted file')
  .option('-o, --output <path>', 'Output file', 'pi-key.enc')
  .option('--key <key>', 'PI key')
  .action((opts) => {
    const piKey = opts.key || process.env.PI || '';
    if (!piKey) { console.error(chalk.red('No PI key found.')); process.exit(1); }
    const crypto = require('crypto');
    // One-time passphrase shown to the user; never stored alongside the file.
    const password = crypto.randomBytes(16).toString('hex');
    const iv = crypto.randomBytes(16);
    // NOTE(review): scrypt uses the static salt 'ruvector'. Tolerable here
    // because the passphrase is random per export, but it must stay in sync
    // with `identity import` — confirm before changing either side.
    const derivedKey = crypto.scryptSync(password, 'ruvector', 32);
    const cipher = crypto.createCipheriv('aes-256-gcm', derivedKey, iv);
    const encrypted = cipher.update(piKey, 'utf8', 'hex') + cipher.final('hex');
    const tag = cipher.getAuthTag().toString('hex');
    const envelope = { iv: iv.toString('hex'), tag, data: encrypted, v: 1 };
    fs.writeFileSync(opts.output, JSON.stringify(envelope));
    console.log(chalk.green(`Key exported to ${opts.output}`));
    console.log(chalk.bold('Passphrase: ') + chalk.yellow(password));
    console.log(chalk.dim('Store this passphrase separately from the export file.'));
  });
// identity import — decrypt an envelope produced by `identity export`
// and print the recovered key's pseudonym plus a shell snippet to set it.
identityCmd.command('import <file>')
  .description('Import key from encrypted backup')
  .option('-p, --passphrase <pass>', 'Decryption passphrase')
  .action((file, opts) => {
    if (!opts.passphrase) { console.error(chalk.red('Passphrase required (--passphrase)')); process.exit(1); }
    try {
      const crypto = require('crypto');
      const envelope = JSON.parse(fs.readFileSync(file, 'utf8'));
      // Same key derivation as export: scrypt(passphrase, 'ruvector', 32).
      const derivedKey = crypto.scryptSync(opts.passphrase, 'ruvector', 32);
      const decipher = crypto.createDecipheriv('aes-256-gcm', derivedKey, Buffer.from(envelope.iv, 'hex'));
      decipher.setAuthTag(Buffer.from(envelope.tag, 'hex'));
      // GCM authentication: final() throws if the tag does not verify,
      // which lands us in the catch below with "Import failed".
      const key = decipher.update(envelope.data, 'hex', 'utf8') + decipher.final('utf8');
      const pseudonym = crypto
        .createHash('shake256', { outputLength: 16 })
        .update(key)
        .digest('hex');
      console.log(chalk.green('Key imported successfully.'));
      console.log(chalk.bold('Pseudonym: ') + chalk.green(pseudonym));
      console.log(chalk.dim(`\nSet it: export PI=${key}`));
    } catch (e) {
      console.error(chalk.red('Import failed:'), e.message);
      process.exit(1);
    }
  });
// ============================================================================
// LLM Commands — LLM orchestration via @ruvector/ruvllm (lazy-loaded)
// ============================================================================

// Parent command; subcommands (embed/models/benchmark) attach below.
const llmCmd = program
  .command('llm')
  .description('LLM orchestration — embeddings, models, benchmarks');
// llm embed — generate an embedding for <text> via @ruvector/ruvllm.
// The module is required lazily so the CLI starts fast when unused.
llmCmd.command('embed <text>')
  .description('Generate embeddings via ruvllm')
  .option('-m, --model <model>', 'Model name', 'default')
  .option('--json', 'JSON output')
  .action(async (text, opts) => {
    try {
      const ruvllm = require('@ruvector/ruvllm');
      // Support both CJS and ESM default-export module shapes.
      const embed = ruvllm.embed || (ruvllm.default && ruvllm.default.embed);
      if (!embed) throw new Error('ruvllm.embed not found');
      const result = await embed(text, { model: opts.model });
      const machineReadable = opts.json || !process.stdout.isTTY;
      if (machineReadable) {
        console.log(JSON.stringify(result));
        return;
      }
      const vec = result.embedding || result;
      const dim = Array.isArray(vec) ? vec.length : '?';
      console.log(chalk.cyan(`Embedding (dim=${dim}):`));
      if (Array.isArray(vec)) {
        // Preview only the first 8 components to keep the output compact.
        const head = vec.slice(0, 8).map(v => v.toFixed(4)).join(', ');
        const ellipsis = vec.length > 8 ? ', ...' : '';
        console.log(chalk.dim(` [${head}${ellipsis}]`));
      }
    } catch (e) {
      if (e.code === 'MODULE_NOT_FOUND') {
        console.error(chalk.red('LLM commands require @ruvector/ruvllm'));
        console.error(chalk.yellow(' npm install @ruvector/ruvllm'));
      } else {
        console.error(chalk.red('Embed failed:'), e.message);
      }
      process.exit(1);
    }
  });
// llm models — list the models ruvllm exposes.
// Fix: resolve listModels through a possible ESM default export (matching
// how `llm embed` resolves embed), and guard against a non-array result
// before reading .length.
llmCmd.command('models')
  .description('List available LLM models')
  .action(async () => {
    try {
      const ruvllm = require('@ruvector/ruvllm');
      // Keep the method bound to whichever object actually owns it.
      const host = (typeof ruvllm.listModels === 'function')
        ? ruvllm
        : (ruvllm.default && typeof ruvllm.default.listModels === 'function' ? ruvllm.default : null);
      const result = host ? await host.listModels() : [];
      // Defensive: treat a null/undefined/non-array result as "no models".
      const models = Array.isArray(result) ? result : [];
      if (models.length === 0) { console.log(chalk.yellow('No models found.')); return; }
      console.log(chalk.bold.cyan('\nAvailable Models\n'));
      models.forEach(m => {
        console.log(` ${chalk.bold(m.name || m.id)} ${chalk.dim(m.provider || '')} ${chalk.dim(m.size || '')}`);
      });
      console.log();
    } catch (e) {
      if (e.code === 'MODULE_NOT_FOUND') {
        console.error(chalk.red('Requires @ruvector/ruvllm: npm install @ruvector/ruvllm'));
      } else {
        console.error(chalk.red('Failed:'), e.message);
      }
      process.exit(1);
    }
  });
// llm benchmark — time n sequential embed() calls and report latency and
// throughput.
// Fix: parseInt now uses an explicit radix and the iteration count is
// validated; previously a garbage -n value produced NaN loop bounds and
// NaN statistics.
llmCmd.command('benchmark')
  .description('Benchmark LLM inference performance')
  .option('-n, --iterations <n>', 'Iterations', '100')
  .option('-m, --model <model>', 'Model name', 'default')
  .action(async (opts) => {
    // Commander hands us the raw string; validate before doing any work.
    const n = parseInt(opts.iterations, 10);
    if (!Number.isInteger(n) || n <= 0) {
      console.error(chalk.red('Invalid --iterations: must be a positive integer'));
      process.exit(1);
    }
    const spinner = ora('Running LLM benchmark...');
    spinner.start();
    try {
      const ruvllm = require('@ruvector/ruvllm');
      const embed = ruvllm.embed || (ruvllm.default && ruvllm.default.embed);
      if (!embed) throw new Error('ruvllm.embed not found');
      const start = performance.now();
      // Sequential on purpose: this measures per-call latency, not
      // parallel throughput.
      for (let i = 0; i < n; i++) await embed(`benchmark text ${i}`, { model: opts.model });
      const elapsed = performance.now() - start;
      spinner.stop();
      console.log(chalk.bold.cyan('\nLLM Benchmark Results'));
      console.log(chalk.dim('-'.repeat(40)));
      console.log(` Iterations: ${n}`);
      console.log(` Total: ${(elapsed / 1000).toFixed(2)}s`);
      console.log(` Avg: ${(elapsed / n).toFixed(2)}ms/embed`);
      console.log(` Throughput: ${(n / (elapsed / 1000)).toFixed(1)} embeds/s`);
      console.log();
    } catch (e) {
      spinner.stop();
      if (e.code === 'MODULE_NOT_FOUND') {
        console.error(chalk.red('Requires @ruvector/ruvllm'));
      } else {
        console.error(chalk.red('Benchmark failed:'), e.message);
      }
      process.exit(1);
    }
  });
// ============================================================================
// SONA Commands — Self-Optimizing Neural Architecture
// ============================================================================

// Parent command; subcommands (status/patterns/train/export/stats) attach below.
const sonaCmd = program
  .command('sona')
  .description('SONA self-optimizing neural architecture');
// sona status — report the engine's current state. Emits JSON when --json
// is passed or stdout is not a TTY (piped output).
sonaCmd.command('status')
  .description('Show SONA engine status')
  .option('--json', 'JSON output')
  .action(async (opts) => {
    try {
      const sona = require('@ruvector/sona');
      // Prefer the class API when present; otherwise use the module itself.
      const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
      const status = engine.status ? await engine.status() : { state: 'loaded' };
      const wantsJson = opts.json || !process.stdout.isTTY;
      if (wantsJson) {
        console.log(JSON.stringify(status, null, 2));
        return;
      }
      console.log(chalk.bold.cyan('\nSONA Engine Status'));
      console.log(chalk.dim('-'.repeat(40)));
      for (const [field, value] of Object.entries(status)) {
        console.log(` ${chalk.bold(field)}: ${value}`);
      }
      console.log();
    } catch (e) {
      if (e.code === 'MODULE_NOT_FOUND') {
        console.error(chalk.red('Requires @ruvector/sona: npm install @ruvector/sona'));
      } else {
        console.error(chalk.red('Failed:'), e.message);
      }
      process.exit(1);
    }
  });
// sona patterns — list learned patterns, newest-confidence annotated.
// Fix: --limit is parsed with an explicit radix and validated; previously a
// garbage value forwarded NaN to the engine. Also guards against a
// non-array findPatterns() result before reading .length.
sonaCmd.command('patterns')
  .description('List learned SONA patterns')
  .option('-l, --limit <n>', 'Max results', '20')
  .option('--json', 'JSON output')
  .action(async (opts) => {
    const limit = parseInt(opts.limit, 10);
    if (!Number.isInteger(limit) || limit <= 0) {
      console.error(chalk.red('Invalid --limit: must be a positive integer'));
      process.exit(1);
    }
    try {
      const sona = require('@ruvector/sona');
      const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
      const found = engine.findPatterns ? await engine.findPatterns({ limit }) : [];
      // Defensive: engines may return null/undefined on no results.
      const patterns = Array.isArray(found) ? found : [];
      if (opts.json || !process.stdout.isTTY) {
        console.log(JSON.stringify(patterns, null, 2));
      } else {
        if (patterns.length === 0) { console.log(chalk.yellow('No patterns found.')); return; }
        console.log(chalk.bold.cyan(`\n${patterns.length} SONA Patterns\n`));
        patterns.forEach((p, i) => {
          console.log(` ${chalk.bold(i + 1 + '.')} ${p.name || p.type || p.id} ${chalk.dim(p.confidence ? `(${(p.confidence * 100).toFixed(0)}%)` : '')}`);
        });
        console.log();
      }
    } catch (e) {
      if (e.code === 'MODULE_NOT_FOUND') {
        console.error(chalk.red('Requires @ruvector/sona'));
      } else {
        console.error(chalk.red('Failed:'), e.message);
      }
      process.exit(1);
    }
  });
// sona train — start a training trajectory over the given data.
// Fix: --epochs is parsed with an explicit radix and validated before the
// spinner starts; previously a garbage value sent NaN epochs to the engine.
sonaCmd.command('train')
  .description('Start SONA training trajectory')
  .option('-d, --data <path>', 'Training data path')
  .option('--epochs <n>', 'Epochs', '10')
  .action(async (opts) => {
    const epochs = parseInt(opts.epochs, 10);
    if (!Number.isInteger(epochs) || epochs <= 0) {
      console.error(chalk.red('Invalid --epochs: must be a positive integer'));
      process.exit(1);
    }
    const spinner = ora('Training SONA...');
    spinner.start();
    try {
      const sona = require('@ruvector/sona');
      const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
      if (!engine.beginTrajectory) throw new Error('SONA engine does not support training');
      const result = await engine.beginTrajectory({ data: opts.data, epochs });
      spinner.succeed(chalk.green('Training complete'));
      if (result) console.log(chalk.dim(JSON.stringify(result, null, 2)));
    } catch (e) {
      spinner.fail(chalk.red('Training failed: ' + e.message));
      process.exit(1);
    }
  });
// sona export — write the engine's weights to a JSON file.
// Fix: previously an engine without exportWeights() silently produced an
// empty `{}` file while still printing a success message. The write is
// preserved for backward compatibility, but a warning now makes it clear
// the output contains no weights.
sonaCmd.command('export')
  .description('Export SONA model weights')
  .option('-o, --output <path>', 'Output path', 'sona-weights.json')
  .action(async (opts) => {
    try {
      const sona = require('@ruvector/sona');
      const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
      if (!engine.exportWeights) {
        console.log(chalk.yellow('Warning: engine does not support exportWeights; writing empty weights.'));
      }
      const weights = engine.exportWeights ? await engine.exportWeights() : {};
      fs.writeFileSync(opts.output, JSON.stringify(weights, null, 2));
      console.log(chalk.green(`Exported to ${opts.output}`));
    } catch (e) {
      console.error(chalk.red('Export failed:'), e.message);
      process.exit(1);
    }
  });
// sona stats — print learning statistics. Emits JSON when --json is passed
// or stdout is not a TTY (piped output).
sonaCmd.command('stats')
  .description('Show SONA learning statistics')
  .option('--json', 'JSON output')
  .action(async (opts) => {
    try {
      const sona = require('@ruvector/sona');
      // Prefer the class API when present; otherwise use the module itself.
      const engine = sona.SonaEngine ? new sona.SonaEngine() : sona;
      const stats = engine.stats ? await engine.stats() : { trajectories: 0, patterns: 0 };
      const wantsJson = opts.json || !process.stdout.isTTY;
      if (wantsJson) {
        console.log(JSON.stringify(stats, null, 2));
        return;
      }
      console.log(chalk.bold.cyan('\nSONA Statistics'));
      console.log(chalk.dim('-'.repeat(40)));
      for (const [metric, value] of Object.entries(stats)) {
        console.log(` ${chalk.bold(metric)}: ${value}`);
      }
      console.log();
    } catch (e) {
      if (e.code === 'MODULE_NOT_FOUND') {
        console.error(chalk.red('Requires @ruvector/sona'));
      } else {
        console.error(chalk.red('Failed:'), e.message);
      }
      process.exit(1);
    }
  });
7356
8129
  program.parse();