ruvector 0.2.0 → 0.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/bin/cli.js +1116 -663
  2. package/bin/mcp-server.js +516 -26
  3. package/package.json +7 -7
package/bin/cli.js CHANGED
@@ -9,6 +9,52 @@ const chalk = _chalk.default || _chalk;
9
9
  const fs = require('fs');
10
10
  const path = require('path');
11
11
 
12
+ // Load .env from current directory (if exists)
13
+ try {
14
+ const envPath = path.join(process.cwd(), '.env');
15
+ if (fs.existsSync(envPath)) {
16
+ const envContent = fs.readFileSync(envPath, 'utf8');
17
+ for (const line of envContent.split('\n')) {
18
+ const trimmed = line.trim();
19
+ if (!trimmed || trimmed.startsWith('#')) continue;
20
+ const eqIdx = trimmed.indexOf('=');
21
+ if (eqIdx > 0) {
22
+ const key = trimmed.slice(0, eqIdx).trim();
23
+ let value = trimmed.slice(eqIdx + 1).trim();
24
+ // Strip surrounding quotes
25
+ if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) {
26
+ value = value.slice(1, -1);
27
+ }
28
+ // Don't override existing env vars
29
+ if (!process.env[key]) {
30
+ process.env[key] = value;
31
+ }
32
+ }
33
+ }
34
+ }
35
+ } catch {}
36
+
37
+ // Load global config from ~/.ruvector/config.json (if exists)
38
+ try {
39
+ const os = require('os');
40
+ const configPath = path.join(os.homedir(), '.ruvector', 'config.json');
41
+ if (fs.existsSync(configPath)) {
42
+ const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
43
+ // Map config keys to env vars (don't override existing)
44
+ const configMap = {
45
+ brain_url: 'BRAIN_URL',
46
+ pi_key: 'PI',
47
+ edge_genesis_url: 'EDGE_GENESIS_URL',
48
+ edge_relay_url: 'EDGE_RELAY_URL',
49
+ };
50
+ for (const [configKey, envKey] of Object.entries(configMap)) {
51
+ if (config[configKey] && !process.env[envKey]) {
52
+ process.env[envKey] = config[configKey];
53
+ }
54
+ }
55
+ }
56
+ } catch {}
57
+
12
58
  // Lazy load ora (spinner) - only needed for commands with progress indicators
13
59
  let _oraModule = null;
14
60
  function ora(text) {
@@ -46,8 +92,7 @@ function requireRuvector() {
46
92
  }
47
93
 
48
94
  // Lazy load GNN (optional - loaded on first use, not at startup)
49
- // Saves ~6ms startup time by deferring require('@ruvector/gnn')
50
- let _gnnModule = undefined; // undefined = not yet attempted, null = failed, object = loaded
95
+ let _gnnModule = undefined;
51
96
  let RuvectorLayer, TensorCompress, differentiableSearch, getCompressionLevel, hierarchicalForward;
52
97
  let gnnAvailable = false;
53
98
 
@@ -63,7 +108,7 @@ function loadGnn() {
63
108
  _gnnModule = gnn;
64
109
  gnnAvailable = true;
65
110
  return gnn;
66
- } catch (e) {
111
+ } catch {
67
112
  _gnnModule = null;
68
113
  gnnAvailable = false;
69
114
  return null;
@@ -71,8 +116,7 @@ function loadGnn() {
71
116
  }
72
117
 
73
118
  // Lazy load Attention (optional - loaded on first use, not at startup)
74
- // Saves ~5ms startup time by deferring require('@ruvector/attention')
75
- let _attentionModule = undefined; // undefined = not yet attempted
119
+ let _attentionModule = undefined;
76
120
  let DotProductAttention, MultiHeadAttention, HyperbolicAttention, FlashAttention, LinearAttention, MoEAttention;
77
121
  let GraphRoPeAttention, EdgeFeaturedAttention, DualSpaceAttention, LocalGlobalAttention;
78
122
  let benchmarkAttention, computeAttentionAsync, batchAttentionCompute, parallelAttentionCompute;
@@ -84,36 +128,31 @@ function loadAttention() {
84
128
  if (_attentionModule !== undefined) return _attentionModule;
85
129
  try {
86
130
  const attention = require('@ruvector/attention');
87
- // Core mechanisms
88
131
  DotProductAttention = attention.DotProductAttention;
89
132
  MultiHeadAttention = attention.MultiHeadAttention;
90
133
  HyperbolicAttention = attention.HyperbolicAttention;
91
134
  FlashAttention = attention.FlashAttention;
92
135
  LinearAttention = attention.LinearAttention;
93
136
  MoEAttention = attention.MoEAttention;
94
- // Graph attention
95
137
  GraphRoPeAttention = attention.GraphRoPeAttention;
96
138
  EdgeFeaturedAttention = attention.EdgeFeaturedAttention;
97
139
  DualSpaceAttention = attention.DualSpaceAttention;
98
140
  LocalGlobalAttention = attention.LocalGlobalAttention;
99
- // Utilities
100
141
  benchmarkAttention = attention.benchmarkAttention;
101
142
  computeAttentionAsync = attention.computeAttentionAsync;
102
143
  batchAttentionCompute = attention.batchAttentionCompute;
103
144
  parallelAttentionCompute = attention.parallelAttentionCompute;
104
- // Hyperbolic math
105
145
  expMap = attention.expMap;
106
146
  logMap = attention.logMap;
107
147
  mobiusAddition = attention.mobiusAddition;
108
148
  poincareDistance = attention.poincareDistance;
109
149
  projectToPoincareBall = attention.projectToPoincareBall;
110
- // Meta
111
- attentionInfo = attention.info;
112
- attentionVersion = attention.version;
150
+ attentionInfo = attention.attentionInfo;
151
+ attentionVersion = attention.attentionVersion;
113
152
  _attentionModule = attention;
114
153
  attentionAvailable = true;
115
154
  return attention;
116
- } catch (e) {
155
+ } catch {
117
156
  _attentionModule = null;
118
157
  attentionAvailable = false;
119
158
  return null;
@@ -384,16 +423,13 @@ program
384
423
  .command('info')
385
424
  .description('Show ruvector information')
386
425
  .action(() => {
387
- // Trigger lazy load of optional modules for availability check
388
- loadGnn();
389
- loadAttention();
390
-
391
426
  console.log(chalk.cyan('\nruvector Information'));
392
427
  console.log(chalk.white(` CLI Version: ${chalk.yellow(packageJson.version)}`));
393
428
 
394
429
  // Try to load ruvector for implementation info
395
430
  if (loadRuvector()) {
396
- const version = typeof getVersion === 'function' ? getVersion() : 'unknown';
431
+ const versionInfo = typeof getVersion === 'function' ? getVersion() : null;
432
+ const version = versionInfo && versionInfo.version ? versionInfo.version : 'unknown';
397
433
  const impl = typeof getImplementationType === 'function' ? getImplementationType() : 'native';
398
434
  console.log(chalk.white(` Core Version: ${chalk.yellow(version)}`));
399
435
  console.log(chalk.white(` Implementation: ${chalk.yellow(impl)}`));
@@ -401,6 +437,7 @@ program
401
437
  console.log(chalk.white(` Core: ${chalk.gray('Not loaded (install @ruvector/core)')}`));
402
438
  }
403
439
 
440
+ loadGnn();
404
441
  console.log(chalk.white(` GNN Module: ${gnnAvailable ? chalk.green('Available') : chalk.gray('Not installed')}`));
405
442
  console.log(chalk.white(` Node Version: ${chalk.yellow(process.version)}`));
406
443
  console.log(chalk.white(` Platform: ${chalk.yellow(process.platform)}`));
@@ -424,10 +461,8 @@ program
424
461
  .action(async (packages, options) => {
425
462
  const { execSync } = require('child_process');
426
463
 
427
- // Trigger lazy load to check availability
428
- loadGnn();
429
-
430
464
  // Available optional packages - all ruvector npm packages
465
+ loadGnn();
431
466
  const availablePackages = {
432
467
  // Core packages
433
468
  core: {
@@ -714,7 +749,7 @@ program
714
749
  // GNN Commands
715
750
  // =============================================================================
716
751
 
717
- // Helper to check GNN availability (triggers lazy load)
752
+ // Helper to check GNN availability
718
753
  function requireGnn() {
719
754
  loadGnn();
720
755
  if (!gnnAvailable) {
@@ -942,7 +977,7 @@ gnnCmd
942
977
  // Attention Commands
943
978
  // =============================================================================
944
979
 
945
- // Helper to require attention module (triggers lazy load)
980
+ // Helper to require attention module
946
981
  function requireAttention() {
947
982
  loadAttention();
948
983
  if (!attentionAvailable) {
@@ -1375,10 +1410,6 @@ program
1375
1410
  .action(async (options) => {
1376
1411
  const { execSync } = require('child_process');
1377
1412
 
1378
- // Trigger lazy load of optional modules for availability check
1379
- loadGnn();
1380
- loadAttention();
1381
-
1382
1413
  console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
1383
1414
  console.log(chalk.cyan(' RuVector Doctor'));
1384
1415
  console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
@@ -1465,6 +1496,7 @@ program
1465
1496
  }
1466
1497
 
1467
1498
  // Check @ruvector/gnn
1499
+ loadGnn();
1468
1500
  if (gnnAvailable) {
1469
1501
  console.log(chalk.green(` ✓ @ruvector/gnn installed`));
1470
1502
  } else {
@@ -1472,6 +1504,7 @@ program
1472
1504
  }
1473
1505
 
1474
1506
  // Check @ruvector/attention
1507
+ loadAttention();
1475
1508
  if (attentionAvailable) {
1476
1509
  console.log(chalk.green(` ✓ @ruvector/attention installed`));
1477
1510
  } else {
@@ -7167,165 +7200,356 @@ rvfCmd.command('export <path>')
7167
7200
  } catch (e) { console.error(chalk.red(e.message)); process.exit(1); }
7168
7201
  });
7169
7202
 
7170
- // RVF example download/list commands
7171
- const RVF_EXAMPLES = [
7172
- { name: 'basic_store', size: '152 KB', desc: '1,000 vectors, dim 128, cosine metric' },
7173
- { name: 'semantic_search', size: '755 KB', desc: 'Semantic search with HNSW index' },
7174
- { name: 'rag_pipeline', size: '303 KB', desc: 'RAG pipeline with embeddings' },
7175
- { name: 'embedding_cache', size: '755 KB', desc: 'Cached embedding store' },
7176
- { name: 'quantization', size: '1.5 MB', desc: 'PQ-compressed vectors' },
7177
- { name: 'progressive_index', size: '2.5 MB', desc: 'Large-scale progressive HNSW index' },
7178
- { name: 'filtered_search', size: '255 KB', desc: 'Metadata-filtered vector search' },
7179
- { name: 'recommendation', size: '102 KB', desc: 'Recommendation engine vectors' },
7180
- { name: 'agent_memory', size: '32 KB', desc: 'AI agent episodic memory' },
7181
- { name: 'swarm_knowledge', size: '86 KB', desc: 'Multi-agent shared knowledge base' },
7182
- { name: 'experience_replay', size: '27 KB', desc: 'RL experience replay buffer' },
7183
- { name: 'tool_cache', size: '26 KB', desc: 'MCP tool call cache' },
7184
- { name: 'mcp_in_rvf', size: '32 KB', desc: 'MCP server embedded in RVF' },
7185
- { name: 'ruvbot', size: '51 KB', desc: 'Chatbot knowledge store' },
7186
- { name: 'claude_code_appliance', size: '17 KB', desc: 'Claude Code cognitive appliance' },
7187
- { name: 'lineage_parent', size: '52 KB', desc: 'COW parent file' },
7188
- { name: 'lineage_child', size: '26 KB', desc: 'COW child (derived) file' },
7189
- { name: 'self_booting', size: '31 KB', desc: 'Self-booting with KERNEL_SEG' },
7190
- { name: 'linux_microkernel', size: '15 KB', desc: 'Embedded Linux microkernel' },
7191
- { name: 'ebpf_accelerator', size: '153 KB', desc: 'eBPF distance accelerator' },
7192
- { name: 'browser_wasm', size: '14 KB', desc: 'Browser WASM module embedded' },
7193
- { name: 'tee_attestation', size: '102 KB', desc: 'TEE attestation with witnesses' },
7194
- { name: 'zero_knowledge', size: '52 KB', desc: 'ZK-proof witness chain' },
7195
- { name: 'sealed_engine', size: '208 KB', desc: 'Sealed inference engine' },
7196
- { name: 'access_control', size: '77 KB', desc: 'Permission-gated vectors' },
7197
- { name: 'financial_signals', size: '202 KB', desc: 'Financial signal vectors' },
7198
- { name: 'medical_imaging', size: '302 KB', desc: 'Medical imaging embeddings' },
7199
- { name: 'legal_discovery', size: '903 KB', desc: 'Legal document discovery' },
7200
- { name: 'multimodal_fusion', size: '804 KB', desc: 'Multi-modal embedding fusion' },
7201
- { name: 'hyperbolic_taxonomy', size: '23 KB', desc: 'Hyperbolic space taxonomy' },
7202
- { name: 'network_telemetry', size: '16 KB', desc: 'Network telemetry vectors' },
7203
- { name: 'postgres_bridge', size: '152 KB', desc: 'PostgreSQL bridge vectors' },
7204
- { name: 'ruvllm_inference', size: '133 KB', desc: 'RuvLLM inference cache' },
7205
- { name: 'serverless', size: '509 KB', desc: 'Serverless deployment bundle' },
7206
- { name: 'edge_iot', size: '27 KB', desc: 'Edge/IoT lightweight store' },
7207
- { name: 'dedup_detector', size: '153 KB', desc: 'Deduplication detector' },
7208
- { name: 'compacted', size: '77 KB', desc: 'Post-compaction example' },
7209
- { name: 'posix_fileops', size: '52 KB', desc: 'POSIX file operations test' },
7210
- { name: 'network_sync_a', size: '52 KB', desc: 'Network sync peer A' },
7211
- { name: 'network_sync_b', size: '52 KB', desc: 'Network sync peer B' },
7212
- { name: 'agent_handoff_a', size: '31 KB', desc: 'Agent handoff source' },
7213
- { name: 'agent_handoff_b', size: '11 KB', desc: 'Agent handoff target' },
7214
- { name: 'reasoning_parent', size: '5.6 KB', desc: 'Reasoning chain parent' },
7215
- { name: 'reasoning_child', size: '8.1 KB', desc: 'Reasoning chain child' },
7216
- { name: 'reasoning_grandchild', size: '162 B', desc: 'Minimal derived file' },
7203
+ // RVF example catalog - manifest-based with local cache + SHA-256 verification
7204
+ const BUILTIN_RVF_CATALOG = [
7205
+ // Minimal fallback if GCS and cache are both unavailable
7206
+ { name: 'basic_store', size_human: '152 KB', description: '1,000 vectors, dim 128, cosine metric', category: 'core' },
7207
+ { name: 'semantic_search', size_human: '755 KB', description: 'Semantic search with HNSW index', category: 'core' },
7208
+ { name: 'rag_pipeline', size_human: '303 KB', description: 'RAG pipeline with embeddings', category: 'core' },
7209
+ { name: 'agent_memory', size_human: '32 KB', description: 'AI agent episodic memory', category: 'ai' },
7210
+ { name: 'swarm_knowledge', size_human: '86 KB', description: 'Multi-agent shared knowledge base', category: 'ai' },
7211
+ { name: 'self_booting', size_human: '31 KB', description: 'Self-booting with KERNEL_SEG', category: 'compute' },
7212
+ { name: 'ebpf_accelerator', size_human: '153 KB', description: 'eBPF distance accelerator', category: 'compute' },
7213
+ { name: 'tee_attestation', size_human: '102 KB', description: 'TEE attestation with witnesses', category: 'security' },
7214
+ { name: 'claude_code_appliance', size_human: '17 KB', description: 'Claude Code cognitive appliance', category: 'integration' },
7215
+ { name: 'lineage_parent', size_human: '52 KB', description: 'COW parent file', category: 'lineage' },
7216
+ { name: 'financial_signals', size_human: '202 KB', description: 'Financial signal vectors', category: 'industry' },
7217
+ { name: 'mcp_in_rvf', size_human: '32 KB', description: 'MCP server embedded in RVF', category: 'integration' },
7217
7218
  ];
7218
7219
 
7219
- const RVF_BASE_URL = 'https://raw.githubusercontent.com/ruvnet/ruvector/main/examples/rvf/output';
7220
+ const GCS_MANIFEST_URL = 'https://storage.googleapis.com/ruvector-examples/manifest.json';
7221
+ const GITHUB_RAW_BASE = 'https://raw.githubusercontent.com/ruvnet/ruvector/main/examples/rvf/output';
7222
+
7223
+ function getRvfCacheDir() {
7224
+ const os = require('os');
7225
+ return path.join(os.homedir(), '.ruvector', 'examples');
7226
+ }
7227
+
7228
+ async function getRvfManifest(opts = {}) {
7229
+ const cacheDir = getRvfCacheDir();
7230
+ const manifestPath = path.join(cacheDir, 'manifest.json');
7231
+
7232
+ // Check cache (1 hour TTL)
7233
+ if (!opts.refresh && fs.existsSync(manifestPath)) {
7234
+ try {
7235
+ const stat = fs.statSync(manifestPath);
7236
+ const age = Date.now() - stat.mtimeMs;
7237
+ if (age < 3600000) {
7238
+ return JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
7239
+ }
7240
+ } catch {}
7241
+ }
7242
+
7243
+ if (opts.offline) {
7244
+ // Offline mode - use cache even if stale
7245
+ if (fs.existsSync(manifestPath)) {
7246
+ return JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
7247
+ }
7248
+ return { examples: BUILTIN_RVF_CATALOG, base_url: GITHUB_RAW_BASE, version: 'builtin', offline: true };
7249
+ }
7250
+
7251
+ // Try GCS
7252
+ try {
7253
+ const resp = await fetch(GCS_MANIFEST_URL);
7254
+ if (resp.ok) {
7255
+ const manifest = await resp.json();
7256
+ fs.mkdirSync(cacheDir, { recursive: true });
7257
+ fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));
7258
+ return manifest;
7259
+ }
7260
+ } catch {}
7261
+
7262
+ // Fallback: stale cache
7263
+ if (fs.existsSync(manifestPath)) {
7264
+ try {
7265
+ const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
7266
+ manifest._stale = true;
7267
+ return manifest;
7268
+ } catch {}
7269
+ }
7270
+
7271
+ // Final fallback: builtin catalog with GitHub URLs
7272
+ return { examples: BUILTIN_RVF_CATALOG, base_url: GITHUB_RAW_BASE, version: 'builtin' };
7273
+ }
7274
+
7275
+ function verifyRvfFile(filePath, expectedSha256) {
7276
+ if (!expectedSha256) return { verified: false, reason: 'No checksum available' };
7277
+ const crypto = require('crypto');
7278
+ const hash = crypto.createHash('sha256');
7279
+ const data = fs.readFileSync(filePath);
7280
+ hash.update(data);
7281
+ const actual = hash.digest('hex');
7282
+ return { verified: actual === expectedSha256, actual, expected: expectedSha256 };
7283
+ }
7220
7284
 
7221
7285
  rvfCmd.command('examples')
7222
- .description('List available example .rvf files')
7286
+ .description('List available example .rvf files from the catalog')
7287
+ .option('--category <cat>', 'Filter by category (core, ai, security, compute, lineage, industry, network, integration)')
7288
+ .option('--refresh', 'Force refresh manifest from server')
7289
+ .option('--offline', 'Use only cached data')
7223
7290
  .option('--json', 'Output as JSON')
7224
- .action((opts) => {
7291
+ .action(async (opts) => {
7292
+ const manifest = await getRvfManifest({ refresh: opts.refresh, offline: opts.offline });
7293
+ let examples = manifest.examples || [];
7294
+
7295
+ if (opts.category) {
7296
+ examples = examples.filter(e => e.category === opts.category);
7297
+ }
7298
+
7225
7299
  if (opts.json) {
7226
- console.log(JSON.stringify(RVF_EXAMPLES, null, 2));
7300
+ console.log(JSON.stringify({ version: manifest.version, count: examples.length, examples }, null, 2));
7227
7301
  return;
7228
7302
  }
7229
- console.log(chalk.bold.cyan('\nAvailable RVF Example Files (45 total)\n'));
7230
- console.log(chalk.dim(`Download: npx ruvector rvf download <name>\n`));
7231
- const maxName = Math.max(...RVF_EXAMPLES.map(e => e.name.length));
7232
- const maxSize = Math.max(...RVF_EXAMPLES.map(e => e.size.length));
7233
- for (const ex of RVF_EXAMPLES) {
7234
- const name = chalk.green(ex.name.padEnd(maxName));
7235
- const size = chalk.yellow(ex.size.padStart(maxSize));
7236
- console.log(` ${name} ${size} ${chalk.dim(ex.desc)}`);
7303
+
7304
+ console.log(chalk.bold.cyan(`\nRVF Example Files (${examples.length} of ${(manifest.examples || []).length} total)\n`));
7305
+ if (manifest._stale) console.log(chalk.yellow(' (Using stale cached manifest)\n'));
7306
+ if (manifest.version === 'builtin') console.log(chalk.yellow(' (Using built-in catalog -- run without --offline for full list)\n'));
7307
+ console.log(chalk.dim(` Download: npx ruvector rvf download <name>`));
7308
+ console.log(chalk.dim(` Filter: npx ruvector rvf examples --category ai\n`));
7309
+
7310
+ // Group by category
7311
+ const grouped = {};
7312
+ for (const ex of examples) {
7313
+ const cat = ex.category || 'other';
7314
+ if (!grouped[cat]) grouped[cat] = [];
7315
+ grouped[cat].push(ex);
7316
+ }
7317
+
7318
+ for (const [cat, items] of Object.entries(grouped).sort()) {
7319
+ const catDesc = manifest.categories ? manifest.categories[cat] || '' : '';
7320
+ console.log(chalk.bold.yellow(` ${cat} ${catDesc ? chalk.dim(`-- ${catDesc}`) : ''}`));
7321
+ for (const ex of items) {
7322
+ const name = chalk.green(ex.name.padEnd(28));
7323
+ const size = chalk.yellow((ex.size_human || '').padStart(8));
7324
+ console.log(` ${name} ${size} ${chalk.dim(ex.description || '')}`);
7325
+ }
7326
+ console.log();
7327
+ }
7328
+
7329
+ if (manifest.categories && !opts.category) {
7330
+ console.log(chalk.dim(` Categories: ${Object.keys(manifest.categories).join(', ')}\n`));
7237
7331
  }
7238
- console.log(chalk.dim(`\nFull catalog: https://github.com/ruvnet/ruvector/tree/main/examples/rvf/output\n`));
7239
7332
  });
7240
7333
 
7241
7334
  rvfCmd.command('download [names...]')
7242
- .description('Download example .rvf files from GitHub')
7243
- .option('-a, --all', 'Download all 45 examples (~11 MB)')
7335
+ .description('Download example .rvf files with integrity verification')
7336
+ .option('-a, --all', 'Download all examples')
7337
+ .option('-c, --category <cat>', 'Download all examples in a category')
7244
7338
  .option('-o, --output <dir>', 'Output directory', '.')
7339
+ .option('--verify', 'Re-verify cached files')
7340
+ .option('--no-cache', 'Skip cache, always download fresh')
7341
+ .option('--offline', 'Use only cached files')
7342
+ .option('--refresh', 'Refresh manifest before download')
7245
7343
  .action(async (names, opts) => {
7246
- const https = require('https');
7247
- const ALLOWED_REDIRECT_HOSTS = ['raw.githubusercontent.com', 'objects.githubusercontent.com', 'github.com'];
7248
- const sanitizeFileName = (name) => {
7249
- // Strip path separators and parent directory references
7250
- const base = path.basename(name);
7251
- // Only allow alphanumeric, underscores, hyphens, dots
7252
- if (!/^[\w\-.]+$/.test(base)) throw new Error(`Invalid filename: ${base}`);
7253
- return base;
7254
- };
7255
- const downloadFile = (url, dest) => new Promise((resolve, reject) => {
7256
- const file = fs.createWriteStream(dest);
7257
- https.get(url, (res) => {
7258
- if (res.statusCode === 302 || res.statusCode === 301) {
7259
- const redirectUrl = res.headers.location;
7260
- try {
7261
- const redirectHost = new URL(redirectUrl).hostname;
7262
- if (!ALLOWED_REDIRECT_HOSTS.includes(redirectHost)) {
7263
- file.close();
7264
- reject(new Error(`Redirect to untrusted host: ${redirectHost}`));
7265
- return;
7266
- }
7267
- } catch { file.close(); reject(new Error('Invalid redirect URL')); return; }
7268
- https.get(redirectUrl, (res2) => { res2.pipe(file); file.on('finish', () => { file.close(); resolve(); }); }).on('error', reject);
7269
- return;
7270
- }
7271
- if (res.statusCode !== 200) { file.close(); fs.unlinkSync(dest); reject(new Error(`HTTP ${res.statusCode}`)); return; }
7272
- res.pipe(file);
7273
- file.on('finish', () => { file.close(); resolve(); });
7274
- }).on('error', reject);
7275
- });
7344
+ const manifest = await getRvfManifest({ refresh: opts.refresh, offline: opts.offline });
7345
+ const examples = manifest.examples || [];
7346
+ const baseUrl = manifest.base_url || GITHUB_RAW_BASE;
7276
7347
 
7277
7348
  let toDownload = [];
7278
7349
  if (opts.all) {
7279
- toDownload = RVF_EXAMPLES.map(e => e.name);
7350
+ toDownload = examples;
7351
+ } else if (opts.category) {
7352
+ toDownload = examples.filter(e => e.category === opts.category);
7353
+ if (!toDownload.length) {
7354
+ console.error(chalk.red(`No examples in category '${opts.category}'`));
7355
+ process.exit(1);
7356
+ }
7280
7357
  } else if (names && names.length > 0) {
7281
- toDownload = names;
7358
+ for (const name of names) {
7359
+ const cleanName = name.replace(/\.rvf$/, '');
7360
+ const found = examples.find(e => e.name === cleanName);
7361
+ if (found) {
7362
+ toDownload.push(found);
7363
+ } else {
7364
+ console.error(chalk.red(`Unknown example: ${cleanName}. Run 'npx ruvector rvf examples' to list.`));
7365
+ }
7366
+ }
7367
+ if (!toDownload.length) process.exit(1);
7282
7368
  } else {
7283
- console.error(chalk.red('Specify example names or use --all. Run `npx ruvector rvf examples` to list.'));
7369
+ console.error(chalk.red('Specify example names, --all, or --category. Run `npx ruvector rvf examples` to list.'));
7284
7370
  process.exit(1);
7285
7371
  }
7286
7372
 
7287
7373
  const outDir = path.resolve(opts.output);
7288
7374
  if (!fs.existsSync(outDir)) fs.mkdirSync(outDir, { recursive: true });
7375
+ const cacheDir = getRvfCacheDir();
7376
+ fs.mkdirSync(cacheDir, { recursive: true });
7289
7377
 
7290
7378
  console.log(chalk.bold.cyan(`\nDownloading ${toDownload.length} .rvf file(s) to ${outDir}\n`));
7291
- let ok = 0, fail = 0;
7292
- for (const name of toDownload) {
7293
- const rawName = name.endsWith('.rvf') ? name : `${name}.rvf`;
7294
- let fileName;
7295
- try { fileName = sanitizeFileName(rawName); } catch (e) {
7296
- console.log(chalk.red(`SKIPPED: ${e.message}`));
7379
+
7380
+ const https = require('https');
7381
+ const crypto = require('crypto');
7382
+ const ALLOWED_REDIRECT_HOSTS = ['raw.githubusercontent.com', 'objects.githubusercontent.com', 'github.com', 'storage.googleapis.com'];
7383
+
7384
+ const downloadFile = (url, dest) => new Promise((resolve, reject) => {
7385
+ const doGet = (getUrl) => {
7386
+ const mod = getUrl.startsWith('https') ? https : require('http');
7387
+ mod.get(getUrl, (res) => {
7388
+ if (res.statusCode === 301 || res.statusCode === 302) {
7389
+ const loc = res.headers.location;
7390
+ try {
7391
+ const host = new URL(loc).hostname;
7392
+ if (!ALLOWED_REDIRECT_HOSTS.includes(host)) {
7393
+ reject(new Error(`Redirect to untrusted host: ${host}`));
7394
+ return;
7395
+ }
7396
+ } catch { reject(new Error('Invalid redirect URL')); return; }
7397
+ doGet(loc);
7398
+ return;
7399
+ }
7400
+ if (res.statusCode !== 200) {
7401
+ reject(new Error(`HTTP ${res.statusCode}`));
7402
+ return;
7403
+ }
7404
+ const file = fs.createWriteStream(dest);
7405
+ res.pipe(file);
7406
+ file.on('finish', () => { file.close(); resolve(); });
7407
+ file.on('error', reject);
7408
+ }).on('error', reject);
7409
+ };
7410
+ doGet(url);
7411
+ });
7412
+
7413
+ let ok = 0, cached = 0, fail = 0, verified = 0;
7414
+
7415
+ for (const ex of toDownload) {
7416
+ const fileName = `${ex.name}.rvf`;
7417
+ // Sanitize filename
7418
+ if (!/^[\w\-.]+$/.test(fileName)) {
7419
+ console.log(` ${chalk.red('SKIP')} ${fileName} (invalid filename)`);
7420
+ fail++;
7421
+ continue;
7422
+ }
7423
+
7424
+ const destPath = path.join(outDir, fileName);
7425
+ const cachePath = path.join(cacheDir, fileName);
7426
+
7427
+ // Path containment check
7428
+ if (!path.resolve(destPath).startsWith(path.resolve(outDir))) {
7429
+ console.log(` ${chalk.red('SKIP')} ${fileName} (path traversal)`);
7297
7430
  fail++;
7298
7431
  continue;
7299
7432
  }
7300
- // Validate against known examples when not using --all
7301
- if (!opts.all) {
7302
- const baseName = fileName.replace(/\.rvf$/, '');
7303
- if (!RVF_EXAMPLES.some(e => e.name === baseName)) {
7304
- console.log(chalk.red(`SKIPPED: Unknown example '${baseName}'. Run 'npx ruvector rvf examples' to list.`));
7305
- fail++;
7433
+
7434
+ // Check cache first
7435
+ if (opts.cache !== false && fs.existsSync(cachePath) && !opts.verify) {
7436
+ // Verify if checksum available
7437
+ if (ex.sha256) {
7438
+ const check = verifyRvfFile(cachePath, ex.sha256);
7439
+ if (check.verified) {
7440
+ // Copy from cache
7441
+ if (path.resolve(destPath) !== path.resolve(cachePath)) {
7442
+ fs.copyFileSync(cachePath, destPath);
7443
+ }
7444
+ console.log(` ${chalk.green('CACHED')} ${chalk.cyan(fileName)} ${chalk.dim(ex.size_human || '')}`);
7445
+ cached++;
7446
+ continue;
7447
+ } else {
7448
+ // Cache corrupted, re-download
7449
+ console.log(` ${chalk.yellow('STALE')} ${fileName} -- re-downloading`);
7450
+ }
7451
+ } else {
7452
+ // Copy from cache (no checksum to verify)
7453
+ if (path.resolve(destPath) !== path.resolve(cachePath)) {
7454
+ fs.copyFileSync(cachePath, destPath);
7455
+ }
7456
+ console.log(` ${chalk.green('CACHED')} ${chalk.cyan(fileName)} ${chalk.dim(ex.size_human || '')}`);
7457
+ cached++;
7306
7458
  continue;
7307
7459
  }
7308
7460
  }
7309
- const url = `${RVF_BASE_URL}/${encodeURIComponent(fileName)}`;
7310
- const dest = path.join(outDir, fileName);
7311
- // Path containment check
7312
- if (!path.resolve(dest).startsWith(path.resolve(outDir) + path.sep) && path.resolve(dest) !== path.resolve(outDir)) {
7313
- console.log(chalk.red(`SKIPPED: Path traversal detected for '${fileName}'`));
7461
+
7462
+ if (opts.offline) {
7463
+ console.log(` ${chalk.yellow('SKIP')} ${fileName} (offline mode, not cached)`);
7314
7464
  fail++;
7315
7465
  continue;
7316
7466
  }
7467
+
7468
+ // Download
7469
+ const url = `${baseUrl}/${encodeURIComponent(fileName)}`;
7317
7470
  try {
7318
- process.stdout.write(chalk.dim(` ${fileName} ... `));
7319
- await downloadFile(url, dest);
7320
- const stat = fs.statSync(dest);
7321
- console.log(chalk.green(`OK (${(stat.size / 1024).toFixed(0)} KB)`));
7471
+ await downloadFile(url, cachePath);
7472
+
7473
+ // SHA-256 verify
7474
+ if (ex.sha256) {
7475
+ const check = verifyRvfFile(cachePath, ex.sha256);
7476
+ if (check.verified) {
7477
+ verified++;
7478
+ console.log(` ${chalk.green('OK')} ${chalk.cyan(fileName)} ${chalk.dim(ex.size_human || '')} ${chalk.green('SHA-256 verified')}`);
7479
+ } else {
7480
+ console.log(` ${chalk.red('FAIL')} ${fileName} -- SHA-256 mismatch! Expected ${ex.sha256.slice(0, 12)}... got ${check.actual.slice(0, 12)}...`);
7481
+ fs.unlinkSync(cachePath);
7482
+ fail++;
7483
+ continue;
7484
+ }
7485
+ } else {
7486
+ console.log(` ${chalk.green('OK')} ${chalk.cyan(fileName)} ${chalk.dim(ex.size_human || '')} ${chalk.yellow('(no checksum)')}`);
7487
+ }
7488
+
7489
+ // Copy to output dir if different from cache
7490
+ if (path.resolve(destPath) !== path.resolve(cachePath)) {
7491
+ fs.copyFileSync(cachePath, destPath);
7492
+ }
7322
7493
  ok++;
7323
7494
  } catch (e) {
7324
- console.log(chalk.red(`FAILED: ${e.message}`));
7495
+ console.log(` ${chalk.red('FAIL')} ${fileName}: ${e.message}`);
7325
7496
  fail++;
7326
7497
  }
7327
7498
  }
7328
- console.log(chalk.bold(`\nDone: ${ok} downloaded, ${fail} failed\n`));
7499
+
7500
+ console.log(chalk.bold(`\n Downloaded: ${ok}, Cached: ${cached}, Failed: ${fail}${verified ? `, Verified: ${verified}` : ''}\n`));
7501
+ });
7502
+
7503
+ // RVF cache management
7504
+ rvfCmd.command('cache <action>')
7505
+ .description('Manage local .rvf example cache (status, clear)')
7506
+ .action((action) => {
7507
+ const cacheDir = getRvfCacheDir();
7508
+
7509
+ switch (action) {
7510
+ case 'status': {
7511
+ if (!fs.existsSync(cacheDir)) {
7512
+ console.log(chalk.dim('\n No cache directory found.\n'));
7513
+ return;
7514
+ }
7515
+ const files = fs.readdirSync(cacheDir).filter(f => f.endsWith('.rvf'));
7516
+ const manifestExists = fs.existsSync(path.join(cacheDir, 'manifest.json'));
7517
+ let totalSize = 0;
7518
+ for (const f of files) {
7519
+ totalSize += fs.statSync(path.join(cacheDir, f)).size;
7520
+ }
7521
+ console.log(chalk.bold.cyan('\nRVF Cache Status\n'));
7522
+ console.log(` ${chalk.bold('Location:')} ${cacheDir}`);
7523
+ console.log(` ${chalk.bold('Files:')} ${files.length} .rvf files`);
7524
+ console.log(` ${chalk.bold('Size:')} ${(totalSize / (1024 * 1024)).toFixed(1)} MB`);
7525
+ console.log(` ${chalk.bold('Manifest:')} ${manifestExists ? chalk.green('cached') : chalk.dim('not cached')}`);
7526
+ if (manifestExists) {
7527
+ const stat = fs.statSync(path.join(cacheDir, 'manifest.json'));
7528
+ const age = Date.now() - stat.mtimeMs;
7529
+ const fresh = age < 3600000;
7530
+ console.log(` ${chalk.bold('Age:')} ${Math.floor(age / 60000)} min ${fresh ? chalk.green('(fresh)') : chalk.yellow('(stale)')}`);
7531
+ }
7532
+ console.log();
7533
+ break;
7534
+ }
7535
+ case 'clear': {
7536
+ if (!fs.existsSync(cacheDir)) {
7537
+ console.log(chalk.dim('\n No cache to clear.\n'));
7538
+ return;
7539
+ }
7540
+ const files = fs.readdirSync(cacheDir);
7541
+ let cleared = 0;
7542
+ for (const f of files) {
7543
+ fs.unlinkSync(path.join(cacheDir, f));
7544
+ cleared++;
7545
+ }
7546
+ console.log(chalk.green(`\n Cleared ${cleared} cached files from ${cacheDir}\n`));
7547
+ break;
7548
+ }
7549
+ default:
7550
+ console.error(chalk.red(`Unknown cache action: ${action}. Use: status, clear`));
7551
+ process.exit(1);
7552
+ }
7329
7553
  });
7330
7554
 
7331
7555
  // MCP Server command
@@ -7333,8 +7557,15 @@ const mcpCmd = program.command('mcp').description('MCP (Model Context Protocol)
7333
7557
 
7334
7558
  mcpCmd.command('start')
7335
7559
  .description('Start the RuVector MCP server')
7336
- .action(() => {
7337
- // Execute the mcp-server.js directly
7560
+ .option('-t, --transport <type>', 'Transport type: stdio or sse', 'stdio')
7561
+ .option('-p, --port <number>', 'Port for SSE transport', '8080')
7562
+ .option('--host <host>', 'Host to bind for SSE', '0.0.0.0')
7563
+ .action((opts) => {
7564
+ if (opts.transport === 'sse') {
7565
+ process.env.MCP_TRANSPORT = 'sse';
7566
+ process.env.MCP_PORT = opts.port;
7567
+ process.env.MCP_HOST = opts.host;
7568
+ }
7338
7569
  const mcpServerPath = path.join(__dirname, 'mcp-server.js');
7339
7570
  if (!fs.existsSync(mcpServerPath)) {
7340
7571
  console.error(chalk.red('Error: MCP server not found at'), mcpServerPath);
@@ -7400,20 +7631,232 @@ mcpCmd.command('info')
7400
7631
  console.log();
7401
7632
  });
7402
7633
 
7634
+ // ============================================================================
7635
+ // MCP tools subcommand
7636
+ // ============================================================================
7637
+
7638
+ mcpCmd.command('tools')
7639
+ .description('List all MCP tools organized by group')
7640
+ .option('--group <name>', 'Filter by group (hooks, workers, rvf, rvlite, brain, edge, identity)')
7641
+ .option('--json', 'Output as JSON')
7642
+ .action((opts) => {
7643
+ const toolGroups = {
7644
+ 'hooks-core': [
7645
+ { name: 'hooks_stats', args: '(none)', desc: 'Get intelligence statistics' },
7646
+ { name: 'hooks_route', args: 'task, file?', desc: 'Route task to best agent' },
7647
+ { name: 'hooks_remember', args: 'content, type?', desc: 'Store context in vector memory' },
7648
+ { name: 'hooks_recall', args: 'query, limit?', desc: 'Search vector memory' },
7649
+ { name: 'hooks_init', args: 'project_path?, force?', desc: 'Initialize hooks in project' },
7650
+ { name: 'hooks_pretrain', args: 'scan_path?, patterns?', desc: 'Pretrain from repository' },
7651
+ { name: 'hooks_build_agents', args: 'project_path?', desc: 'Generate agent configs' },
7652
+ { name: 'hooks_verify', args: '(none)', desc: 'Verify hooks configuration' },
7653
+ { name: 'hooks_doctor', args: 'fix?', desc: 'Diagnose setup issues' },
7654
+ { name: 'hooks_export', args: 'format?', desc: 'Export intelligence data' },
7655
+ ],
7656
+ 'hooks-trajectory': [
7657
+ { name: 'hooks_trajectory_start', args: 'task, context?', desc: 'Start learning trajectory' },
7658
+ { name: 'hooks_trajectory_step', args: 'trajectory_id, action, result', desc: 'Record trajectory step' },
7659
+ { name: 'hooks_trajectory_end', args: 'trajectory_id, outcome, score?', desc: 'End trajectory with outcome' },
7660
+ ],
7661
+ 'hooks-coedit': [
7662
+ { name: 'hooks_pre_edit', args: 'file, changes', desc: 'Pre-edit analysis' },
7663
+ { name: 'hooks_post_edit', args: 'file, changes, result', desc: 'Post-edit learning' },
7664
+ { name: 'hooks_pre_command', args: 'command, args?', desc: 'Pre-command analysis' },
7665
+ { name: 'hooks_post_command', args: 'command, exit_code, output?', desc: 'Post-command learning' },
7666
+ { name: 'hooks_pre_task', args: 'task, context?', desc: 'Pre-task routing' },
7667
+ { name: 'hooks_post_task', args: 'task, result, duration?', desc: 'Post-task learning' },
7668
+ ],
7669
+ 'hooks-errors': [
7670
+ { name: 'hooks_error_learn', args: 'error, context?', desc: 'Learn from errors' },
7671
+ { name: 'hooks_error_patterns', args: 'limit?', desc: 'Get learned error patterns' },
7672
+ { name: 'hooks_error_suggest', args: 'error', desc: 'Suggest fix for error' },
7673
+ ],
7674
+ 'hooks-analysis': [
7675
+ { name: 'hooks_complexity', args: 'file', desc: 'Analyze code complexity' },
7676
+ { name: 'hooks_dependencies', args: 'file', desc: 'Analyze dependencies' },
7677
+ { name: 'hooks_security_scan', args: 'file', desc: 'Security vulnerability scan' },
7678
+ { name: 'hooks_test_coverage', args: 'file', desc: 'Estimate test coverage' },
7679
+ { name: 'hooks_dead_code', args: 'file', desc: 'Detect dead code' },
7680
+ { name: 'hooks_duplicate_code', args: 'file', desc: 'Find duplicate code' },
7681
+ ],
7682
+ 'hooks-learning': [
7683
+ { name: 'hooks_pattern_store', args: 'pattern, category, confidence?', desc: 'Store a learned pattern' },
7684
+ { name: 'hooks_pattern_search', args: 'query, category?, limit?', desc: 'Search patterns' },
7685
+ { name: 'hooks_attention', args: 'query, context', desc: 'Attention-weighted relevance' },
7686
+ ],
7687
+ 'hooks-compress': [
7688
+ { name: 'hooks_compress_context', args: 'content, max_tokens?', desc: 'Compress context' },
7689
+ { name: 'hooks_compress_code', args: 'code, language?', desc: 'Compress code representation' },
7690
+ { name: 'hooks_compress_diff', args: 'diff', desc: 'Compress diff' },
7691
+ ],
7692
+ 'hooks-events': [
7693
+ { name: 'hooks_session_start', args: '(none)', desc: 'Signal session start' },
7694
+ { name: 'hooks_session_end', args: 'summary?', desc: 'Signal session end' },
7695
+ { name: 'hooks_notify', args: 'message, level?', desc: 'Send notification' },
7696
+ { name: 'hooks_transfer', args: 'target, data', desc: 'Transfer context' },
7697
+ ],
7698
+ 'hooks-model': [
7699
+ { name: 'hooks_model_route', args: 'task, complexity?', desc: 'Route to optimal model tier' },
7700
+ { name: 'hooks_model_outcome', args: 'model, task, success, tokens?', desc: 'Record model outcome' },
7701
+ { name: 'hooks_model_stats', args: '(none)', desc: 'Get model routing stats' },
7702
+ ],
7703
+ 'workers': [
7704
+ { name: 'workers_list', args: '(none)', desc: 'List available workers' },
7705
+ { name: 'workers_status', args: 'worker_id?', desc: 'Get worker status' },
7706
+ { name: 'workers_dispatch', args: 'worker, task, args?', desc: 'Dispatch task to worker' },
7707
+ { name: 'workers_cancel', args: 'job_id', desc: 'Cancel running job' },
7708
+ { name: 'workers_detect', args: 'file', desc: 'Auto-detect applicable workers' },
7709
+ { name: 'workers_complexity', args: 'file', desc: 'Worker: complexity analysis' },
7710
+ { name: 'workers_dependencies', args: 'file', desc: 'Worker: dependency analysis' },
7711
+ { name: 'workers_security', args: 'file', desc: 'Worker: security scan' },
7712
+ { name: 'workers_coverage', args: 'file', desc: 'Worker: test coverage' },
7713
+ { name: 'workers_dead_code', args: 'file', desc: 'Worker: dead code detection' },
7714
+ { name: 'workers_duplicates', args: 'file', desc: 'Worker: duplicate detection' },
7715
+ { name: 'workers_performance', args: 'file', desc: 'Worker: performance analysis' },
7716
+ ],
7717
+ 'rvf': [
7718
+ { name: 'rvf_create', args: 'path, dimension?, metric?', desc: 'Create new .rvf vector store' },
7719
+ { name: 'rvf_open', args: 'path', desc: 'Open existing .rvf store' },
7720
+ { name: 'rvf_ingest', args: 'path, vectors, ids?, metadata?', desc: 'Insert vectors' },
7721
+ { name: 'rvf_query', args: 'path, vector, k?, filter?', desc: 'Query nearest neighbors' },
7722
+ { name: 'rvf_delete', args: 'path, ids', desc: 'Delete vectors by ID' },
7723
+ { name: 'rvf_status', args: 'path', desc: 'Get store status' },
7724
+ { name: 'rvf_compact', args: 'path', desc: 'Compact store' },
7725
+ { name: 'rvf_derive', args: 'parent_path, child_path', desc: 'COW-branch to child store' },
7726
+ { name: 'rvf_segments', args: 'path', desc: 'List file segments' },
7727
+ { name: 'rvf_examples', args: '(none)', desc: 'List example .rvf files' },
7728
+ ],
7729
+ 'rvlite': [
7730
+ { name: 'rvlite_sql', args: 'query, db_path?', desc: 'Execute SQL query' },
7731
+ { name: 'rvlite_cypher', args: 'query, db_path?', desc: 'Execute Cypher graph query' },
7732
+ { name: 'rvlite_sparql', args: 'query, db_path?', desc: 'Execute SPARQL RDF query' },
7733
+ ],
7734
+ 'brain': [
7735
+ { name: 'brain_search', args: 'query, category?, limit?', desc: 'Semantic search shared brain' },
7736
+ { name: 'brain_share', args: 'title, content, category, tags?, code_snippet?', desc: 'Share knowledge' },
7737
+ { name: 'brain_get', args: 'id', desc: 'Retrieve memory by ID' },
7738
+ { name: 'brain_vote', args: 'id, direction', desc: 'Quality vote (up/down)' },
7739
+ { name: 'brain_list', args: 'category?, limit?', desc: 'List recent memories' },
7740
+ { name: 'brain_delete', args: 'id', desc: 'Delete own contribution' },
7741
+ { name: 'brain_status', args: '(none)', desc: 'System health' },
7742
+ { name: 'brain_drift', args: 'domain?', desc: 'Check knowledge drift' },
7743
+ { name: 'brain_partition', args: 'domain?, min_cluster_size?', desc: 'Knowledge topology' },
7744
+ { name: 'brain_transfer', args: 'source_domain, target_domain', desc: 'Cross-domain transfer' },
7745
+ { name: 'brain_sync', args: 'direction?', desc: 'LoRA weight sync' },
7746
+ ],
7747
+ 'edge': [
7748
+ { name: 'edge_status', args: '(none)', desc: 'Network status' },
7749
+ { name: 'edge_join', args: 'contribution?', desc: 'Join compute network' },
7750
+ { name: 'edge_balance', args: '(none)', desc: 'Check rUv balance' },
7751
+ { name: 'edge_tasks', args: 'limit?', desc: 'List compute tasks' },
7752
+ ],
7753
+ 'identity': [
7754
+ { name: 'identity_generate', args: '(none)', desc: 'Generate new pi key' },
7755
+ { name: 'identity_show', args: '(none)', desc: 'Show current identity' },
7756
+ ],
7757
+ };
7758
+
7759
+ if (opts.json) {
7760
+ const output = {};
7761
+ Object.entries(toolGroups).forEach(([group, tools]) => {
7762
+ if (!opts.group || group === opts.group || group.startsWith(opts.group)) {
7763
+ output[group] = tools;
7764
+ }
7765
+ });
7766
+ console.log(JSON.stringify(output, null, 2));
7767
+ return;
7768
+ }
7769
+
7770
+ console.log(chalk.bold.cyan('\nRuVector MCP Tools\n'));
7771
+ let total = 0;
7772
+ Object.entries(toolGroups).forEach(([group, tools]) => {
7773
+ if (opts.group && group !== opts.group && !group.startsWith(opts.group)) return;
7774
+ console.log(chalk.bold.yellow(` ${group} (${tools.length}):`));
7775
+ tools.forEach(t => {
7776
+ console.log(` ${chalk.green(t.name.padEnd(28))} ${chalk.dim(t.args.padEnd(40))} ${t.desc}`);
7777
+ });
7778
+ console.log();
7779
+ total += tools.length;
7780
+ });
7781
+ console.log(chalk.bold(`Total: ${total} MCP tools\n`));
7782
+ });
7783
+
7784
+ // ============================================================================
7785
+ // MCP test subcommand
7786
+ // ============================================================================
7787
+
7788
+ mcpCmd.command('test')
7789
+ .description('Test MCP server setup and tool registration')
7790
+ .action(() => {
7791
+ console.log(chalk.bold.cyan('\nMCP Server Test Results'));
7792
+ console.log(chalk.dim('-'.repeat(40)));
7793
+
7794
+ const mcpServerPath = path.join(__dirname, 'mcp-server.js');
7795
+ if (fs.existsSync(mcpServerPath)) {
7796
+ console.log(` ${chalk.green('PASS')} mcp-server.js exists`);
7797
+ } else {
7798
+ console.log(` ${chalk.red('FAIL')} mcp-server.js not found`);
7799
+ process.exit(1);
7800
+ }
7801
+
7802
+ try {
7803
+ const { execSync } = require('child_process');
7804
+ execSync(`node -c ${mcpServerPath}`, { stdio: 'pipe' });
7805
+ console.log(` ${chalk.green('PASS')} mcp-server.js syntax valid`);
7806
+ } catch {
7807
+ console.log(` ${chalk.red('FAIL')} mcp-server.js has syntax errors`);
7808
+ process.exit(1);
7809
+ }
7810
+
7811
+ try {
7812
+ require('@modelcontextprotocol/sdk/server/index.js');
7813
+ console.log(` ${chalk.green('PASS')} @modelcontextprotocol/sdk installed`);
7814
+ } catch {
7815
+ console.log(` ${chalk.red('FAIL')} @modelcontextprotocol/sdk not installed`);
7816
+ process.exit(1);
7817
+ }
7818
+
7819
+ try {
7820
+ const src = fs.readFileSync(mcpServerPath, 'utf8');
7821
+ const toolsStart = src.indexOf('const TOOLS = [');
7822
+ const toolsSection = toolsStart >= 0 ? src.slice(toolsStart) : src;
7823
+ const toolDefs = toolsSection.match(/name:\s*'([a-z][a-z0-9_]*)'\s*,\s*\n\s*description:/g) || [];
7824
+ const toolNames = toolDefs.map(m => m.match(/name:\s*'([a-z][a-z0-9_]*)'/)[1]);
7825
+ const groups = {};
7826
+ toolNames.forEach(n => {
7827
+ const g = n.split('_')[0];
7828
+ groups[g] = (groups[g] || 0) + 1;
7829
+ });
7830
+
7831
+ Object.entries(groups).sort((a, b) => b[1] - a[1]).forEach(([group, count]) => {
7832
+ console.log(` ${chalk.green('PASS')} ${group}: ${count} tools`);
7833
+ });
7834
+ console.log(chalk.bold(`\n Total: ${toolNames.length} tools registered`));
7835
+ } catch (e) {
7836
+ console.log(` ${chalk.yellow('WARN')} Could not parse tool count: ${e.message}`);
7837
+ }
7838
+
7839
+ try {
7840
+ const src = fs.readFileSync(mcpServerPath, 'utf8');
7841
+ const verMatch = src.match(/version:\s*'([^']+)'/);
7842
+ if (verMatch) {
7843
+ const pkg = require(path.join(__dirname, '..', 'package.json'));
7844
+ const match = verMatch[1] === pkg.version;
7845
+ console.log(` ${match ? chalk.green('PASS') : chalk.yellow('WARN')} Server version: ${verMatch[1]}${match ? '' : ` (package: ${pkg.version})`}`);
7846
+ }
7847
+ } catch {}
7848
+
7849
+ console.log(chalk.bold.green('\n All checks passed.\n'));
7850
+ console.log(chalk.dim(' Setup: claude mcp add ruvector npx ruvector mcp start\n'));
7851
+ });
7852
+
7403
7853
  // ============================================================================
7404
7854
  // Brain Commands — Shared intelligence via @ruvector/pi-brain (lazy-loaded)
7405
7855
  // ============================================================================
7406
7856
 
7407
- let _piBrainClient = null;
7408
- async function getPiBrainClient(opts = {}) {
7409
- if (_piBrainClient) return _piBrainClient;
7857
+ async function requirePiBrain() {
7410
7858
  try {
7411
- const piBrain = require('@ruvector/pi-brain');
7412
- const PiBrainClient = piBrain.PiBrainClient || piBrain.default;
7413
- const url = opts.url || process.env.BRAIN_URL || 'https://pi.ruv.io';
7414
- const key = opts.key || process.env.PI || '';
7415
- _piBrainClient = new PiBrainClient({ url, key });
7416
- return _piBrainClient;
7859
+ return require('@ruvector/pi-brain');
7417
7860
  } catch {
7418
7861
  console.error(chalk.red('Brain commands require @ruvector/pi-brain'));
7419
7862
  console.error(chalk.yellow(' npm install @ruvector/pi-brain'));
@@ -7421,114 +7864,90 @@ async function getPiBrainClient(opts = {}) {
7421
7864
  }
7422
7865
  }
7423
7866
 
7424
- const brainCmd = program.command('brain').description('Shared intelligence — search, share, vote on collective knowledge');
7867
+ function getBrainConfig(opts) {
7868
+ return {
7869
+ url: opts.url || process.env.BRAIN_URL || 'https://pi.ruv.io',
7870
+ key: opts.key || process.env.PI
7871
+ };
7872
+ }
7873
+
7874
+ const brainCmd = program.command('brain').description('Shared intelligence — search, share, and manage collective knowledge');
7425
7875
 
7426
7876
  brainCmd.command('search <query>')
7427
- .description('Semantic search across shared knowledge')
7428
- .option('-l, --limit <n>', 'Max results', '10')
7877
+ .description('Semantic search across shared brain knowledge')
7429
7878
  .option('-c, --category <cat>', 'Filter by category')
7879
+ .option('-l, --limit <n>', 'Max results', '10')
7430
7880
  .option('--url <url>', 'Brain server URL')
7431
- .option('--key <key>', 'PI key')
7432
- .option('--json', 'JSON output')
7881
+ .option('--key <key>', 'Pi key')
7882
+ .option('--json', 'Output as JSON')
7433
7883
  .action(async (query, opts) => {
7434
- const spinner = ora(`Searching brain for "${query}"...`);
7435
- spinner.start();
7884
+ const piBrain = await requirePiBrain();
7885
+ const config = getBrainConfig(opts);
7436
7886
  try {
7437
- const client = await getPiBrainClient(opts);
7438
- const results = await client.search(query, { limit: parseInt(opts.limit), category: opts.category });
7439
- spinner.stop();
7440
- if (opts.json || !process.stdout.isTTY) {
7441
- console.log(JSON.stringify(results, null, 2));
7442
- } else {
7443
- const items = results.memories || results.results || results || [];
7444
- if (items.length === 0) {
7445
- console.log(chalk.yellow('No results found.'));
7446
- } else {
7447
- items.forEach((m, i) => {
7448
- console.log(chalk.bold.cyan(`${i + 1}. ${m.title || m.id}`));
7449
- if (m.category) console.log(chalk.dim(` Category: ${m.category}`));
7450
- if (m.quality_score != null) console.log(chalk.dim(` Quality: ${(m.quality_score * 100).toFixed(0)}%`));
7451
- if (m.content) console.log(` ${m.content.slice(0, 120)}${m.content.length > 120 ? '...' : ''}`);
7452
- console.log();
7453
- });
7454
- }
7455
- }
7456
- } catch (e) {
7457
- spinner.stop();
7458
- console.error(chalk.red('Search failed:'), e.message);
7459
- process.exit(1);
7460
- }
7887
+ const client = new piBrain.PiBrainClient(config);
7888
+ const results = await client.search(query, { category: opts.category, limit: parseInt(opts.limit) });
7889
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(results, null, 2)); return; }
7890
+ console.log(chalk.bold.cyan(`\nBrain Search: "${query}"\n`));
7891
+ if (!results.length) { console.log(chalk.dim(' No results found.\n')); return; }
7892
+ results.forEach((r, i) => {
7893
+ console.log(` ${chalk.yellow(i + 1 + '.')} ${chalk.bold(r.title || r.id)}`);
7894
+ if (r.category) console.log(` ${chalk.dim('Category:')} ${r.category}`);
7895
+ if (r.score) console.log(` ${chalk.dim('Score:')} ${r.score.toFixed(3)}`);
7896
+ console.log();
7897
+ });
7898
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7461
7899
  });
7462
7900
 
7463
7901
  brainCmd.command('share <title>')
7464
7902
  .description('Share knowledge with the collective brain')
7465
- .option('-c, --category <cat>', 'Category (pattern, solution, architecture, convention, security, performance, tooling)', 'pattern')
7903
+ .requiredOption('-c, --category <cat>', 'Category (pattern, solution, architecture, convention, security, performance, tooling)')
7466
7904
  .option('-t, --tags <tags>', 'Comma-separated tags')
7467
- .option('--content <text>', 'Content body (or pipe via stdin)')
7905
+ .option('--content <text>', 'Content body')
7906
+ .option('--code <snippet>', 'Code snippet')
7468
7907
  .option('--url <url>', 'Brain server URL')
7469
- .option('--key <key>', 'PI key')
7908
+ .option('--key <key>', 'Pi key')
7470
7909
  .action(async (title, opts) => {
7471
- const spinner = ora('Sharing with brain...');
7472
- spinner.start();
7910
+ const piBrain = await requirePiBrain();
7911
+ const config = getBrainConfig(opts);
7473
7912
  try {
7474
- const client = await getPiBrainClient(opts);
7475
- let content = opts.content || '';
7476
- if (!content && !process.stdin.isTTY) {
7477
- const chunks = [];
7478
- for await (const chunk of process.stdin) chunks.push(chunk);
7479
- content = Buffer.concat(chunks).toString('utf8');
7480
- }
7481
- if (!content) { spinner.stop(); console.error(chalk.red('Provide content via --content or stdin')); process.exit(1); }
7482
- const tags = opts.tags ? opts.tags.split(',').map(t => t.trim()) : [];
7483
- const result = await client.share({ title, content, category: opts.category, tags });
7484
- spinner.succeed(chalk.green(`Shared: ${result.id || 'ok'}`));
7485
- } catch (e) {
7486
- spinner.fail(chalk.red('Share failed: ' + e.message));
7487
- process.exit(1);
7488
- }
7913
+ const client = new piBrain.PiBrainClient(config);
7914
+ const result = await client.share({ title, content: opts.content || title, category: opts.category, tags: opts.tags ? opts.tags.split(',').map(t => t.trim()) : [], code_snippet: opts.code });
7915
+ console.log(chalk.green(`Shared: ${result.id || 'OK'}`));
7916
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7489
7917
  });
7490
7918
 
7491
7919
  brainCmd.command('get <id>')
7492
- .description('Retrieve a specific memory with provenance')
7920
+ .description('Retrieve a specific memory by ID')
7493
7921
  .option('--url <url>', 'Brain server URL')
7494
- .option('--key <key>', 'PI key')
7495
- .option('--json', 'JSON output')
7922
+ .option('--key <key>', 'Pi key')
7923
+ .option('--json', 'Output as JSON')
7496
7924
  .action(async (id, opts) => {
7925
+ const piBrain = await requirePiBrain();
7926
+ const config = getBrainConfig(opts);
7497
7927
  try {
7498
- const client = await getPiBrainClient(opts);
7928
+ const client = new piBrain.PiBrainClient(config);
7499
7929
  const result = await client.get(id);
7500
- if (opts.json || !process.stdout.isTTY) {
7501
- console.log(JSON.stringify(result, null, 2));
7502
- } else {
7503
- console.log(chalk.bold.cyan(result.title || result.id));
7504
- if (result.category) console.log(chalk.dim(`Category: ${result.category}`));
7505
- if (result.content) console.log(`\n${result.content}`);
7506
- if (result.tags && result.tags.length) console.log(chalk.dim(`\nTags: ${result.tags.join(', ')}`));
7507
- if (result.contributor_pseudonym) console.log(chalk.dim(`Contributor: ${result.contributor_pseudonym}`));
7508
- }
7509
- } catch (e) {
7510
- console.error(chalk.red('Get failed:'), e.message);
7511
- process.exit(1);
7512
- }
7930
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
7931
+ console.log(chalk.bold.cyan(`\nMemory: ${id}\n`));
7932
+ if (result.title) console.log(` ${chalk.bold('Title:')} ${result.title}`);
7933
+ if (result.content) console.log(` ${chalk.bold('Content:')} ${result.content}`);
7934
+ if (result.category) console.log(` ${chalk.bold('Category:')} ${result.category}`);
7935
+ console.log();
7936
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7513
7937
  });
7514
7938
 
7515
7939
  brainCmd.command('vote <id> <direction>')
7516
- .description('Vote on knowledge quality (up or down)')
7940
+ .description('Quality vote on a memory (up or down)')
7517
7941
  .option('--url <url>', 'Brain server URL')
7518
- .option('--key <key>', 'PI key')
7942
+ .option('--key <key>', 'Pi key')
7519
7943
  .action(async (id, direction, opts) => {
7520
- if (!['up', 'down'].includes(direction)) {
7521
- console.error(chalk.red('Direction must be "up" or "down"'));
7522
- process.exit(1);
7523
- }
7944
+ const piBrain = await requirePiBrain();
7945
+ const config = getBrainConfig(opts);
7524
7946
  try {
7525
- const client = await getPiBrainClient(opts);
7947
+ const client = new piBrain.PiBrainClient(config);
7526
7948
  await client.vote(id, direction);
7527
7949
  console.log(chalk.green(`Voted ${direction} on ${id}`));
7528
- } catch (e) {
7529
- console.error(chalk.red('Vote failed:'), e.message);
7530
- process.exit(1);
7531
- }
7950
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7532
7951
  });
7533
7952
 
7534
7953
  brainCmd.command('list')
@@ -7536,594 +7955,628 @@ brainCmd.command('list')
7536
7955
  .option('-c, --category <cat>', 'Filter by category')
7537
7956
  .option('-l, --limit <n>', 'Max results', '20')
7538
7957
  .option('--url <url>', 'Brain server URL')
7539
- .option('--key <key>', 'PI key')
7540
- .option('--json', 'JSON output')
7958
+ .option('--key <key>', 'Pi key')
7959
+ .option('--json', 'Output as JSON')
7541
7960
  .action(async (opts) => {
7961
+ const piBrain = await requirePiBrain();
7962
+ const config = getBrainConfig(opts);
7542
7963
  try {
7543
- const client = await getPiBrainClient(opts);
7964
+ const client = new piBrain.PiBrainClient(config);
7544
7965
  const results = await client.list({ category: opts.category, limit: parseInt(opts.limit) });
7545
- const items = results.memories || results || [];
7546
- if (opts.json || !process.stdout.isTTY) {
7547
- console.log(JSON.stringify(items, null, 2));
7548
- } else {
7549
- if (items.length === 0) { console.log(chalk.yellow('No memories found.')); return; }
7550
- items.forEach((m, i) => {
7551
- const quality = m.quality_score != null ? chalk.dim(` [${(m.quality_score * 100).toFixed(0)}%]`) : '';
7552
- console.log(`${chalk.bold(i + 1 + '.')} ${m.title || m.id}${quality} ${chalk.dim(m.category || '')}`);
7553
- });
7554
- }
7555
- } catch (e) {
7556
- console.error(chalk.red('List failed:'), e.message);
7557
- process.exit(1);
7558
- }
7966
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(results, null, 2)); return; }
7967
+ console.log(chalk.bold.cyan('\nShared Brain Memories\n'));
7968
+ if (!results.length) { console.log(chalk.dim(' No memories found.\n')); return; }
7969
+ results.forEach((r, i) => {
7970
+ console.log(` ${chalk.yellow(i + 1 + '.')} ${chalk.bold(r.title || r.id)} ${chalk.dim(`[${r.category || 'unknown'}]`)}`);
7971
+ });
7972
+ console.log();
7973
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7559
7974
  });
7560
7975
 
7561
7976
  brainCmd.command('delete <id>')
7562
7977
  .description('Delete your own contribution')
7563
7978
  .option('--url <url>', 'Brain server URL')
7564
- .option('--key <key>', 'PI key')
7979
+ .option('--key <key>', 'Pi key')
7565
7980
  .action(async (id, opts) => {
7981
+ const piBrain = await requirePiBrain();
7982
+ const config = getBrainConfig(opts);
7566
7983
  try {
7567
- const client = await getPiBrainClient(opts);
7984
+ const client = new piBrain.PiBrainClient(config);
7568
7985
  await client.delete(id);
7569
- console.log(chalk.green(`Deleted ${id}`));
7570
- } catch (e) {
7571
- console.error(chalk.red('Delete failed:'), e.message);
7572
- process.exit(1);
7573
- }
7986
+ console.log(chalk.green(`Deleted: ${id}`));
7987
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7574
7988
  });
7575
7989
 
7576
7990
  brainCmd.command('status')
7577
- .description('Show brain system health and statistics')
7991
+ .description('Show shared brain system health')
7578
7992
  .option('--url <url>', 'Brain server URL')
7579
- .option('--key <key>', 'PI key')
7580
- .option('--json', 'JSON output')
7993
+ .option('--key <key>', 'Pi key')
7994
+ .option('--json', 'Output as JSON')
7581
7995
  .action(async (opts) => {
7996
+ const piBrain = await requirePiBrain();
7997
+ const config = getBrainConfig(opts);
7582
7998
  try {
7583
- const client = await getPiBrainClient(opts);
7999
+ const client = new piBrain.PiBrainClient(config);
7584
8000
  const status = await client.status();
7585
- if (opts.json || !process.stdout.isTTY) {
7586
- console.log(JSON.stringify(status, null, 2));
7587
- } else {
7588
- console.log(chalk.bold.cyan('\nBrain Status'));
7589
- console.log(chalk.dim('-'.repeat(40)));
7590
- Object.entries(status).forEach(([k, v]) => {
7591
- console.log(` ${chalk.bold(k)}: ${typeof v === 'object' ? JSON.stringify(v) : v}`);
7592
- });
7593
- console.log();
7594
- }
7595
- } catch (e) {
7596
- console.error(chalk.red('Status failed:'), e.message);
7597
- process.exit(1);
7598
- }
8001
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(status, null, 2)); return; }
8002
+ console.log(chalk.bold.cyan('\nBrain Status\n'));
8003
+ Object.entries(status).forEach(([k, v]) => {
8004
+ console.log(` ${chalk.bold(k + ':')} ${v}`);
8005
+ });
8006
+ console.log();
8007
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7599
8008
  });
7600
8009
 
7601
8010
  brainCmd.command('drift')
7602
- .description('Check knowledge drift between local and shared')
8011
+ .description('Check if shared knowledge has drifted')
7603
8012
  .option('-d, --domain <domain>', 'Domain to check')
7604
8013
  .option('--url <url>', 'Brain server URL')
7605
- .option('--key <key>', 'PI key')
7606
- .option('--json', 'JSON output')
8014
+ .option('--key <key>', 'Pi key')
8015
+ .option('--json', 'Output as JSON')
7607
8016
  .action(async (opts) => {
8017
+ const piBrain = await requirePiBrain();
8018
+ const config = getBrainConfig(opts);
7608
8019
  try {
7609
- const client = await getPiBrainClient(opts);
8020
+ const client = new piBrain.PiBrainClient(config);
7610
8021
  const report = await client.drift({ domain: opts.domain });
7611
- if (opts.json || !process.stdout.isTTY) {
7612
- console.log(JSON.stringify(report, null, 2));
7613
- } else {
7614
- console.log(chalk.bold.cyan('\nDrift Report'));
7615
- console.log(chalk.dim('-'.repeat(40)));
7616
- console.log(` CV: ${report.cv || 'N/A'}`);
7617
- console.log(` Drifting: ${report.is_drifting ? chalk.red('YES') : chalk.green('NO')}`);
7618
- if (report.suggested_action) console.log(` Action: ${report.suggested_action}`);
7619
- console.log();
7620
- }
7621
- } catch (e) {
7622
- console.error(chalk.red('Drift check failed:'), e.message);
7623
- process.exit(1);
7624
- }
8022
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(report, null, 2)); return; }
8023
+ console.log(chalk.bold.cyan('\nDrift Report\n'));
8024
+ console.log(` ${chalk.bold('Drifting:')} ${report.is_drifting ? chalk.red('Yes') : chalk.green('No')}`);
8025
+ if (report.cv) console.log(` ${chalk.bold('CV:')} ${report.cv}`);
8026
+ console.log();
8027
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7625
8028
  });
7626
8029
 
7627
8030
  brainCmd.command('partition')
7628
- .description('Get knowledge topology via mincut partitioning')
8031
+ .description('Get knowledge partitioned by mincut topology')
7629
8032
  .option('-d, --domain <domain>', 'Domain to partition')
7630
- .option('--min-cluster <n>', 'Minimum cluster size', '3')
8033
+ .option('--min-size <n>', 'Minimum cluster size', '3')
7631
8034
  .option('--url <url>', 'Brain server URL')
7632
- .option('--key <key>', 'PI key')
7633
- .option('--json', 'JSON output')
8035
+ .option('--key <key>', 'Pi key')
8036
+ .option('--json', 'Output as JSON')
7634
8037
  .action(async (opts) => {
8038
+ const piBrain = await requirePiBrain();
8039
+ const config = getBrainConfig(opts);
7635
8040
  try {
7636
- const client = await getPiBrainClient(opts);
7637
- const result = await client.partition({ domain: opts.domain, min_cluster_size: parseInt(opts.minCluster) });
7638
- if (opts.json || !process.stdout.isTTY) {
7639
- console.log(JSON.stringify(result, null, 2));
7640
- } else {
7641
- const clusters = result.clusters || [];
7642
- console.log(chalk.bold.cyan(`\nKnowledge Partitions: ${clusters.length} clusters`));
7643
- console.log(chalk.dim('-'.repeat(40)));
7644
- clusters.forEach((c, i) => {
7645
- console.log(` ${chalk.bold('Cluster ' + (i + 1))}: ${c.size || (c.members && c.members.length) || '?'} memories`);
7646
- if (c.label) console.log(` Label: ${c.label}`);
7647
- if (c.edge_strength != null) console.log(chalk.dim(` Edge strength: ${c.edge_strength.toFixed(3)}`));
8041
+ const client = new piBrain.PiBrainClient(config);
8042
+ const result = await client.partition({ domain: opts.domain, min_cluster_size: parseInt(opts.minSize) });
8043
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
8044
+ console.log(chalk.bold.cyan('\nKnowledge Partitions\n'));
8045
+ if (result.clusters) {
8046
+ result.clusters.forEach((c, i) => {
8047
+ console.log(` ${chalk.yellow('Cluster ' + (i + 1) + ':')} ${c.size || 'unknown'} entries`);
7648
8048
  });
7649
- console.log();
7650
8049
  }
7651
- } catch (e) {
7652
- console.error(chalk.red('Partition failed:'), e.message);
7653
- process.exit(1);
7654
- }
8050
+ console.log();
8051
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7655
8052
  });
7656
8053
 
7657
8054
  brainCmd.command('transfer <source> <target>')
7658
- .description('Transfer learned priors between domains')
8055
+ .description('Apply learned priors from one domain to another')
7659
8056
  .option('--url <url>', 'Brain server URL')
7660
- .option('--key <key>', 'PI key')
7661
- .option('--json', 'JSON output')
8057
+ .option('--key <key>', 'Pi key')
8058
+ .option('--json', 'Output as JSON')
7662
8059
  .action(async (source, target, opts) => {
7663
- const spinner = ora(`Transferring ${source} -> ${target}...`);
7664
- spinner.start();
8060
+ const piBrain = await requirePiBrain();
8061
+ const config = getBrainConfig(opts);
7665
8062
  try {
7666
- const client = await getPiBrainClient(opts);
8063
+ const client = new piBrain.PiBrainClient(config);
7667
8064
  const result = await client.transfer(source, target);
7668
- spinner.stop();
7669
- if (opts.json || !process.stdout.isTTY) {
7670
- console.log(JSON.stringify(result, null, 2));
7671
- } else {
7672
- console.log(chalk.green(`Transfer complete: ${source} -> ${target}`));
7673
- if (result.acceleration_factor) console.log(` Acceleration: ${result.acceleration_factor.toFixed(2)}x`);
7674
- if (result.improved_target != null) console.log(` Target improved: ${result.improved_target ? 'yes' : 'no'}`);
7675
- }
7676
- } catch (e) {
7677
- spinner.stop();
7678
- console.error(chalk.red('Transfer failed:'), e.message);
7679
- process.exit(1);
7680
- }
8065
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
8066
+ console.log(chalk.green(`Transfer ${source} -> ${target}: ${result.status || 'OK'}`));
8067
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7681
8068
  });
7682
8069
 
7683
8070
  brainCmd.command('sync [direction]')
7684
- .description('Sync LoRA weights (pull, push, or both)')
8071
+ .description('Synchronize LoRA weights (pull, push, or both)')
7685
8072
  .option('--url <url>', 'Brain server URL')
7686
- .option('--key <key>', 'PI key')
8073
+ .option('--key <key>', 'Pi key')
7687
8074
  .action(async (direction, opts) => {
7688
- const dir = direction || 'both';
7689
- const spinner = ora(`LoRA sync (${dir})...`);
7690
- spinner.start();
8075
+ const piBrain = await requirePiBrain();
8076
+ const config = getBrainConfig(opts);
7691
8077
  try {
7692
- const client = await getPiBrainClient(opts);
7693
- const result = await client.loraSync({ direction: dir });
7694
- spinner.succeed(chalk.green(`LoRA sync ${dir}: ${result.status || 'ok'}`));
7695
- } catch (e) {
7696
- spinner.fail(chalk.red('Sync failed: ' + e.message));
7697
- process.exit(1);
7698
- }
8078
+ const client = new piBrain.PiBrainClient(config);
8079
+ const result = await client.sync(direction || 'both');
8080
+ console.log(chalk.green(`Sync ${direction || 'both'}: ${result.status || 'OK'}`));
8081
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
8082
+ });
8083
+
8084
+ brainCmd.command('page <action> [args...]')
8085
+ .description('Brainpedia page management (list, get, create, update, delete)')
8086
+ .option('--url <url>', 'Brain server URL')
8087
+ .option('--key <key>', 'Pi key')
8088
+ .option('--json', 'Output as JSON')
8089
+ .action(async (action, args, opts) => {
8090
+ const piBrain = await requirePiBrain();
8091
+ const config = getBrainConfig(opts);
8092
+ try {
8093
+ const client = new piBrain.PiBrainClient(config);
8094
+ let result;
8095
+ switch (action) {
8096
+ case 'list':
8097
+ result = await client.listPages ? client.listPages({ limit: 20 }) : { pages: [], message: 'Brainpedia not yet available on this server' };
8098
+ break;
8099
+ case 'get':
8100
+ if (!args[0]) { console.error(chalk.red('Usage: brain page get <slug>')); process.exit(1); }
8101
+ result = await client.getPage ? client.getPage(args[0]) : { error: 'Brainpedia not yet available' };
8102
+ break;
8103
+ case 'create':
8104
+ if (!args[0]) { console.error(chalk.red('Usage: brain page create <title> [--content <text>]')); process.exit(1); }
8105
+ result = await client.createPage ? client.createPage({ title: args[0], content: opts.content || '' }) : { error: 'Brainpedia not yet available' };
8106
+ break;
8107
+ case 'update':
8108
+ if (!args[0]) { console.error(chalk.red('Usage: brain page update <slug> [--content <text>]')); process.exit(1); }
8109
+ result = await client.updatePage ? client.updatePage(args[0], { content: opts.content || '' }) : { error: 'Brainpedia not yet available' };
8110
+ break;
8111
+ case 'delete':
8112
+ if (!args[0]) { console.error(chalk.red('Usage: brain page delete <slug>')); process.exit(1); }
8113
+ result = await client.deletePage ? client.deletePage(args[0]) : { error: 'Brainpedia not yet available' };
8114
+ break;
8115
+ default:
8116
+ console.error(chalk.red(`Unknown page action: ${action}. Use: list, get, create, update, delete`));
8117
+ process.exit(1);
8118
+ }
8119
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
8120
+ if (result.pages) {
8121
+ console.log(chalk.bold.cyan('\nBrainpedia Pages\n'));
8122
+ result.pages.forEach((p, i) => console.log(` ${chalk.yellow(i + 1 + '.')} ${chalk.bold(p.title || p.slug)} ${chalk.dim(p.updated || '')}`));
8123
+ } else if (result.title) {
8124
+ console.log(chalk.bold.cyan(`\n${result.title}\n`));
8125
+ if (result.content) console.log(result.content);
8126
+ } else {
8127
+ console.log(JSON.stringify(result, null, 2));
8128
+ }
8129
+ console.log();
8130
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
8131
+ });
8132
+
8133
+ brainCmd.command('node <action> [args...]')
8134
+ .description('WASM compute node management (publish, list, status)')
8135
+ .option('--url <url>', 'Brain server URL')
8136
+ .option('--key <key>', 'Pi key')
8137
+ .option('--json', 'Output as JSON')
8138
+ .action(async (action, args, opts) => {
8139
+ const piBrain = await requirePiBrain();
8140
+ const config = getBrainConfig(opts);
8141
+ try {
8142
+ const client = new piBrain.PiBrainClient(config);
8143
+ let result;
8144
+ switch (action) {
8145
+ case 'publish':
8146
+ if (!args[0]) { console.error(chalk.red('Usage: brain node publish <wasm-file>')); process.exit(1); }
8147
+ const wasmPath = path.resolve(args[0]);
8148
+ if (!fs.existsSync(wasmPath)) { console.error(chalk.red(`File not found: ${wasmPath}`)); process.exit(1); }
8149
+ const wasmBytes = fs.readFileSync(wasmPath);
8150
+ result = await client.publishNode ? client.publishNode({ wasm: wasmBytes, name: path.basename(wasmPath, '.wasm') }) : { error: 'WASM node publish not yet available on this server' };
8151
+ break;
8152
+ case 'list':
8153
+ result = await client.listNodes ? client.listNodes({ limit: 20 }) : { nodes: [], message: 'WASM node listing not yet available' };
8154
+ break;
8155
+ case 'status':
8156
+ if (!args[0]) { console.error(chalk.red('Usage: brain node status <node-id>')); process.exit(1); }
8157
+ result = await client.nodeStatus ? client.nodeStatus(args[0]) : { error: 'WASM node status not yet available' };
8158
+ break;
8159
+ default:
8160
+ console.error(chalk.red(`Unknown node action: ${action}. Use: publish, list, status`));
8161
+ process.exit(1);
8162
+ }
8163
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result, null, 2)); return; }
8164
+ if (result.nodes) {
8165
+ console.log(chalk.bold.cyan('\nWASM Compute Nodes\n'));
8166
+ result.nodes.forEach((n, i) => console.log(` ${chalk.yellow(i + 1 + '.')} ${chalk.bold(n.name || n.id)} ${chalk.dim(n.status || '')}`));
8167
+ } else if (result.id) {
8168
+ console.log(chalk.green(`Published node: ${result.id}`));
8169
+ } else {
8170
+ console.log(JSON.stringify(result, null, 2));
8171
+ }
8172
+ console.log();
8173
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7699
8174
  });
7700
8175
 
7701
8176
  // ============================================================================
7702
- // Edge Commands — Distributed compute via edge-net (native fetch)
8177
+ // Edge Commands — Distributed compute via @ruvector/edge-net
7703
8178
  // ============================================================================
7704
8179
 
7705
- const EDGE_GENESIS = 'https://edge-net-genesis-875130704813.us-central1.run.app';
7706
- const EDGE_RELAY = 'https://edge-net-relay-875130704813.us-central1.run.app';
7707
- const EDGE_DASHBOARD = 'https://edge-net-dashboard-875130704813.us-central1.run.app';
8180
+ const edgeCmd = program.command('edge').description('Distributed P2P compute network — status, join, balance, tasks');
7708
8181
 
7709
- const edgeCmd = program.command('edge').description('Edge-net distributed compute network');
8182
+ const EDGE_GENESIS = 'https://edge-net-genesis-875130704813.us-central1.run.app';
7710
8183
 
7711
8184
  edgeCmd.command('status')
7712
- .description('Query network status (genesis, relay, nodes)')
7713
- .option('--json', 'JSON output')
8185
+ .description('Show edge compute network status')
8186
+ .option('--json', 'Output as JSON')
7714
8187
  .action(async (opts) => {
7715
- const spinner = ora('Querying edge network...');
7716
- spinner.start();
7717
8188
  try {
7718
- const res = await fetch(`${EDGE_GENESIS}/api/status`);
7719
- if (!res.ok) throw new Error(`HTTP ${res.status}`);
7720
- const data = await res.json();
7721
- spinner.stop();
7722
- if (opts.json || !process.stdout.isTTY) {
7723
- console.log(JSON.stringify(data, null, 2));
7724
- } else {
7725
- console.log(chalk.bold.cyan('\nEdge Network Status'));
7726
- console.log(chalk.dim('-'.repeat(40)));
7727
- if (data.nodes != null) console.log(` Nodes: ${data.nodes}`);
7728
- if (data.total_compute != null) console.log(` Compute: ${data.total_compute}`);
7729
- if (data.ruv_supply != null) console.log(` rUv Supply: ${data.ruv_supply}`);
7730
- if (data.phase) console.log(` Phase: ${data.phase}`);
7731
- console.log();
7732
- }
7733
- } catch (e) {
7734
- spinner.stop();
7735
- console.error(chalk.red('Edge status failed:'), e.message);
7736
- console.error(chalk.dim(' Genesis URL: ' + EDGE_GENESIS));
7737
- }
8189
+ const resp = await fetch(`${EDGE_GENESIS}/status`);
8190
+ const data = await resp.json();
8191
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(data, null, 2)); return; }
8192
+ console.log(chalk.bold.cyan('\nEdge Network Status\n'));
8193
+ Object.entries(data).forEach(([k, v]) => console.log(` ${chalk.bold(k + ':')} ${v}`));
8194
+ console.log();
8195
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7738
8196
  });
7739
8197
 
7740
8198
  edgeCmd.command('join')
7741
- .description('Join as a compute node')
7742
- .option('--contribution <n>', 'Contribution factor (0.0 - 1.0)', '0.3')
7743
- .option('--key <key>', 'PI key')
8199
+ .description('Join the edge compute network as a compute node')
8200
+ .option('--contribution <level>', 'Contribution level 0.0-1.0', '0.3')
7744
8201
  .action(async (opts) => {
7745
- const key = opts.key || process.env.PI || '';
7746
- if (!key) {
7747
- console.error(chalk.red('PI key required. Set PI env var or use --key'));
7748
- console.error(chalk.yellow(' Generate one: npx ruvector identity generate'));
7749
- process.exit(1);
7750
- }
7751
- console.log(chalk.cyan(`Joining edge network (contribution=${opts.contribution})...`));
7752
- console.log(chalk.dim('This is a long-running process. Press Ctrl+C to leave.\n'));
8202
+ const piKey = process.env.PI;
8203
+ if (!piKey) { console.error(chalk.red('Set PI environment variable first. Run: npx ruvector identity generate')); process.exit(1); }
7753
8204
  try {
7754
- const res = await fetch(`${EDGE_RELAY}/api/join`, {
8205
+ const resp = await fetch(`${EDGE_GENESIS}/join`, {
7755
8206
  method: 'POST',
7756
- headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${key}` },
7757
- body: JSON.stringify({ contribution: parseFloat(opts.contribution) })
8207
+ headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${piKey}` },
8208
+ body: JSON.stringify({ contribution: parseFloat(opts.contribution), pi_key: piKey })
7758
8209
  });
7759
- if (!res.ok) throw new Error(`HTTP ${res.status}: ${await res.text()}`);
7760
- const data = await res.json();
7761
- console.log(chalk.green('Joined network.'), data.node_id ? `Node: ${data.node_id}` : '');
7762
- } catch (e) {
7763
- console.error(chalk.red('Join failed:'), e.message);
7764
- process.exit(1);
7765
- }
8210
+ const data = await resp.json();
8211
+ console.log(chalk.green(`Joined edge network: ${data.node_id || 'OK'}`));
8212
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7766
8213
  });
7767
8214
 
7768
8215
  edgeCmd.command('balance')
7769
- .description('Check rUv balance for current identity')
7770
- .option('--key <key>', 'PI key')
7771
- .option('--json', 'JSON output')
8216
+ .description('Check rUv credit balance')
8217
+ .option('--json', 'Output as JSON')
7772
8218
  .action(async (opts) => {
7773
- const key = opts.key || process.env.PI || '';
7774
- if (!key) { console.error(chalk.red('PI key required')); process.exit(1); }
8219
+ const piKey = process.env.PI;
8220
+ if (!piKey) { console.error(chalk.red('Set PI environment variable first.')); process.exit(1); }
7775
8221
  try {
7776
- const crypto = require('crypto');
7777
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7778
- const res = await fetch(`${EDGE_GENESIS}/api/balance/${pseudonym}`);
7779
- if (!res.ok) throw new Error(`HTTP ${res.status}`);
7780
- const data = await res.json();
7781
- if (opts.json || !process.stdout.isTTY) {
7782
- console.log(JSON.stringify(data, null, 2));
7783
- } else {
7784
- console.log(chalk.bold.cyan(`\nrUv Balance: ${data.balance != null ? data.balance : 'N/A'}`));
7785
- if (data.earned != null) console.log(chalk.dim(` Earned: ${data.earned}`));
7786
- if (data.spent != null) console.log(chalk.dim(` Spent: ${data.spent}`));
7787
- console.log();
7788
- }
7789
- } catch (e) {
7790
- console.error(chalk.red('Balance check failed:'), e.message);
7791
- }
8222
+ const resp = await fetch(`${EDGE_GENESIS}/balance/${piKey}`, { headers: { 'Authorization': `Bearer ${piKey}` } });
8223
+ const data = await resp.json();
8224
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(data, null, 2)); return; }
8225
+ console.log(chalk.bold.cyan('\nrUv Balance\n'));
8226
+ console.log(` ${chalk.bold('Balance:')} ${data.balance || 0} rUv`);
8227
+ console.log();
8228
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7792
8229
  });
7793
8230
 
7794
8231
  edgeCmd.command('tasks')
7795
8232
  .description('List available distributed compute tasks')
7796
- .option('--json', 'JSON output')
8233
+ .option('-l, --limit <n>', 'Max tasks', '20')
8234
+ .option('--json', 'Output as JSON')
7797
8235
  .action(async (opts) => {
7798
8236
  try {
7799
- const res = await fetch(`${EDGE_GENESIS}/api/tasks`);
7800
- if (!res.ok) throw new Error(`HTTP ${res.status}`);
7801
- const data = await res.json();
7802
- const tasks = data.tasks || data || [];
7803
- if (opts.json || !process.stdout.isTTY) {
7804
- console.log(JSON.stringify(tasks, null, 2));
7805
- } else {
7806
- if (tasks.length === 0) { console.log(chalk.yellow('No tasks available.')); return; }
7807
- console.log(chalk.bold.cyan(`\n${tasks.length} available tasks\n`));
7808
- tasks.forEach((t, i) => {
7809
- console.log(` ${chalk.bold(i + 1 + '.')} ${t.type || t.id} -- ${t.reward || '?'} rUv`);
7810
- });
7811
- console.log();
7812
- }
7813
- } catch (e) {
7814
- console.error(chalk.red('Tasks query failed:'), e.message);
7815
- }
8237
+ const resp = await fetch(`${EDGE_GENESIS}/tasks?limit=${opts.limit}`);
8238
+ const data = await resp.json();
8239
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(data, null, 2)); return; }
8240
+ console.log(chalk.bold.cyan('\nEdge Compute Tasks\n'));
8241
+ const tasks = Array.isArray(data) ? data : data.tasks || [];
8242
+ if (!tasks.length) { console.log(chalk.dim(' No tasks available.\n')); return; }
8243
+ tasks.forEach((t, i) => console.log(` ${chalk.yellow(i + 1 + '.')} ${t.name || t.id} ${chalk.dim(`[${t.status || 'pending'}]`)}`));
8244
+ console.log();
8245
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7816
8246
  });
7817
8247
 
7818
8248
  edgeCmd.command('dashboard')
7819
8249
  .description('Open edge-net dashboard in browser')
7820
8250
  .action(() => {
7821
- const url = EDGE_DASHBOARD;
7822
- console.log(chalk.cyan(`Opening: ${url}`));
7823
- const { exec } = require('child_process');
7824
- const cmd = process.platform === 'darwin' ? 'open' : process.platform === 'win32' ? 'start' : 'xdg-open';
7825
- exec(`${cmd} ${url}`, () => {});
8251
+ const url = 'https://edge-net-dashboard-875130704813.us-central1.run.app';
8252
+ console.log(chalk.cyan(`Dashboard: ${url}`));
8253
+ try {
8254
+ const { execSync } = require('child_process');
8255
+ const cmd = process.platform === 'darwin' ? 'open' : process.platform === 'win32' ? 'start' : 'xdg-open';
8256
+ execSync(`${cmd} ${url}`, { stdio: 'ignore' });
8257
+ } catch { console.log(chalk.dim(' Open the URL above in your browser.')); }
7826
8258
  });
7827
8259
 
7828
8260
  // ============================================================================
7829
- // Identity Commands — pi key management (Node.js crypto)
8261
+ // Identity Commands — Pi key management
7830
8262
  // ============================================================================
7831
8263
 
7832
- const identityCmd = program.command('identity').description('Pi key management for brain, edge, and MCP identity');
8264
+ const identityCmd = program.command('identity').description('Pi key identity management generate, show, export, import');
7833
8265
 
7834
8266
  identityCmd.command('generate')
7835
8267
  .description('Generate a new pi key')
7836
8268
  .action(() => {
7837
8269
  const crypto = require('crypto');
7838
8270
  const key = crypto.randomBytes(32).toString('hex');
7839
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7840
- console.log(chalk.bold.cyan('\nNew Pi Key Generated\n'));
7841
- console.log(chalk.bold('Key: ') + chalk.yellow(key));
7842
- console.log(chalk.bold('Pseudonym: ') + chalk.green(pseudonym));
7843
- console.log(chalk.dim('\nStore this key securely. Set it as:'));
7844
- console.log(chalk.dim(' export PI=' + key));
7845
- console.log(chalk.dim(' # or add to .env file'));
8271
+ const hash = crypto.createHash('shake256', { outputLength: 16 });
8272
+ hash.update(key);
8273
+ const pseudonym = hash.digest('hex');
8274
+ console.log(chalk.bold.cyan('\nNew Pi Identity Generated\n'));
8275
+ console.log(` ${chalk.bold('Pi Key:')} ${chalk.yellow(key)}`);
8276
+ console.log(` ${chalk.bold('Pseudonym:')} ${chalk.green(pseudonym)}`);
7846
8277
  console.log();
8278
+ console.log(chalk.dim(' Store securely. Set PI env var to use:'));
8279
+ console.log(chalk.cyan(` export PI=${key}\n`));
7847
8280
  });
7848
8281
 
7849
8282
  identityCmd.command('show')
7850
- .description('Display current identity derived from PI key')
7851
- .option('--key <key>', 'PI key (default: PI env var)')
8283
+ .description('Show current pi key pseudonym and derived identities')
8284
+ .option('--json', 'Output as JSON')
7852
8285
  .action((opts) => {
7853
- const key = opts.key || process.env.PI || '';
7854
- if (!key) {
7855
- console.error(chalk.red('No PI key found. Set PI env var or use --key'));
7856
- console.error(chalk.yellow(' Generate one: npx ruvector identity generate'));
8286
+ const piKey = process.env.PI;
8287
+ if (!piKey) {
8288
+ console.error(chalk.red('No PI environment variable set.'));
8289
+ console.error(chalk.yellow(' Run: npx ruvector identity generate'));
7857
8290
  process.exit(1);
7858
8291
  }
7859
8292
  const crypto = require('crypto');
7860
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
7861
- const mcpToken = crypto.createHmac('sha256', key).update('mcp').digest('hex').slice(0, 32);
8293
+ const hash = crypto.createHash('shake256', { outputLength: 16 });
8294
+ hash.update(piKey);
8295
+ const pseudonym = hash.digest('hex');
8296
+ const mcpToken = crypto.createHmac('sha256', piKey).update('mcp').digest('hex').slice(0, 32);
8297
+ const edgeKeyBuf = crypto.createHash('sha512').update(piKey).update('edge-net').digest().slice(0, 32);
8298
+ const edgeKey = edgeKeyBuf.toString('hex');
8299
+ if (opts.json || !process.stdout.isTTY) {
8300
+ console.log(JSON.stringify({ pseudonym, mcp_token: mcpToken, edge_key: edgeKey, key_prefix: piKey.slice(0, 8) + '...' }, null, 2));
8301
+ return;
8302
+ }
7862
8303
  console.log(chalk.bold.cyan('\nPi Identity\n'));
7863
- console.log(chalk.bold('Brain Pseudonym: ') + chalk.green(pseudonym));
7864
- console.log(chalk.bold('MCP Token: ') + chalk.dim(mcpToken));
7865
- console.log(chalk.bold('Key (first 8): ') + chalk.dim(key.slice(0, 8) + '...'));
8304
+ console.log(` ${chalk.bold('Key:')} ${piKey.slice(0, 8)}...${piKey.slice(-8)}`);
8305
+ console.log(` ${chalk.bold('Pseudonym:')} ${chalk.green(pseudonym)}`);
8306
+ console.log(` ${chalk.bold('MCP Token:')} ${chalk.dim(mcpToken)}`);
8307
+ console.log(` ${chalk.bold('Edge Key:')} ${chalk.dim(edgeKey)}`);
7866
8308
  console.log();
7867
8309
  });
7868
8310
 
7869
- identityCmd.command('export')
7870
- .description('Export key to encrypted file')
7871
- .option('-o, --output <path>', 'Output file', 'pi-key.enc')
7872
- .option('--key <key>', 'PI key')
7873
- .action((opts) => {
7874
- const key = opts.key || process.env.PI || '';
7875
- if (!key) { console.error(chalk.red('No PI key found.')); process.exit(1); }
8311
+ identityCmd.command('export <file>')
8312
+ .description('Export pi key to encrypted file')
8313
+ .action((file) => {
8314
+ const piKey = process.env.PI;
8315
+ if (!piKey) { console.error(chalk.red('No PI environment variable set.')); process.exit(1); }
7876
8316
  const crypto = require('crypto');
7877
- const password = crypto.randomBytes(16).toString('hex');
8317
+ const passphrase = crypto.randomBytes(16).toString('hex');
8318
+ const key = crypto.scryptSync(passphrase, 'ruvector-pi', 32);
7878
8319
  const iv = crypto.randomBytes(16);
7879
- const cipher = crypto.createCipheriv('aes-256-gcm', crypto.scryptSync(password, 'ruvector', 32), iv);
7880
- let encrypted = cipher.update(key, 'utf8', 'hex');
8320
+ const cipher = crypto.createCipheriv('aes-256-gcm', key, iv);
8321
+ let encrypted = cipher.update(piKey, 'utf8', 'hex');
7881
8322
  encrypted += cipher.final('hex');
7882
- const tag = cipher.getAuthTag().toString('hex');
7883
- const data = JSON.stringify({ iv: iv.toString('hex'), tag, data: encrypted, v: 1 });
7884
- fs.writeFileSync(opts.output, data);
7885
- console.log(chalk.green(`Key exported to ${opts.output}`));
7886
- console.log(chalk.bold('Passphrase: ') + chalk.yellow(password));
7887
- console.log(chalk.dim('Store this passphrase separately from the export file.'));
8323
+ const tag = cipher.getAuthTag();
8324
+ const data = { iv: iv.toString('hex'), tag: tag.toString('hex'), data: encrypted };
8325
+ fs.writeFileSync(file, JSON.stringify(data));
8326
+ console.log(chalk.green(`Exported to ${file}`));
8327
+ console.log(chalk.bold(`Passphrase: ${chalk.yellow(passphrase)}`));
8328
+ console.log(chalk.dim(' Store passphrase separately from the export file.\n'));
7888
8329
  });
7889
8330
 
7890
8331
  identityCmd.command('import <file>')
7891
- .description('Import key from encrypted backup')
7892
- .option('-p, --passphrase <pass>', 'Decryption passphrase')
8332
+ .description('Import pi key from encrypted backup')
8333
+ .requiredOption('-p, --passphrase <pass>', 'Decryption passphrase')
7893
8334
  .action((file, opts) => {
7894
- if (!opts.passphrase) { console.error(chalk.red('Passphrase required (--passphrase)')); process.exit(1); }
7895
8335
  try {
7896
8336
  const crypto = require('crypto');
7897
8337
  const raw = JSON.parse(fs.readFileSync(file, 'utf8'));
7898
- const decipher = crypto.createDecipheriv('aes-256-gcm', crypto.scryptSync(opts.passphrase, 'ruvector', 32), Buffer.from(raw.iv, 'hex'));
8338
+ const key = crypto.scryptSync(opts.passphrase, 'ruvector-pi', 32);
8339
+ const decipher = crypto.createDecipheriv('aes-256-gcm', key, Buffer.from(raw.iv, 'hex'));
7899
8340
  decipher.setAuthTag(Buffer.from(raw.tag, 'hex'));
7900
- let key = decipher.update(raw.data, 'hex', 'utf8');
7901
- key += decipher.final('utf8');
7902
- const pseudonym = crypto.createHash('shake256', { outputLength: 16 }).update(key).digest('hex');
8341
+ let decrypted = decipher.update(raw.data, 'hex', 'utf8');
8342
+ decrypted += decipher.final('utf8');
7903
8343
  console.log(chalk.green('Key imported successfully.'));
7904
- console.log(chalk.bold('Pseudonym: ') + chalk.green(pseudonym));
7905
- console.log(chalk.dim(`\nSet it: export PI=${key}`));
7906
- } catch (e) {
7907
- console.error(chalk.red('Import failed:'), e.message);
7908
- process.exit(1);
7909
- }
8344
+ console.log(chalk.cyan(` export PI=${decrypted}\n`));
8345
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7910
8346
  });
7911
8347
 
7912
8348
  // ============================================================================
7913
- // LLM Commands — LLM orchestration via @ruvector/ruvllm (lazy-loaded)
8349
+ // LLM Commands — Text embeddings via @ruvector/ruvllm (lazy-loaded)
7914
8350
  // ============================================================================
7915
8351
 
7916
- const llmCmd = program.command('llm').description('LLM orchestration embeddings, models, benchmarks');
8352
+ const llmCmd = program.command('llm').description('LLM embeddings and inference via @ruvector/ruvllm');
8353
+
8354
+ function requireRuvllm() {
8355
+ try { return require('@ruvector/ruvllm'); } catch {
8356
+ console.error(chalk.red('LLM commands require @ruvector/ruvllm'));
8357
+ console.error(chalk.yellow(' npm install @ruvector/ruvllm'));
8358
+ process.exit(1);
8359
+ }
8360
+ }
7917
8361
 
7918
8362
  llmCmd.command('embed <text>')
7919
- .description('Generate embeddings via ruvllm')
7920
- .option('-m, --model <model>', 'Model name', 'default')
7921
- .option('--json', 'JSON output')
7922
- .action(async (text, opts) => {
8363
+ .description('Generate text embeddings')
8364
+ .option('-m, --model <model>', 'Model name')
8365
+ .option('--json', 'Output as JSON')
8366
+ .action((text, opts) => {
8367
+ const ruvllm = requireRuvllm();
7923
8368
  try {
7924
- const ruvllm = require('@ruvector/ruvllm');
7925
- const embed = ruvllm.embed || (ruvllm.default && ruvllm.default.embed);
7926
- if (!embed) throw new Error('ruvllm.embed not found');
7927
- const result = await embed(text, { model: opts.model });
7928
- if (opts.json || !process.stdout.isTTY) {
7929
- console.log(JSON.stringify(result));
7930
- } else {
7931
- const vec = result.embedding || result;
7932
- console.log(chalk.cyan(`Embedding (dim=${Array.isArray(vec) ? vec.length : '?'}):`));
7933
- if (Array.isArray(vec)) console.log(chalk.dim(` [${vec.slice(0, 8).map(v => v.toFixed(4)).join(', ')}${vec.length > 8 ? ', ...' : ''}]`));
7934
- }
7935
- } catch (e) {
7936
- if (e.code === 'MODULE_NOT_FOUND') {
7937
- console.error(chalk.red('LLM commands require @ruvector/ruvllm'));
7938
- console.error(chalk.yellow(' npm install @ruvector/ruvllm'));
7939
- } else {
7940
- console.error(chalk.red('Embed failed:'), e.message);
7941
- }
7942
- process.exit(1);
7943
- }
8369
+ const embedding = ruvllm.embed ? ruvllm.embed(text, opts.model) : ruvllm.generateEmbedding(text);
8370
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify({ embedding, dimension: embedding.length })); return; }
8371
+ console.log(chalk.bold.cyan('\nEmbedding Generated\n'));
8372
+ console.log(` ${chalk.bold('Dimension:')} ${embedding.length}`);
8373
+ console.log(` ${chalk.bold('Preview:')} [${embedding.slice(0, 5).map(v => v.toFixed(4)).join(', ')}...]`);
8374
+ console.log();
8375
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); process.exit(1); }
7944
8376
  });
7945
8377
 
7946
8378
  llmCmd.command('models')
7947
8379
  .description('List available LLM models')
7948
- .action(async () => {
8380
+ .action(() => {
8381
+ const ruvllm = requireRuvllm();
7949
8382
  try {
7950
- const ruvllm = require('@ruvector/ruvllm');
7951
- const models = ruvllm.listModels ? await ruvllm.listModels() : [];
7952
- if (models.length === 0) { console.log(chalk.yellow('No models found.')); return; }
7953
- console.log(chalk.bold.cyan('\nAvailable Models\n'));
7954
- models.forEach(m => {
7955
- console.log(` ${chalk.bold(m.name || m.id)} ${chalk.dim(m.provider || '')} ${chalk.dim(m.size || '')}`);
7956
- });
7957
- console.log();
7958
- } catch (e) {
7959
- if (e.code === 'MODULE_NOT_FOUND') {
7960
- console.error(chalk.red('Requires @ruvector/ruvllm: npm install @ruvector/ruvllm'));
8383
+ if (typeof ruvllm.listModels === 'function') {
8384
+ const models = ruvllm.listModels();
8385
+ models.forEach(m => console.log(` ${chalk.green(m.name || m)} ${chalk.dim(m.description || '')}`));
7961
8386
  } else {
7962
- console.error(chalk.red('Failed:'), e.message);
8387
+ console.log(chalk.dim(' Model listing requires @ruvector/ruvllm >=2.1.0'));
8388
+ console.log(chalk.dim(' Available: MiniLM-L6 (default embedding model)'));
7963
8389
  }
7964
- process.exit(1);
7965
- }
8390
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
7966
8391
  });
7967
8392
 
7968
8393
  llmCmd.command('benchmark')
7969
8394
  .description('Benchmark LLM inference performance')
7970
- .option('-n, --iterations <n>', 'Iterations', '100')
7971
- .option('-m, --model <model>', 'Model name', 'default')
7972
- .action(async (opts) => {
7973
- const spinner = ora('Running LLM benchmark...');
7974
- spinner.start();
7975
- try {
7976
- const ruvllm = require('@ruvector/ruvllm');
7977
- const embed = ruvllm.embed || (ruvllm.default && ruvllm.default.embed);
7978
- if (!embed) throw new Error('ruvllm.embed not found');
7979
- const n = parseInt(opts.iterations);
8395
+ .option('-n, --iterations <n>', 'Number of iterations', '100')
8396
+ .action((opts) => {
8397
+ const ruvllm = requireRuvllm();
8398
+ const n = parseInt(opts.iterations);
8399
+ const text = 'The quick brown fox jumps over the lazy dog';
8400
+ const times = [];
8401
+ for (let i = 0; i < n; i++) {
7980
8402
  const start = performance.now();
7981
- for (let i = 0; i < n; i++) await embed(`benchmark text ${i}`, { model: opts.model });
7982
- const elapsed = performance.now() - start;
7983
- spinner.stop();
7984
- console.log(chalk.bold.cyan('\nLLM Benchmark Results'));
7985
- console.log(chalk.dim('-'.repeat(40)));
7986
- console.log(` Iterations: ${n}`);
7987
- console.log(` Total: ${(elapsed / 1000).toFixed(2)}s`);
7988
- console.log(` Avg: ${(elapsed / n).toFixed(2)}ms/embed`);
7989
- console.log(` Throughput: ${(n / (elapsed / 1000)).toFixed(1)} embeds/s`);
7990
- console.log();
7991
- } catch (e) {
7992
- spinner.stop();
7993
- if (e.code === 'MODULE_NOT_FOUND') {
7994
- console.error(chalk.red('Requires @ruvector/ruvllm'));
7995
- } else {
7996
- console.error(chalk.red('Benchmark failed:'), e.message);
7997
- }
7998
- process.exit(1);
7999
- }
8403
+ ruvllm.embed ? ruvllm.embed(text) : ruvllm.generateEmbedding(text);
8404
+ times.push(performance.now() - start);
8405
+ }
8406
+ times.sort((a, b) => a - b);
8407
+ console.log(chalk.bold.cyan('\nLLM Benchmark\n'));
8408
+ console.log(` ${chalk.bold('Iterations:')} ${n}`);
8409
+ console.log(` ${chalk.bold('P50:')} ${times[Math.floor(n * 0.5)].toFixed(2)}ms`);
8410
+ console.log(` ${chalk.bold('P95:')} ${times[Math.floor(n * 0.95)].toFixed(2)}ms`);
8411
+ console.log(` ${chalk.bold('P99:')} ${times[Math.floor(n * 0.99)].toFixed(2)}ms`);
8412
+ console.log(` ${chalk.bold('Mean:')} ${(times.reduce((a, b) => a + b, 0) / n).toFixed(2)}ms`);
8413
+ console.log();
8414
+ });
8415
+
8416
llmCmd.command('info')
  .description('Show RuvLLM module information')
  .action(() => {
    // Report the version and SIMD availability of the loaded RuvLLM module.
    const ruvllm = requireRuvllm();
    // version may be exposed as a function or a plain property.
    const version = typeof ruvllm.version === 'function'
      ? ruvllm.version()
      : (ruvllm.version || 'unknown');
    const simd = ruvllm.simdEnabled ? 'enabled' : 'not detected';
    console.log(chalk.bold.cyan('\nRuvLLM Info\n'));
    console.log(` ${chalk.bold('Version:')} ${version}`);
    console.log(` ${chalk.bold('SIMD:')} ${simd}`);
    console.log();
  });
8001
8425
 
8002
8426
// ============================================================================
// SONA Commands — Self-Optimizing Neural Architecture (lazy-loaded)
// ============================================================================

const sonaCmd = program.command('sona').description('SONA adaptive learning — status, patterns, train, export');

/**
 * Lazily load the optional @ruvector/sona dependency.
 * Exits the process with code 1 when the module is missing or fails to load.
 * @returns {object} The loaded @ruvector/sona module.
 */
function loadSona() {
  try {
    return require('@ruvector/sona');
  } catch (e) {
    // Only a missing package warrants the install hint; any other load
    // failure (e.g. a broken native binding) is reported as-is so the
    // user is not misled into reinstalling a package they already have.
    if (e.code === 'MODULE_NOT_FOUND') {
      console.error(chalk.red('SONA commands require @ruvector/sona'));
      console.error(chalk.yellow(' npm install @ruvector/sona'));
    } else {
      console.error(chalk.red(`Failed to load @ruvector/sona: ${e.message}`));
    }
    process.exit(1);
  }
}
8007
8439
 
8008
8440
sonaCmd.command('status')
  .description('Show SONA learning engine status')
  .option('--json', 'Output as JSON')
  .action((opts) => {
    const sona = loadSona();
    try {
      // Prefer the native engine; fall back to the JS coordinator. Guard the
      // case where neither export exists, which previously crashed with a
      // confusing "not a constructor" TypeError.
      const Engine = sona.SonaEngine || sona.SonaCoordinator;
      if (!Engine) throw new Error('@ruvector/sona exposes neither SonaEngine nor SonaCoordinator');
      const engine = new Engine();
      const status = engine.getStatus ? engine.getStatus() : { ready: true };
      // Machine-readable output when piped or when --json is given.
      if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(status, null, 2)); return; }
      console.log(chalk.bold.cyan('\nSONA Status\n'));
      Object.entries(status).forEach(([k, v]) => console.log(` ${chalk.bold(k + ':')} ${v}`));
      console.log();
    } catch (e) {
      console.error(chalk.red(`Error: ${e.message}`));
      process.exit(1); // signal failure to scripts/CI instead of exiting 0
    }
  });
8035
8454
 
8036
sonaCmd.command('patterns <query>')
  .description('Search learned patterns')
  .option('-t, --threshold <n>', 'Similarity threshold', '0.5')
  .option('--json', 'Output as JSON')
  .action((query, opts) => {
    const sona = loadSona();
    try {
      const Engine = sona.SonaEngine || sona.SonaCoordinator;
      if (!Engine) throw new Error('@ruvector/sona exposes neither SonaEngine nor SonaCoordinator');
      const engine = new Engine();
      // Reject a malformed --threshold up front; NaN would silently break
      // similarity filtering inside the engine.
      const threshold = Number.parseFloat(opts.threshold);
      if (Number.isNaN(threshold)) throw new Error(`Invalid threshold: ${opts.threshold}`);
      const patterns = engine.findPatterns ? engine.findPatterns(query, { threshold }) : [];
      if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(patterns, null, 2)); return; }
      console.log(chalk.bold.cyan('\nLearned Patterns\n'));
      if (!patterns.length) { console.log(chalk.dim(' No patterns found.\n')); return; }
      // Show a human label when available, otherwise a truncated JSON preview.
      patterns.forEach((p, i) => console.log(` ${chalk.yellow(i + 1 + '.')} ${p.name || p.pattern || JSON.stringify(p).slice(0, 80)}`));
      console.log();
    } catch (e) {
      console.error(chalk.red(`Error: ${e.message}`));
      process.exit(1); // non-zero exit so callers can detect failure
    }
  });
8064
8471
 
8065
sonaCmd.command('train <data>')
  .description('Record a training trajectory')
  .option('--outcome <outcome>', 'Outcome (success/failure)', 'success')
  .action((data, opts) => {
    const sona = loadSona();
    try {
      const Engine = sona.SonaEngine || sona.SonaCoordinator;
      if (!Engine) throw new Error('@ruvector/sona exposes neither SonaEngine nor SonaCoordinator');
      const engine = new Engine();
      // Bug fix: previously the command printed success even when the engine
      // supported neither API, silently dropping the trajectory.
      if (engine.recordTrajectory) {
        engine.recordTrajectory(data, opts.outcome);
      } else if (engine.train) {
        engine.train(data);
      } else {
        throw new Error('SONA engine supports neither recordTrajectory() nor train()');
      }
      console.log(chalk.green('Training trajectory recorded.'));
    } catch (e) {
      console.error(chalk.red(`Error: ${e.message}`));
      process.exit(1); // non-zero exit so callers can detect failure
    }
  });
8084
8484
 
8085
8485
sonaCmd.command('export')
  .description('Export SONA learned weights to JSON')
  .option('-o, --output <file>', 'Output file', 'sona-weights.json')
  .action((opts) => {
    const sona = loadSona();
    try {
      const Engine = sona.SonaEngine || sona.SonaCoordinator;
      if (!Engine) throw new Error('@ruvector/sona exposes neither SonaEngine nor SonaCoordinator');
      const engine = new Engine();
      // Try the richer exportWeights() API first, then the generic export();
      // fall back to an empty object so the output file is always valid JSON.
      const weights = engine.exportWeights ? engine.exportWeights() : engine.export ? engine.export() : {};
      fs.writeFileSync(opts.output, JSON.stringify(weights, null, 2));
      console.log(chalk.green(`Exported to ${opts.output}`));
    } catch (e) {
      console.error(chalk.red(`Error: ${e.message}`));
      process.exit(1); // non-zero exit so callers can detect failure
    }
  });
8100
8497
 
8101
8498
sonaCmd.command('stats')
  .description('Show detailed SONA learning statistics')
  .option('--json', 'Output as JSON')
  .action((opts) => {
    const sona = loadSona();
    try {
      const Engine = sona.SonaEngine || sona.SonaCoordinator;
      if (!Engine) throw new Error('@ruvector/sona exposes neither SonaEngine nor SonaCoordinator');
      const engine = new Engine();
      // Support both getStats() and stats() engine APIs.
      const stats = engine.getStats ? engine.getStats() : engine.stats ? engine.stats() : {};
      if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(stats, null, 2)); return; }
      console.log(chalk.bold.cyan('\nSONA Statistics\n'));
      Object.entries(stats).forEach(([k, v]) => console.log(` ${chalk.bold(k + ':')} ${v}`));
      console.log();
    } catch (e) {
      console.error(chalk.red(`Error: ${e.message}`));
      process.exit(1); // non-zero exit so callers can detect failure
    }
  });
8512
+
8513
sonaCmd.command('info')
  .description('Show SONA module availability')
  .action(() => {
    // Summarize which SONA implementation is present (native vs JS fallback).
    const sona = loadSona();
    const version = typeof sona.version === 'function'
      ? sona.version()
      : (sona.version || 'unknown');
    const engineKind = sona.SonaEngine ? 'Native' : 'JS Fallback';
    console.log(chalk.bold.cyan('\nSONA Info\n'));
    console.log(` ${chalk.bold('Version:')} ${version}`);
    console.log(` ${chalk.bold('Engine:')} ${engineKind}`);
    console.log();
  });
8522
+
8523
// ============================================================================
// Route Commands — Semantic routing via @ruvector/router (lazy-loaded)
// ============================================================================

const routeCmd = program.command('route').description('Semantic routing — classify inputs to routes via HNSW + SIMD');

/**
 * Lazily load the optional @ruvector/router dependency.
 * Exits the process with code 1 when the module is missing or fails to load.
 * @returns {object} The loaded @ruvector/router module.
 */
function requireRouter() {
  try {
    return require('@ruvector/router');
  } catch (e) {
    // Distinguish "not installed" (show install hint) from a genuine load
    // failure, which would otherwise be misreported as a missing package.
    if (e.code === 'MODULE_NOT_FOUND') {
      console.error(chalk.red('Route commands require @ruvector/router'));
      console.error(chalk.yellow(' npm install @ruvector/router'));
    } else {
      console.error(chalk.red(`Failed to load @ruvector/router: ${e.message}`));
    }
    process.exit(1);
  }
}
8536
+
8537
+ routeCmd.command('classify <input>')
8538
+ .description('Classify input to a semantic route')
8539
+ .option('--json', 'Output as JSON')
8540
+ .action((input, opts) => {
8541
+ const router = requireRouter();
8542
+ try {
8543
+ const result = router.classify ? router.classify(input) : { route: 'default', confidence: 1.0 };
8544
+ if (opts.json || !process.stdout.isTTY) { console.log(JSON.stringify(result)); return; }
8545
+ console.log(chalk.bold.cyan('\nRoute Classification\n'));
8546
+ console.log(` ${chalk.bold('Input:')} ${input}`);
8547
+ console.log(` ${chalk.bold('Route:')} ${chalk.green(result.route)}`);
8548
+ console.log(` ${chalk.bold('Confidence:')} ${result.confidence}`);
8549
+ console.log();
8550
+ } catch (e) { console.error(chalk.red(`Error: ${e.message}`)); }
8551
+ });
8552
+
8553
routeCmd.command('benchmark')
  .description('Benchmark routing throughput')
  .option('-n, --iterations <n>', 'Number of iterations', '1000')
  .action((opts) => {
    const router = requireRouter();
    // Validate the iteration count: parseInt without a radix and without a
    // NaN check previously produced a zero-iteration loop and bogus stats.
    const n = Number.parseInt(opts.iterations, 10);
    if (!Number.isInteger(n) || n <= 0) {
      console.error(chalk.red(`Invalid iteration count: ${opts.iterations}`));
      process.exit(1);
    }
    // Benchmarking a missing classify() would just time an empty loop and
    // report a meaningless throughput number.
    if (typeof router.classify !== 'function') {
      console.error(chalk.red('Router module does not expose classify(); nothing to benchmark.'));
      process.exit(1);
    }
    const input = 'test input for routing benchmark';
    const start = performance.now();
    for (let i = 0; i < n; i++) {
      router.classify(input);
    }
    const elapsed = performance.now() - start;
    console.log(chalk.bold.cyan('\nRoute Benchmark\n'));
    console.log(` ${chalk.bold('Iterations:')} ${n}`);
    console.log(` ${chalk.bold('Total:')} ${elapsed.toFixed(2)}ms`);
    console.log(` ${chalk.bold('Per-route:')} ${(elapsed / n).toFixed(3)}ms`);
    console.log(` ${chalk.bold('Throughput:')} ${Math.floor(n / (elapsed / 1000))}/sec`);
    console.log();
  });
8572
+
8573
routeCmd.command('info')
  .description('Show router module information')
  .action(() => {
    // Print the router package version (exposed as a function or property).
    const router = requireRouter();
    const version = typeof router.version === 'function'
      ? router.version()
      : (router.version || 'unknown');
    console.log(chalk.bold.cyan('\nRouter Info\n'));
    console.log(` ${chalk.bold('Version:')} ${version}`);
    console.log();
  });
8128
8581
 
8129
8582
// Parse CLI arguments and dispatch to the matching subcommand.
program.parse(process.argv);