ruvector 0.1.24 → 0.1.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
 {
-  "startTime":
-  "sessionId": "session-
-  "lastActivity":
+  "startTime": 1764542370768,
+  "sessionId": "session-1764542370768",
+  "lastActivity": 1764542370768,
   "sessionDuration": 0,
   "totalTasks": 1,
   "successfulTasks": 1,
@@ -1,10 +1,10 @@
 [
   {
-    "id": "cmd-hooks-
+    "id": "cmd-hooks-1764542370895",
     "type": "hooks",
     "success": true,
-    "duration":
-    "timestamp":
+    "duration": 15.263496999999973,
+    "timestamp": 1764542370910,
     "metadata": {}
   }
 ]
package/bin/cli.js CHANGED
@@ -47,6 +47,46 @@ try {
   // GNN not available - commands will show helpful message
 }
 
+// Import Attention (optional - graceful fallback if not available)
+let DotProductAttention, MultiHeadAttention, HyperbolicAttention, FlashAttention, LinearAttention, MoEAttention;
+let GraphRoPeAttention, EdgeFeaturedAttention, DualSpaceAttention, LocalGlobalAttention;
+let benchmarkAttention, computeAttentionAsync, batchAttentionCompute, parallelAttentionCompute;
+let expMap, logMap, mobiusAddition, poincareDistance, projectToPoincareBall;
+let attentionInfo, attentionVersion;
+let attentionAvailable = false;
+try {
+  const attention = require('@ruvector/attention');
+  // Core mechanisms
+  DotProductAttention = attention.DotProductAttention;
+  MultiHeadAttention = attention.MultiHeadAttention;
+  HyperbolicAttention = attention.HyperbolicAttention;
+  FlashAttention = attention.FlashAttention;
+  LinearAttention = attention.LinearAttention;
+  MoEAttention = attention.MoEAttention;
+  // Graph attention
+  GraphRoPeAttention = attention.GraphRoPeAttention;
+  EdgeFeaturedAttention = attention.EdgeFeaturedAttention;
+  DualSpaceAttention = attention.DualSpaceAttention;
+  LocalGlobalAttention = attention.LocalGlobalAttention;
+  // Utilities
+  benchmarkAttention = attention.benchmarkAttention;
+  computeAttentionAsync = attention.computeAttentionAsync;
+  batchAttentionCompute = attention.batchAttentionCompute;
+  parallelAttentionCompute = attention.parallelAttentionCompute;
+  // Hyperbolic math
+  expMap = attention.expMap;
+  logMap = attention.logMap;
+  mobiusAddition = attention.mobiusAddition;
+  poincareDistance = attention.poincareDistance;
+  projectToPoincareBall = attention.projectToPoincareBall;
+  // Meta
+  attentionInfo = attention.info;
+  attentionVersion = attention.version;
+  attentionAvailable = true;
+} catch (e) {
+  // Attention not available - commands will show helpful message
+}
+
 const program = new Command();
 
 // Get package version from package.json
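The guard above mirrors the existing GNN fallback: every binding is declared with `let` at module scope, so a failed `require` leaves the names defined (as `undefined`) rather than triggering a ReferenceError later, and a single `attentionAvailable` flag gates the new commands. A minimal sketch of the same pattern, using a hypothetical package name rather than a real dependency:

// Minimal sketch of the optional-dependency guard used in the diff above.
// 'some-optional-addon' is a hypothetical package name for illustration.
let addonFeature;
let addonAvailable = false;
try {
  const addon = require('some-optional-addon');
  addonFeature = addon.feature;
  addonAvailable = true;
} catch (e) {
  // Not installed: leave addonAvailable false so commands can print
  // an install hint instead of crashing at startup.
}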
@@ -855,6 +895,422 @@ gnnCmd
     console.log(chalk.gray(`  binary (freq <= 0.01) - ~32x compression, archive`));
   });
 
+// =============================================================================
+// Attention Commands
+// =============================================================================
+
+// Helper to require attention module
+function requireAttention() {
+  if (!attentionAvailable) {
+    console.error(chalk.red('Error: @ruvector/attention is not installed'));
+    console.error(chalk.yellow('Install it with: npm install @ruvector/attention'));
+    process.exit(1);
+  }
+}
+
+// Attention parent command
+const attentionCmd = program
+  .command('attention')
+  .description('High-performance attention mechanism operations');
+
+// Attention compute command - run attention on input vectors
+attentionCmd
+  .command('compute')
+  .description('Compute attention over input vectors')
+  .requiredOption('-q, --query <json>', 'Query vector as JSON array')
+  .requiredOption('-k, --keys <file>', 'Keys file (JSON array of vectors)')
+  .option('-v, --values <file>', 'Values file (JSON array of vectors, defaults to keys)')
+  .option('-t, --type <type>', 'Attention type (dot|multi-head|flash|hyperbolic|linear)', 'dot')
+  .option('-h, --heads <number>', 'Number of attention heads (for multi-head)', '4')
+  .option('-d, --head-dim <number>', 'Head dimension (for multi-head)', '64')
+  .option('--curvature <number>', 'Curvature for hyperbolic attention', '1.0')
+  .option('-o, --output <file>', 'Output file for results')
+  .action((options) => {
+    requireAttention();
+    const spinner = ora('Loading keys...').start();
+
+    try {
+      const query = JSON.parse(options.query);
+      const keysData = JSON.parse(fs.readFileSync(options.keys, 'utf8'));
+      const keys = keysData.map(k => k.vector || k);
+
+      let values = keys;
+      if (options.values) {
+        const valuesData = JSON.parse(fs.readFileSync(options.values, 'utf8'));
+        values = valuesData.map(v => v.vector || v);
+      }
+
+      spinner.text = `Computing ${options.type} attention...`;
+
+      let result;
+      let attentionWeights;
+
+      switch (options.type) {
+        case 'dot': {
+          const attn = new DotProductAttention();
+          const queryMat = [query];
+          const output = attn.forward(queryMat, keys, values);
+          result = output[0];
+          attentionWeights = attn.getLastWeights ? attn.getLastWeights()[0] : null;
+          break;
+        }
+        case 'multi-head': {
+          const numHeads = parseInt(options.heads);
+          const headDim = parseInt(options.headDim);
+          const attn = new MultiHeadAttention(query.length, numHeads, headDim);
+          const queryMat = [query];
+          const output = attn.forward(queryMat, keys, values);
+          result = output[0];
+          break;
+        }
+        case 'flash': {
+          const attn = new FlashAttention(query.length);
+          const queryMat = [query];
+          const output = attn.forward(queryMat, keys, values);
+          result = output[0];
+          break;
+        }
+        case 'hyperbolic': {
+          const curvature = parseFloat(options.curvature);
+          const attn = new HyperbolicAttention(query.length, curvature);
+          const queryMat = [query];
+          const output = attn.forward(queryMat, keys, values);
+          result = output[0];
+          break;
+        }
+        case 'linear': {
+          const attn = new LinearAttention(query.length);
+          const queryMat = [query];
+          const output = attn.forward(queryMat, keys, values);
+          result = output[0];
+          break;
+        }
+        default:
+          throw new Error(`Unknown attention type: ${options.type}`);
+      }
+
+      spinner.succeed(chalk.green(`Attention computed (${options.type})`));
+
+      console.log(chalk.cyan('\nAttention Results:'));
+      console.log(chalk.white(`  Type: ${chalk.yellow(options.type)}`));
+      console.log(chalk.white(`  Query dim: ${chalk.yellow(query.length)}`));
+      console.log(chalk.white(`  Num keys: ${chalk.yellow(keys.length)}`));
+      console.log(chalk.white(`  Output dim: ${chalk.yellow(result.length)}`));
+      console.log(chalk.white(`  Output: ${chalk.gray(`[${result.slice(0, 4).map(v => v.toFixed(4)).join(', ')}...]`)}`));
+
+      if (attentionWeights) {
+        console.log(chalk.cyan('\nAttention Weights:'));
+        attentionWeights.slice(0, 5).forEach((w, i) => {
+          console.log(chalk.gray(`  Key ${i}: ${w.toFixed(4)}`));
+        });
+        if (attentionWeights.length > 5) {
+          console.log(chalk.gray(`  ... and ${attentionWeights.length - 5} more`));
+        }
+      }
+
+      if (options.output) {
+        const outputData = { result, attentionWeights };
+        fs.writeFileSync(options.output, JSON.stringify(outputData, null, 2));
+        console.log(chalk.green(`\nResults saved to: ${options.output}`));
+      }
+    } catch (error) {
+      spinner.fail(chalk.red('Failed to compute attention'));
+      console.error(chalk.red(error.message));
+      process.exit(1);
+    }
+  });
+
+// Attention benchmark command
+attentionCmd
+  .command('benchmark')
+  .description('Benchmark attention mechanisms')
+  .option('-d, --dimension <number>', 'Vector dimension', '256')
+  .option('-n, --num-vectors <number>', 'Number of vectors', '100')
+  .option('-i, --iterations <number>', 'Benchmark iterations', '100')
+  .option('-t, --types <list>', 'Attention types to benchmark (comma-separated)', 'dot,flash,linear')
+  .action((options) => {
+    requireAttention();
+    const spinner = ora('Setting up benchmark...').start();
+
+    try {
+      const dim = parseInt(options.dimension);
+      const numVectors = parseInt(options.numVectors);
+      const iterations = parseInt(options.iterations);
+      const types = options.types.split(',').map(t => t.trim());
+
+      // Generate random test data
+      spinner.text = 'Generating test data...';
+      const query = Array.from({ length: dim }, () => Math.random());
+      const keys = Array.from({ length: numVectors }, () =>
+        Array.from({ length: dim }, () => Math.random())
+      );
+
+      console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
+      console.log(chalk.cyan('  Attention Mechanism Benchmark'));
+      console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
+
+      console.log(chalk.white(`  Dimension: ${chalk.yellow(dim)}`));
+      console.log(chalk.white(`  Vectors: ${chalk.yellow(numVectors)}`));
+      console.log(chalk.white(`  Iterations: ${chalk.yellow(iterations)}`));
+      console.log('');
+
+      const results = [];
+
+      for (const type of types) {
+        spinner.text = `Benchmarking ${type} attention...`;
+        spinner.start();
+
+        let attn;
+        try {
+          switch (type) {
+            case 'dot':
+              attn = new DotProductAttention();
+              break;
+            case 'flash':
+              attn = new FlashAttention(dim);
+              break;
+            case 'linear':
+              attn = new LinearAttention(dim);
+              break;
+            case 'hyperbolic':
+              attn = new HyperbolicAttention(dim, 1.0);
+              break;
+            case 'multi-head':
+              attn = new MultiHeadAttention(dim, 4, 64);
+              break;
+            default:
+              console.log(chalk.yellow(`  Skipping unknown type: ${type}`));
+              continue;
+          }
+        } catch (e) {
+          console.log(chalk.yellow(`  ${type}: not available`));
+          continue;
+        }
+
+        // Warm up
+        const queryMat = [query];
+        for (let i = 0; i < 5; i++) {
+          attn.forward(queryMat, keys, keys);
+        }
+
+        // Benchmark
+        const start = process.hrtime.bigint();
+        for (let i = 0; i < iterations; i++) {
+          attn.forward(queryMat, keys, keys);
+        }
+        const end = process.hrtime.bigint();
+        const totalMs = Number(end - start) / 1_000_000;
+        const avgMs = totalMs / iterations;
+        const opsPerSec = 1000 / avgMs;
+
+        results.push({ type, avgMs, opsPerSec });
+        spinner.succeed(chalk.green(`${type}: ${avgMs.toFixed(3)} ms/op (${opsPerSec.toFixed(1)} ops/sec)`));
+      }
+
+      // Summary
+      if (results.length > 0) {
+        console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
+        console.log(chalk.cyan('  Summary'));
+        console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
+
+        const fastest = results.reduce((a, b) => a.avgMs < b.avgMs ? a : b);
+        console.log(chalk.green(`  Fastest: ${fastest.type} (${fastest.avgMs.toFixed(3)} ms/op)\n`));
+
+        console.log(chalk.white('  Relative Performance:'));
+        for (const r of results) {
+          const relPerf = (fastest.avgMs / r.avgMs * 100).toFixed(1);
+          const bar = '█'.repeat(Math.round(relPerf / 5));
+          console.log(chalk.white(`    ${r.type.padEnd(12)} ${chalk.cyan(bar)} ${relPerf}%`));
+        }
+      }
+    } catch (error) {
+      spinner.fail(chalk.red('Benchmark failed'));
+      console.error(chalk.red(error.message));
+      process.exit(1);
+    }
+  });
+
+// Hyperbolic math command
+attentionCmd
+  .command('hyperbolic')
+  .description('Hyperbolic geometry operations')
+  .requiredOption('-a, --action <type>', 'Action: exp-map|log-map|distance|project|mobius-add')
+  .requiredOption('-v, --vector <json>', 'Input vector(s) as JSON')
+  .option('-b, --vector-b <json>', 'Second vector for binary operations')
+  .option('-c, --curvature <number>', 'Poincaré ball curvature', '1.0')
+  .option('-o, --origin <json>', 'Origin point for exp/log maps')
+  .action((options) => {
+    requireAttention();
+
+    try {
+      const vecArray = JSON.parse(options.vector);
+      const vec = new Float32Array(vecArray);
+      const curvature = parseFloat(options.curvature);
+
+      let result;
+      let description;
+
+      switch (options.action) {
+        case 'exp-map': {
+          const originArray = options.origin ? JSON.parse(options.origin) : Array(vec.length).fill(0);
+          const origin = new Float32Array(originArray);
+          result = expMap(origin, vec, curvature);
+          description = 'Exponential map (tangent → Poincaré ball)';
+          break;
+        }
+        case 'log-map': {
+          const originArray = options.origin ? JSON.parse(options.origin) : Array(vec.length).fill(0);
+          const origin = new Float32Array(originArray);
+          result = logMap(origin, vec, curvature);
+          description = 'Logarithmic map (Poincaré ball → tangent)';
+          break;
+        }
+        case 'distance': {
+          if (!options.vectorB) {
+            throw new Error('--vector-b required for distance calculation');
+          }
+          const vecBArray = JSON.parse(options.vectorB);
+          const vecB = new Float32Array(vecBArray);
+          result = poincareDistance(vec, vecB, curvature);
+          description = 'Poincaré distance';
+          break;
+        }
+        case 'project': {
+          result = projectToPoincareBall(vec, curvature);
+          description = 'Project to Poincaré ball';
+          break;
+        }
+        case 'mobius-add': {
+          if (!options.vectorB) {
+            throw new Error('--vector-b required for Möbius addition');
+          }
+          const vecBArray = JSON.parse(options.vectorB);
+          const vecB = new Float32Array(vecBArray);
+          result = mobiusAddition(vec, vecB, curvature);
+          description = 'Möbius addition';
+          break;
+        }
+        default:
+          throw new Error(`Unknown action: ${options.action}`);
+      }
+
+      console.log(chalk.cyan('\nHyperbolic Operation:'));
+      console.log(chalk.white(`  Action: ${chalk.yellow(description)}`));
+      console.log(chalk.white(`  Curvature: ${chalk.yellow(curvature)}`));
+
+      if (typeof result === 'number') {
+        console.log(chalk.white(`  Result: ${chalk.green(result.toFixed(6))}`));
+      } else {
+        const resultArray = Array.from(result);
+        console.log(chalk.white(`  Input dim: ${chalk.yellow(vec.length)}`));
+        console.log(chalk.white(`  Output dim: ${chalk.yellow(resultArray.length)}`));
+        console.log(chalk.white(`  Result: ${chalk.gray(`[${resultArray.slice(0, 5).map(v => v.toFixed(4)).join(', ')}...]`)}`));
+
+        // Compute norm to verify it's in the ball
+        const norm = Math.sqrt(resultArray.reduce((sum, x) => sum + x * x, 0));
+        console.log(chalk.white(`  Norm: ${chalk.yellow(norm.toFixed(6))} ${norm < 1 ? chalk.green('(inside ball)') : chalk.red('(outside ball)')}`));
+      }
+    } catch (error) {
+      console.error(chalk.red('Hyperbolic operation failed:'), error.message);
+      process.exit(1);
+    }
+  });
+
+// Attention info command
+attentionCmd
+  .command('info')
+  .description('Show attention module information')
+  .action(() => {
+    if (!attentionAvailable) {
+      console.log(chalk.yellow('\nAttention Module: Not installed'));
+      console.log(chalk.white('Install with: npm install @ruvector/attention'));
+      return;
+    }
+
+    console.log(chalk.cyan('\nAttention Module Information'));
+    console.log(chalk.white(`  Status: ${chalk.green('Available')}`));
+    console.log(chalk.white(`  Version: ${chalk.yellow(attentionVersion ? attentionVersion() : 'unknown')}`));
+    console.log(chalk.white(`  Platform: ${chalk.yellow(process.platform)}`));
+    console.log(chalk.white(`  Architecture: ${chalk.yellow(process.arch)}`));
+
+    console.log(chalk.cyan('\nCore Attention Mechanisms:'));
+    console.log(chalk.white(`  • DotProductAttention - Scaled dot-product attention`));
+    console.log(chalk.white(`  • MultiHeadAttention - Multi-head self-attention`));
+    console.log(chalk.white(`  • FlashAttention - Memory-efficient IO-aware attention`));
+    console.log(chalk.white(`  • HyperbolicAttention - Poincaré ball attention`));
+    console.log(chalk.white(`  • LinearAttention - O(n) linear complexity attention`));
+    console.log(chalk.white(`  • MoEAttention - Mixture of Experts attention`));
+
+    console.log(chalk.cyan('\nGraph Attention:'));
+    console.log(chalk.white(`  • GraphRoPeAttention - Rotary position embeddings for graphs`));
+    console.log(chalk.white(`  • EdgeFeaturedAttention - Edge feature-enhanced attention`));
+    console.log(chalk.white(`  • DualSpaceAttention - Euclidean + hyperbolic dual space`));
+    console.log(chalk.white(`  • LocalGlobalAttention - Local-global graph attention`));
+
+    console.log(chalk.cyan('\nHyperbolic Math:'));
+    console.log(chalk.white(`  • expMap, logMap - Exponential/logarithmic maps`));
+    console.log(chalk.white(`  • mobiusAddition - Möbius addition in Poincaré ball`));
+    console.log(chalk.white(`  • poincareDistance - Hyperbolic distance metric`));
+    console.log(chalk.white(`  • projectToPoincareBall - Project vectors to ball`));
+
+    console.log(chalk.cyan('\nTraining Utilities:'));
+    console.log(chalk.white(`  • AdamOptimizer, AdamWOptimizer, SgdOptimizer`));
+    console.log(chalk.white(`  • InfoNceLoss, LocalContrastiveLoss`));
+    console.log(chalk.white(`  • CurriculumScheduler, TemperatureAnnealing`));
+    console.log(chalk.white(`  • HardNegativeMiner, InBatchMiner`));
+  });
+
+// Attention list command - list available mechanisms
+attentionCmd
+  .command('list')
+  .description('List all available attention mechanisms')
+  .option('-v, --verbose', 'Show detailed information')
+  .action((options) => {
+    console.log(chalk.cyan('\n═══════════════════════════════════════════════════════════════'));
+    console.log(chalk.cyan('  Available Attention Mechanisms'));
+    console.log(chalk.cyan('═══════════════════════════════════════════════════════════════\n'));
+
+    const mechanisms = [
+      { name: 'DotProductAttention', type: 'core', complexity: 'O(n²)', available: !!DotProductAttention },
+      { name: 'MultiHeadAttention', type: 'core', complexity: 'O(n²)', available: !!MultiHeadAttention },
+      { name: 'FlashAttention', type: 'core', complexity: 'O(n²) IO-optimized', available: !!FlashAttention },
+      { name: 'HyperbolicAttention', type: 'core', complexity: 'O(n²)', available: !!HyperbolicAttention },
+      { name: 'LinearAttention', type: 'core', complexity: 'O(n)', available: !!LinearAttention },
+      { name: 'MoEAttention', type: 'core', complexity: 'O(n*k)', available: !!MoEAttention },
+      { name: 'GraphRoPeAttention', type: 'graph', complexity: 'O(n²)', available: !!GraphRoPeAttention },
+      { name: 'EdgeFeaturedAttention', type: 'graph', complexity: 'O(n²)', available: !!EdgeFeaturedAttention },
+      { name: 'DualSpaceAttention', type: 'graph', complexity: 'O(n²)', available: !!DualSpaceAttention },
+      { name: 'LocalGlobalAttention', type: 'graph', complexity: 'O(n*k)', available: !!LocalGlobalAttention },
+    ];
+
+    console.log(chalk.white('  Core Attention:'));
+    mechanisms.filter(m => m.type === 'core').forEach(m => {
+      const status = m.available ? chalk.green('✓') : chalk.red('✗');
+      console.log(chalk.white(`    ${status} ${m.name.padEnd(22)} ${chalk.gray(m.complexity)}`));
+    });
+
+    console.log(chalk.white('\n  Graph Attention:'));
+    mechanisms.filter(m => m.type === 'graph').forEach(m => {
+      const status = m.available ? chalk.green('✓') : chalk.red('✗');
+      console.log(chalk.white(`    ${status} ${m.name.padEnd(22)} ${chalk.gray(m.complexity)}`));
+    });
+
+    if (!attentionAvailable) {
+      console.log(chalk.yellow('\n  Note: @ruvector/attention not installed'));
+      console.log(chalk.white('  Install with: npm install @ruvector/attention'));
+    }
+
+    if (options.verbose) {
+      console.log(chalk.cyan('\n  Usage Examples:'));
+      console.log(chalk.gray('    # Compute dot-product attention'));
+      console.log(chalk.white('    npx ruvector attention compute -q "[1,2,3]" -k keys.json -t dot'));
+      console.log(chalk.gray('\n    # Benchmark attention mechanisms'));
+      console.log(chalk.white('    npx ruvector attention benchmark -d 256 -n 100'));
+      console.log(chalk.gray('\n    # Hyperbolic distance'));
+      console.log(chalk.white('    npx ruvector attention hyperbolic -a distance -v "[0.1,0.2]" -b "[0.3,0.4]"'));
+    }
+  });
+
 // =============================================================================
 // Doctor Command - Check system health and dependencies
 // =============================================================================
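Taken together, the `compute` path reduces to: parse the query, load keys and values, construct a mechanism, and call `forward`. A stripped-down sketch of that flow, assuming the constructor and `forward(queries, keys, values)` signature used in the diff above (matrices are arrays of row vectors):

// Sketch of the `attention compute` core, under the same API assumptions
// as the diff above; the values shown are illustrative only.
const { DotProductAttention } = require('@ruvector/attention');

const query = [0.1, 0.2, 0.3];
const keys = [
  [1.0, 0.0, 0.0],
  [0.0, 1.0, 0.0],
];

const attn = new DotProductAttention();
// Pass keys as values too, as the CLI does when --values is omitted.
const output = attn.forward([query], keys, keys);
console.log(output[0]); // attended vector, same dimension as the query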
@@ -956,6 +1412,13 @@ program
     console.log(chalk.gray(`  ○ @ruvector/gnn not installed (optional)`));
   }
 
+  // Check @ruvector/attention
+  if (attentionAvailable) {
+    console.log(chalk.green(`  ✓ @ruvector/attention installed`));
+  } else {
+    console.log(chalk.gray(`  ○ @ruvector/attention not installed (optional)`));
+  }
+
   // Check @ruvector/graph-node
   try {
     require.resolve('@ruvector/graph-node');
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ruvector",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "description": "High-performance vector database for Node.js with automatic native/WASM fallback",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -29,7 +29,11 @@
     "wasm",
     "native",
     "ruv",
-    "ruvector"
+    "ruvector",
+    "attention",
+    "transformer",
+    "flash-attention",
+    "hyperbolic"
   ],
   "author": "ruv.io Team <info@ruv.io> (https://ruv.io)",
   "homepage": "https://ruv.io",
@@ -43,12 +47,15 @@
     "directory": "npm/packages/ruvector"
   },
   "dependencies": {
-    "@ruvector/core": "^0.1.
+    "@ruvector/core": "^0.1.16",
     "@ruvector/gnn": "^0.1.15",
     "commander": "^11.1.0",
     "chalk": "^4.1.2",
     "ora": "^5.4.1"
   },
+  "optionalDependencies": {
+    "@ruvector/attention": "^0.1.1"
+  },
   "devDependencies": {
     "@types/node": "^20.10.5",
     "typescript": "^5.3.3"
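A note on the new `optionalDependencies` entry: npm installs optional dependencies on a best-effort basis and does not fail the overall install if one cannot be fetched or built, which is exactly why `bin/cli.js` wraps its require in a try/catch. A quick standalone check of whether the optional backend actually resolved (illustrative snippet, not part of the package):

// Check at runtime whether the optional attention backend is present.
try {
  require.resolve('@ruvector/attention');
  console.log('@ruvector/attention resolved; attention commands are live');
} catch (e) {
  console.log('@ruvector/attention missing; the CLI will print an install hint');
}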
|