079project 4.0.0 → 5.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/main_Study.cjs CHANGED
@@ -273,7 +273,318 @@ async function deltaCloneRuntime(prevClone, srcRuntime) {
  clone.__deltaIndexes.vocabHash = srcVocabHash;
  return clone;
  }
+ // ...existing code...
+
+ // ===== Linear Algebra Backend (CSR + SpMM + Hash Embedding + PCA/UMAP) =====
+ class CSR {
+ constructor(rowPtr, colIdx, values, nRows, nCols) {
+ this.rowPtr = rowPtr; // Uint32Array length nRows+1
+ this.colIdx = colIdx; // Uint32Array length nnz
+ this.values = values; // Float32Array length nnz
+ this.nRows = nRows | 0;
+ this.nCols = nCols | 0;
+ this.nnz = this.values.length | 0;
+ }
+ }
+
+ class TensorEngine {
+ // y = A x (A in CSR, x dense Float32Array)
+ spmm(csr, x, out = null) {
+ const { rowPtr, colIdx, values, nRows } = csr;
+ const y = out instanceof Float32Array && out.length === nRows ? out : new Float32Array(nRows);
+ for (let i = 0; i < nRows; i++) {
+ let s = 0.0;
+ const start = rowPtr[i], end = rowPtr[i + 1];
+ for (let p = start; p < end; p++) {
+ s += values[p] * x[colIdx[p]];
+ }
+ y[i] = s;
+ }
+ return y;
+ }
+
+ // x = a*x + b*y
+ axpby(a, x, b, y, out = null) {
+ const n = x.length | 0;
+ const z = out instanceof Float32Array && out.length === n ? out : new Float32Array(n);
+ for (let i = 0; i < n; i++) z[i] = a * x[i] + b * y[i];
+ return z;
+ }
+
+ l2NormalizeRows(mat, nRows, nCols) {
+ for (let i = 0; i < nRows; i++) {
+ let s = 0.0, base = i * nCols;
+ for (let j = 0; j < nCols; j++) { const v = mat[base + j]; s += v * v; }
+ s = Math.sqrt(s) || 1.0;
+ for (let j = 0; j < nCols; j++) mat[base + j] /= s;
+ }
+ }
+
+ dot(a, b) {
+ let s = 0.0;
+ for (let i = 0; i < a.length; i++) s += a[i] * b[i];
+ return s;
+ }
+
+ // Iterative propagation (no path tracking; built for speed)
+ // return Float32Array activation over rows
+ iteratePropagation(csr, seeds, steps, actFn, decayK, damp = 0.02) {
+ const n = csr.nRows | 0;
+ let x = new Float32Array(n);
+ for (const [row, v] of seeds) { if (row >= 0 && row < n) x[row] += v; }
+
+ let y = new Float32Array(n);
+ for (let t = 0; t < steps; t++) {
+ // y = A x
+ this.spmm(csr, x, y);
+ // x = act(x + y - decayK*damp*x)
+ for (let i = 0; i < n; i++) {
+ const raw = x[i] + y[i] - (decayK * damp * x[i]);
+ x[i] = actFn(raw);
+ }
+ }
+ return x; // final activation
+ }
+ }
+
+ // Word-to-meme hash embedding: fixed-D feature hashing + L2 normalization
+ class GraphTensorBridge {
+ constructor(runtime) {
+ this.rt = runtime;
+ this.rowIndex = new Map(); // memeID -> row
+ this.rows = []; // row -> memeID
+ this.emb = null; // Float32Array [N*D]
+ this.dim = 0;
+ this.csrAll = null; // CSR (all directions)
+ this._multi = null; // {all,bi,out,in, id2row, row2id}
+ }
+
+ static fnv1a32(str) {
+ let h = 0x811c9dc5;
+ for (let i = 0; i < str.length; i++) {
+ h ^= str.charCodeAt(i);
+ h = (h + ((h << 1) + (h << 4) + (h << 7) + (h << 8) + (h << 24))) >>> 0;
+ }
+ return h >>> 0;
+ }
+
+ // Rebuild the row index; only walks the current window
+ rebuildRowIndex() {
+ this.rowIndex.clear();
+ this.rows.length = 0;
+ const pts = this.rt.graph.getAllPoints();
+ for (let i = 0; i < pts.length; i++) {
+ const id = pts[i].pointID;
+ this.rowIndex.set(id, i);
+ this.rows.push(id);
+ }
+ return pts.length;
+ }
+
+ // Hash embedding: D defaults to 512
+ buildEmbeddings(D = 512) {
+ const N = this.rebuildRowIndex();
+ this.dim = D | 0;
+ this.emb = new Float32Array(N * D);
+ for (let r = 0; r < N; r++) {
+ const memeId = this.rows[r];
+ const words = this.rt.kvm.get(memeId) || [];
+ const base = r * D;
+ for (let k = 0; k < words.length; k++) {
+ const w = String(words[k] || '').toLowerCase();
+ const idx = GraphTensorBridge.fnv1a32(w) % D;
+ this.emb[base + idx] += 1.0;
+ }
+ }
+ this.rt.tensor.l2NormalizeRows(this.emb, N, D);
+ return { N, D };
+ }
+
+ // Build multi-channel CSR (Top-K per row; all/bi/out, plus the transpose of out as in)
+ buildMultiOrderCSR(topK = 64) {
+ const pts = this.rt.graph.getAllPoints();
+ const N = pts.length | 0;
+ const id2row = this.rowIndex;
+ const row2id = this.rows.slice();
+
+ const buildChannel = (filterFn) => {
+ const rows = new Array(N);
+ let nnz = 0;
+ for (let i = 0; i < N; i++) {
+ const conns = (pts[i].connect || []).filter(filterFn);
+ // Keep Top-K (sorted by weight, descending)
+ conns.sort((a, b) => b[0] - a[0]);
+ const pruned = conns.slice(0, topK);
+ const cols = [];
+ const vals = [];
+ for (const [w, tgt] of pruned) {
+ const c = id2row.get(tgt);
+ if (c === undefined) continue;
+ cols.push(c);
+ vals.push((typeof w === 'number' && isFinite(w)) ? w : 1.0);
+ }
+ rows[i] = { cols, vals };
+ nnz += cols.length;
+ }
+ const rowPtr = new Uint32Array(N + 1);
+ const colIdx = new Uint32Array(nnz);
+ const values = new Float32Array(nnz);
+ let p = 0;
+ for (let i = 0; i < N; i++) {
+ rowPtr[i] = p;
+ const { cols, vals } = rows[i];
+ for (let j = 0; j < cols.length; j++) {
+ colIdx[p] = cols[j];
+ values[p] = vals[j];
+ p++;
+ }
+ }
+ rowPtr[N] = p;
+ return new CSR(rowPtr, colIdx, values, N, N);
+ };
+
+ const ALL = buildChannel(_ => true);
+ const BI = buildChannel(([, , d]) => d === 0);
+ const OUT = buildChannel(([, , d]) => d === 2);
+
+ // IN = transpose(OUT)
+ const IN = this.transposeCSR(OUT);

+ this._multi = { all: ALL, bi: BI, out: OUT, in: IN, id2row, row2id };
+ this.csrAll = ALL;
+ return this._multi;
+ }
+
+ transposeCSR(A) {
+ const { nRows, nCols, rowPtr, colIdx, values } = A;
+ const nnz = values.length;
+ const counts = new Uint32Array(nCols);
+ for (let p = 0; p < nnz; p++) counts[colIdx[p]]++;
+ const rowPtrT = new Uint32Array(nCols + 1);
+ for (let i = 0; i < nCols; i++) rowPtrT[i + 1] = rowPtrT[i] + counts[i];
+ const colIdxT = new Uint32Array(nnz);
+ const valuesT = new Float32Array(nnz);
+ const cursor = rowPtrT.slice();
+ for (let i = 0; i < nRows; i++) {
+ for (let p = rowPtr[i]; p < rowPtr[i + 1]; p++) {
+ const j = colIdx[p];
+ const q = cursor[j]++;
+ colIdxT[q] = i;
+ valuesT[q] = values[p];
+ }
+ }
+ return new CSR(rowPtrT, colIdxT, valuesT, nCols, nRows);
+ }
+
+ get multi() { return this._multi; }
+ }
+
+ class DimReducer {
+ // Fast approximate 2D PCA: power iteration + projection
+ pca2D(emb, N, D, iters = 6) {
+ // Mean-center the data
+ const mean = new Float32Array(D);
+ for (let i = 0; i < N; i++) {
+ const base = i * D;
+ for (let j = 0; j < D; j++) mean[j] += emb[base + j];
+ }
+ for (let j = 0; j < D; j++) mean[j] /= Math.max(1, N);
+ const X = new Float32Array(N * D);
+ for (let i = 0; i < N; i++) {
+ const base = i * D;
+ for (let j = 0; j < D; j++) X[base + j] = emb[base + j] - mean[j];
+ }
+ // Random initial vectors
+ let v1 = new Float32Array(D); for (let j = 0; j < D; j++) v1[j] = Math.random() - 0.5;
+ let v2 = new Float32Array(D); for (let j = 0; j < D; j++) v2[j] = Math.random() - 0.5;
+
+ const mulCov = (v) => { // X^T X v
+ const tmp = new Float32Array(D);
+ for (let i = 0; i < N; i++) {
+ const base = i * D;
+ // s = x_i dot v
+ let s = 0.0;
+ for (let j = 0; j < D; j++) s += X[base + j] * v[j];
+ for (let j = 0; j < D; j++) tmp[j] += X[base + j] * s;
+ }
+ return tmp;
+ };
+ const normalize = (v) => {
+ let s = 0.0; for (let j = 0; j < D; j++) s += v[j] * v[j];
+ s = Math.sqrt(s) || 1.0;
+ for (let j = 0; j < D; j++) v[j] /= s;
+ };
+
+ for (let t = 0; t < iters; t++) { v1 = mulCov(v1); normalize(v1); }
+ // Remove the v1 component from v2
+ for (let t = 0; t < iters; t++) {
+ v2 = mulCov(v2);
+ // Gram-Schmidt
+ let dot = 0.0; for (let j = 0; j < D; j++) dot += v2[j] * v1[j];
+ for (let j = 0; j < D; j++) v2[j] -= dot * v1[j];
+ normalize(v2);
+ }
+
+ // Project to 2D
+ const out = new Float32Array(N * 2);
+ for (let i = 0; i < N; i++) {
+ const base = i * D;
+ let x = 0.0, y = 0.0;
+ for (let j = 0; j < D; j++) {
+ const xv = X[base + j];
+ x += xv * v1[j];
+ y += xv * v2[j];
+ }
+ out[2 * i + 0] = x;
+ out[2 * i + 1] = y;
+ }
+ return out;
+ }
+
+ // Use UMAP when umap-js is installed, otherwise fall back to PCA
+ project2D(emb, N, D, method = 'auto') {
+ if (method === 'pca') return this.pca2D(emb, N, D);
+ if (method === 'umap' || method === 'auto') {
+ try {
+ // Load on demand
+ const { UMAP } = require('umap-js');
+ const umap = new UMAP({ nComponents: 2, nNeighbors: 15, minDist: 0.1 });
+ // umap-js expects plain arrays
+ const data = new Array(N);
+ for (let i = 0; i < N; i++) {
+ const row = new Array(D);
+ const base = i * D;
+ for (let j = 0; j < D; j++) row[j] = emb[base + j];
+ data[i] = row;
+ }
+ const coords = umap.fit(data);
+ const out = new Float32Array(N * 2);
+ for (let i = 0; i < N; i++) { out[2 * i] = coords[i][0]; out[2 * i + 1] = coords[i][1]; }
+ return out;
+ } catch (_) {
+ return this.pca2D(emb, N, D);
+ }
+ }
+ return this.pca2D(emb, N, D);
+ }
+ }
+
+ // Provides multi-channel adjacency volumes (all/bi/out/in) for the 3D tensor view
+ class MultiOrderAdjacency {
+ constructor(runtime) {
+ this.rt = runtime;
+ }
+ rebuild(topK = 64, Demb = 512) {
+ if (!this.rt.tensorBridge) this.rt.tensorBridge = new GraphTensorBridge(this.rt);
+ const gb = this.rt.tensorBridge;
+ gb.buildEmbeddings(Demb);
+ const multi = gb.buildMultiOrderCSR(topK);
+ return multi;
+ }
+ }
+ // ===== End of Linear Algebra Backend =====
+
+ // ...existing code...
  // On-demand researcher: triggers web retrieval and incremental learning on user input (processInput)
  class OnlineResearcher {
  constructor(runtime, options = {}) {
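For readers new to the CSR layout used by this backend, here is a minimal illustrative sketch (not part of the package): a 2x3 matrix [[1, 0, 2], [0, 3, 0]] stored as rowPtr/colIdx/values and multiplied by a dense vector with TensorEngine.spmm. It assumes the CSR and TensorEngine classes added above are in scope inside main_Study.cjs; the matrix values are made up.

    // Illustrative only: row i owns values[rowPtr[i] .. rowPtr[i+1])
    const A = new CSR(
      new Uint32Array([0, 2, 3]),   // rowPtr
      new Uint32Array([0, 2, 1]),   // colIdx
      new Float32Array([1, 2, 3]),  // non-zero values
      2, 3
    );
    const y = new TensorEngine().spmm(A, new Float32Array([1, 1, 1]));
    // y -> Float32Array [3, 3]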
@@ -2118,6 +2429,51 @@ class Runtime {
  this._act = BuiltinActivations.relu;
  this._transfer = BuiltinTransfers.linear;
  this._activationMeta = { activationType: 'relu', transferType: 'linear' };
+ this.tensor = new TensorEngine();
+ this.tensorBridge = new GraphTensorBridge(this);
+ this.dimReducer = new DimReducer();
+ this.multiAdj = new MultiOrderAdjacency(this);
+ this._laReady = false; // whether the linear-algebra caches are ready
+ }
+ // Rebuild the linear-algebra caches: embeddings + multi-channel CSR
+ rebuildLinearAlgebraCaches({ topK = 64, embDim = 512 } = {}) {
+ try {
+ this.multiAdj.rebuild(topK, embDim);
+ this._laReady = !!(this.tensorBridge?.multi?.all);
+ return {
+ ok: this._laReady,
+ nodes: this.tensorBridge?.rows?.length || 0,
+ embDim
+ };
+ } catch (e) {
+ this._laReady = false;
+ return { ok: false, error: e.message };
+ }
+ }
+
+ // Export the CSR ("all" channel)
+ exportSparseMatrix() {
+ const m = this.tensorBridge?.multi;
+ if (!m?.all) return null;
+ const A = m.all;
+ return {
+ nRows: A.nRows, nCols: A.nCols, nnz: A.nnz,
+ rowPtr: Array.from(A.rowPtr),
+ colIdx: Array.from(A.colIdx),
+ values: Array.from(A.values),
+ rows: this.tensorBridge.rows.slice()
+ };
+ }
+ // High-dimensional -> 2D projection (PCA/UMAP)
+ foldHighDimTo2D(method = 'auto') {
+ const emb = this.tensorBridge?.emb;
+ const N = this.tensorBridge?.rows?.length || 0;
+ const D = this.tensorBridge?.dim || 0;
+ if (!emb || !N || !D) return { ok: false, error: 'embedding not ready' };
+ const coords = this.dimReducer.project2D(emb, N, D, method);
+ const out = {};
+ for (let i = 0; i < N; i++) out[this.tensorBridge.rows[i]] = [coords[2 * i], coords[2 * i + 1]];
+ return { ok: true, dim: 2, points: out };
  }
  // Get/set the activation/transfer function configuration
  getActivationConfig() {
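A hedged usage sketch of the new Runtime helpers: the `runtime` instance and the call site are hypothetical, while the method names and return shapes come from the hunk above.

    const status = runtime.rebuildLinearAlgebraCaches({ topK: 64, embDim: 512 });
    // -> { ok, nodes, embDim } on success, { ok: false, error } on failure
    if (status.ok) {
      const csr = runtime.exportSparseMatrix();    // plain-object CSR of the "all" channel, or null
      const proj = runtime.foldHighDimTo2D('pca'); // { ok: true, dim: 2, points: { memeID: [x, y], ... } }
    }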
@@ -2683,75 +3039,47 @@ class Runtime {
  * options.trackPath: whether to record activation paths
  * @returns {Object|Map} { signalMap, activationPaths } or signalMap
  */
+ // Linear-algebra version of multi-source diffusion (no path tracking)
  propagateSignalMultiSource(startIDs, strengths, decayK, maxStep, options = {}) {
- decayK = decayK !== undefined ? decayK : (this.config.decayK !== undefined ? this.config.decayK : 1);
- maxStep = maxStep !== undefined ? maxStep : (this.config.maxStep !== undefined ? this.config.maxStep : 10);
- const maxActiveNodes = options.maxActiveNodes || 5000;
- const minSignal = options.minSignal || 0.01;
- const trackPath = options.trackPath || false;
- const directionalMultiplier = options.directionalMultiplier || 0.7;
- const bidirectionalMultiplier = options.bidirectionalMultiplier || 1.2;
-
- const actFn = this._act || BuiltinActivations.relu;
- const transferFn = this._transfer || BuiltinTransfers.linear;
-
- const signalMap = new Map();
- const activationPaths = trackPath ? new Map() : null;
- const activationTypes = trackPath ? new Map() : null;
-
- let active = startIDs.map((id, i) => ({
- id, value: strengths[i], from: null, connectionType: -1
- }));
- let step = 0;
+ // If the caller needs trackPath, keep using the old logic
+ if (options?.trackPath) {
+ return super.propagateSignalMultiSource
+ ? super.propagateSignalMultiSource(startIDs, strengths, decayK, maxStep, options)
+ : this._propagateFallback(startIDs, strengths, decayK, maxStep, options);
+ }

- while (active.length > 0 && step < maxStep) {
- if (active.length > maxActiveNodes) {
- active.sort((a, b) => b.value - a.value);
- active = active.slice(0, maxActiveNodes);
+ // Prefer the linear-algebra backend
+ if (this._laReady && this.tensorBridge?.multi?.all) {
+ const A = this.tensorBridge.multi.all;
+ const id2row = this.tensorBridge.rowIndex;
+ const seeds = [];
+ for (let i = 0; i < startIDs.length; i++) {
+ const r = id2row.get(startIDs[i]);
+ if (r !== undefined) seeds.push([r, strengths[i] || 0]);
  }
+ if (!seeds.length) return new Map();

- const next = [];
- for (const { id, value, from, connectionType } of active) {
- if (value < minSignal) continue;
-
- // Apply the activation function at the node (merge with the accumulated value)
- const prev = signalMap.get(id) || 0;
- const merged = actFn(prev + value);
- signalMap.set(id, merged);
-
- if (trackPath && connectionType !== -1) {
- if (!activationTypes.has(id)) activationTypes.set(id, new Set());
- activationTypes.get(id).add(connectionType);
- }
- if (trackPath && from) {
- if (!activationPaths.has(id)) activationPaths.set(id, []);
- activationPaths.get(id).push({ from, connectionType, value });
- }
+ const actFn = this._act || ((x) => (x > 0 ? x : 0));
+ const steps = Math.max(1, maxStep | 0);
+ const x = this.tensor.iteratePropagation(A, seeds, steps, actFn, (decayK ?? 1));

- const point = this.graph.points.get(id);
- if (!point) continue;
-
- const MAX_NEIGHBORS = 30;
- const neighbors = point.connect.slice(0, MAX_NEIGHBORS);
-
- for (const [weight, neighborID, direction = 0] of neighbors) {
- const ctx = { direction, directionalMultiplier, bidirectionalMultiplier };
- const rawNext = transferFn(value, weight, decayK, ctx);
- const nextValue = actFn(rawNext);
-
- if (nextValue >= minSignal) {
- next.push({ id: neighborID, value: nextValue, from: id, connectionType: direction });
- }
- }
+ // Return Map<MemeID, value>
+ const out = new Map();
+ for (let i = 0; i < x.length; i++) {
+ if (x[i] > 0) out.set(this.tensorBridge.rows[i], x[i]);
  }
- active = next;
- step++;
+ return out;
  }

- if (trackPath) {
- return { signalMap, activationPaths, activationTypes };
- }
- return signalMap;
+ // Fall back to the old implementation
+ return this._propagateFallback(startIDs, strengths, decayK, maxStep, options);
+ }
+
+ _propagateFallback(startIDs, strengths, decayK, maxStep, options) {
+ // Use the existing version in the file (keeps compatibility)
+ // Directly calls the original Runtime.propagateSignalMultiSource implementation defined earlier
+ const fn = Object.getPrototypeOf(this).propagateSignalMultiSource;
+ return fn.call(this, startIDs, strengths, decayK, maxStep, options);
  }
  // ...existing code...
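In the linear-algebra path, the old per-node frontier loop is replaced by the dense recurrence that TensorEngine.iteratePropagation computes each step: x_next = actFn(x + A·x − decayK·damp·x), with damp defaulting to 0.02. A hedged call sketch follows; the meme IDs are hypothetical and the caches must have been rebuilt first.

    runtime.rebuildLinearAlgebraCaches();
    const signals = runtime.propagateSignalMultiSource(['meme-a', 'meme-b'], [1.0, 0.5], 1, 10);
    // -> Map<memeID, activation> when options.trackPath is not set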
 
@@ -4328,6 +4656,70 @@ async function main() {
  if (String(process.env.ADV_AUTOSTART || '').toLowerCase() === 'true') {
  adv.start();
  }
+ // ...existing code...
+ app.post('/api/tensor/refresh', (req, res) => {
+ try {
+ const { topK = 64, embDim = 512 } = req.body || {};
+ const rt = global.ctrlA?.runtime;
+ if (!rt) return res.status(500).json({ ok: false, error: 'runtime missing' });
+ const ret = rt.rebuildLinearAlgebraCaches({ topK, embDim });
+ res.json({ ok: !!ret.ok, ...ret });
+ } catch (e) {
+ res.status(500).json({ ok: false, error: e.message });
+ }
+ });
+
+ app.get('/api/tensor/csr', (req, res) => {
+ try {
+ const rt = global.ctrlA?.runtime;
+ if (!rt) return res.status(500).json({ ok: false, error: 'runtime missing' });
+ const csr = rt.exportSparseMatrix();
+ if (!csr) return res.status(400).json({ ok: false, error: 'csr not ready' });
+ res.json({ ok: true, csr });
+ } catch (e) {
+ res.status(500).json({ ok: false, error: e.message });
+ }
+ });
+
+ app.get('/api/tensor/project2d', (req, res) => {
+ try {
+ const method = String(req.query?.method || 'auto');
+ const rt = global.ctrlA?.runtime;
+ if (!rt) return res.status(500).json({ ok: false, error: 'runtime missing' });
+ const ret = rt.foldHighDimTo2D(method);
+ res.json(ret);
+ } catch (e) {
+ res.status(500).json({ ok: false, error: e.message });
+ }
+ });
+
+ app.get('/api/tensor/topk', (req, res) => {
+ try {
+ const memeId = String(req.query?.memeId || '');
+ const k = Math.max(1, Math.min(50, Number(req.query?.k || 10)));
+ const rt = global.ctrlA?.runtime;
+ if (!rt?.tensorBridge?.emb) return res.status(400).json({ ok: false, error: 'embedding not ready' });
+ const row = rt.tensorBridge.rowIndex.get(memeId);
+ if (row === undefined) return res.status(404).json({ ok: false, error: 'meme not found' });
+
+ const N = rt.tensorBridge.rows.length, D = rt.tensorBridge.dim;
+ const base = row * D;
+ const q = new Float32Array(D);
+ for (let j = 0; j < D; j++) q[j] = rt.tensorBridge.emb[base + j];
+
+ const scores = new Array(N);
+ for (let i = 0; i < N; i++) {
+ const b = i * D;
+ let s = 0.0; for (let j = 0; j < D; j++) s += q[j] * rt.tensorBridge.emb[b + j];
+ scores[i] = [rt.tensorBridge.rows[i], s];
+ }
+ scores.sort((a, b) => b[1] - a[1]);
+ res.json({ ok: true, memeId, neighbors: scores.slice(0, k) });
+ } catch (e) {
+ res.status(500).json({ ok: false, error: e.message });
+ }
+ });
+ // ...existing code...
  // New: serve-side parameter tuning API (auto-tuning disabled by default; manual settings only)
  app.get('/api/tune/get', (req, res) => {
  try {
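For orientation, a hedged client-side sketch of the four new tensor endpoints. The base URL is a placeholder (the port and server setup are not shown in this diff), and `<memeId>` stands for a real meme ID.

    const base = 'http://localhost:3000'; // placeholder, not taken from the package
    await fetch(`${base}/api/tensor/refresh`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ topK: 64, embDim: 512 })
    });
    const proj = await (await fetch(`${base}/api/tensor/project2d?method=pca`)).json();
    const topk = await (await fetch(`${base}/api/tensor/topk?memeId=<memeId>&k=10`)).json();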