graphwise 1.1.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (133)
  1. package/dist/expansion/frontier-balanced.d.ts +12 -0
  2. package/dist/expansion/frontier-balanced.d.ts.map +1 -0
  3. package/dist/expansion/frontier-balanced.unit.test.d.ts +2 -0
  4. package/dist/expansion/frontier-balanced.unit.test.d.ts.map +1 -0
  5. package/dist/expansion/index.d.ts +12 -13
  6. package/dist/expansion/index.d.ts.map +1 -1
  7. package/dist/expansion/random-priority.d.ts +20 -0
  8. package/dist/expansion/random-priority.d.ts.map +1 -0
  9. package/dist/expansion/random-priority.unit.test.d.ts +2 -0
  10. package/dist/expansion/random-priority.unit.test.d.ts.map +1 -0
  11. package/dist/expansion/standard-bfs.d.ts +12 -0
  12. package/dist/expansion/standard-bfs.d.ts.map +1 -0
  13. package/dist/expansion/standard-bfs.unit.test.d.ts +2 -0
  14. package/dist/expansion/standard-bfs.unit.test.d.ts.map +1 -0
  15. package/dist/extraction/index.d.ts +6 -6
  16. package/dist/extraction/index.d.ts.map +1 -1
  17. package/dist/extraction/motif.d.ts.map +1 -1
  18. package/dist/gpu/context.d.ts.map +1 -1
  19. package/dist/gpu/csr.d.ts.map +1 -1
  20. package/dist/gpu/index.cjs +410 -5
  21. package/dist/gpu/index.cjs.map +1 -0
  22. package/dist/gpu/index.d.ts +4 -5
  23. package/dist/gpu/index.d.ts.map +1 -1
  24. package/dist/gpu/index.js +400 -2
  25. package/dist/gpu/index.js.map +1 -0
  26. package/dist/graph/index.cjs +222 -2
  27. package/dist/graph/index.cjs.map +1 -0
  28. package/dist/graph/index.d.ts +3 -3
  29. package/dist/graph/index.d.ts.map +1 -1
  30. package/dist/graph/index.js +221 -1
  31. package/dist/graph/index.js.map +1 -0
  32. package/dist/index/index.cjs +902 -10
  33. package/dist/index/index.cjs.map +1 -1
  34. package/dist/index/index.js +880 -10
  35. package/dist/index/index.js.map +1 -1
  36. package/dist/{kmeans-B0HEOU6k.cjs → kmeans-87ExSUNZ.js} +27 -13
  37. package/dist/{kmeans-DgbsOznU.js.map → kmeans-87ExSUNZ.js.map} +1 -1
  38. package/dist/{kmeans-DgbsOznU.js → kmeans-BIgSyGKu.cjs} +44 -2
  39. package/dist/{kmeans-B0HEOU6k.cjs.map → kmeans-BIgSyGKu.cjs.map} +1 -1
  40. package/dist/ranking/baselines/betweenness.d.ts +13 -0
  41. package/dist/ranking/baselines/betweenness.d.ts.map +1 -0
  42. package/dist/ranking/baselines/betweenness.unit.test.d.ts +2 -0
  43. package/dist/ranking/baselines/betweenness.unit.test.d.ts.map +1 -0
  44. package/dist/ranking/baselines/communicability.d.ts +13 -0
  45. package/dist/ranking/baselines/communicability.d.ts.map +1 -0
  46. package/dist/ranking/baselines/communicability.unit.test.d.ts +2 -0
  47. package/dist/ranking/baselines/communicability.unit.test.d.ts.map +1 -0
  48. package/dist/ranking/baselines/degree-sum.d.ts +13 -0
  49. package/dist/ranking/baselines/degree-sum.d.ts.map +1 -0
  50. package/dist/ranking/baselines/degree-sum.unit.test.d.ts +2 -0
  51. package/dist/ranking/baselines/degree-sum.unit.test.d.ts.map +1 -0
  52. package/dist/ranking/baselines/index.d.ts +20 -0
  53. package/dist/ranking/baselines/index.d.ts.map +1 -0
  54. package/dist/ranking/baselines/jaccard-arithmetic.d.ts +13 -0
  55. package/dist/ranking/baselines/jaccard-arithmetic.d.ts.map +1 -0
  56. package/dist/ranking/baselines/jaccard-arithmetic.unit.test.d.ts +2 -0
  57. package/dist/ranking/baselines/jaccard-arithmetic.unit.test.d.ts.map +1 -0
  58. package/dist/ranking/baselines/katz.d.ts +13 -0
  59. package/dist/ranking/baselines/katz.d.ts.map +1 -0
  60. package/dist/ranking/baselines/katz.unit.test.d.ts +2 -0
  61. package/dist/ranking/baselines/katz.unit.test.d.ts.map +1 -0
  62. package/dist/ranking/baselines/pagerank.d.ts +13 -0
  63. package/dist/ranking/baselines/pagerank.d.ts.map +1 -0
  64. package/dist/ranking/baselines/pagerank.unit.test.d.ts +2 -0
  65. package/dist/ranking/baselines/pagerank.unit.test.d.ts.map +1 -0
  66. package/dist/ranking/baselines/random-ranking.d.ts +21 -0
  67. package/dist/ranking/baselines/random-ranking.d.ts.map +1 -0
  68. package/dist/ranking/baselines/random-ranking.unit.test.d.ts +2 -0
  69. package/dist/ranking/baselines/random-ranking.unit.test.d.ts.map +1 -0
  70. package/dist/ranking/baselines/resistance-distance.d.ts +13 -0
  71. package/dist/ranking/baselines/resistance-distance.d.ts.map +1 -0
  72. package/dist/ranking/baselines/resistance-distance.unit.test.d.ts +2 -0
  73. package/dist/ranking/baselines/resistance-distance.unit.test.d.ts.map +1 -0
  74. package/dist/ranking/baselines/widest-path.d.ts +13 -0
  75. package/dist/ranking/baselines/widest-path.d.ts.map +1 -0
  76. package/dist/ranking/baselines/widest-path.unit.test.d.ts +2 -0
  77. package/dist/ranking/baselines/widest-path.unit.test.d.ts.map +1 -0
  78. package/dist/ranking/index.d.ts +3 -6
  79. package/dist/ranking/index.d.ts.map +1 -1
  80. package/dist/ranking/mi/index.d.ts +9 -9
  81. package/dist/ranking/mi/index.d.ts.map +1 -1
  82. package/dist/schemas/index.d.ts +2 -2
  83. package/dist/schemas/index.d.ts.map +1 -1
  84. package/dist/seeds/index.cjs +398 -3
  85. package/dist/seeds/index.cjs.map +1 -0
  86. package/dist/seeds/index.d.ts +2 -4
  87. package/dist/seeds/index.d.ts.map +1 -1
  88. package/dist/seeds/index.js +396 -1
  89. package/dist/seeds/index.js.map +1 -0
  90. package/dist/seeds/stratified.d.ts.map +1 -1
  91. package/dist/structures/index.cjs +133 -2
  92. package/dist/structures/index.cjs.map +1 -0
  93. package/dist/structures/index.d.ts +1 -2
  94. package/dist/structures/index.d.ts.map +1 -1
  95. package/dist/structures/index.js +132 -1
  96. package/dist/structures/index.js.map +1 -0
  97. package/dist/traversal/index.cjs +152 -5
  98. package/dist/traversal/index.cjs.map +1 -0
  99. package/dist/traversal/index.d.ts +2 -2
  100. package/dist/traversal/index.d.ts.map +1 -1
  101. package/dist/traversal/index.js +148 -1
  102. package/dist/traversal/index.js.map +1 -0
  103. package/dist/utils/index.cjs +172 -9
  104. package/dist/utils/index.cjs.map +1 -0
  105. package/dist/utils/index.d.ts +3 -3
  106. package/dist/utils/index.d.ts.map +1 -1
  107. package/dist/utils/index.js +165 -3
  108. package/dist/utils/index.js.map +1 -0
  109. package/package.json +1 -1
  110. package/dist/gpu-BJRVYBjx.cjs +0 -338
  111. package/dist/gpu-BJRVYBjx.cjs.map +0 -1
  112. package/dist/gpu-BveuXugy.js +0 -315
  113. package/dist/gpu-BveuXugy.js.map +0 -1
  114. package/dist/graph-DLWiziLB.js +0 -222
  115. package/dist/graph-DLWiziLB.js.map +0 -1
  116. package/dist/graph-az06J1YV.cjs +0 -227
  117. package/dist/graph-az06J1YV.cjs.map +0 -1
  118. package/dist/seeds-B6J9oJfU.cjs +0 -404
  119. package/dist/seeds-B6J9oJfU.cjs.map +0 -1
  120. package/dist/seeds-UNZxqm_U.js +0 -393
  121. package/dist/seeds-UNZxqm_U.js.map +0 -1
  122. package/dist/structures-BPfhfqNP.js +0 -133
  123. package/dist/structures-BPfhfqNP.js.map +0 -1
  124. package/dist/structures-CJ_S_7fs.cjs +0 -138
  125. package/dist/structures-CJ_S_7fs.cjs.map +0 -1
  126. package/dist/traversal-CQCjUwUJ.js +0 -149
  127. package/dist/traversal-CQCjUwUJ.js.map +0 -1
  128. package/dist/traversal-QeHaNUWn.cjs +0 -172
  129. package/dist/traversal-QeHaNUWn.cjs.map +0 -1
  130. package/dist/utils-Q_akvlMn.js +0 -164
  131. package/dist/utils-Q_akvlMn.js.map +0 -1
  132. package/dist/utils-spZa1ZvS.cjs +0 -205
  133. package/dist/utils-spZa1ZvS.cjs.map +0 -1
@@ -1,10 +1,10 @@
1
- import { t as AdjacencyMapGraph } from "../graph-DLWiziLB.js";
2
- import { i as bfsWithPath, n as dfsWithPath, r as bfs, t as dfs } from "../traversal-CQCjUwUJ.js";
3
- import { t as PriorityQueue } from "../structures-BPfhfqNP.js";
4
- import { n as normaliseFeatures, t as miniBatchKMeans } from "../kmeans-DgbsOznU.js";
5
- import { n as grasp, t as stratified } from "../seeds-UNZxqm_U.js";
6
- import { a as approximateClusteringCoefficient, i as shannonEntropy, n as localTypeEntropy, o as batchClusteringCoefficients, r as normalisedEntropy, s as localClusteringCoefficient, t as entropyFromCounts } from "../utils-Q_akvlMn.js";
7
- import { i as detectWebGPU, n as graphToCSR, r as GPUContext, t as csrToGPUBuffers } from "../gpu-BveuXugy.js";
1
+ import { AdjacencyMapGraph } from "../graph/index.js";
2
+ import { bfs, bfsWithPath, dfs, dfsWithPath } from "../traversal/index.js";
3
+ import { PriorityQueue } from "../structures/index.js";
4
+ import { n as miniBatchKMeans, r as normaliseFeatures, t as _computeMean } from "../kmeans-87ExSUNZ.js";
5
+ import { grasp, stratified } from "../seeds/index.js";
6
+ import { approximateClusteringCoefficient, batchClusteringCoefficients, entropyFromCounts, localClusteringCoefficient, localTypeEntropy, normalisedEntropy, shannonEntropy } from "../utils/index.js";
7
+ import { GPUContext, GPUNotAvailableError, assertWebGPUAvailable, createGPUContext, createResultBuffer, csrToGPUBuffers, detectWebGPU, getGPUContext, graphToCSR, isWebGPUAvailable, readBufferToCPU } from "../gpu/index.js";
8
8
  //#region src/expansion/base.ts
9
9
  /**
10
10
  * Default priority function - degree-ordered (DOME).
@@ -242,6 +242,18 @@ function dome(graph, seeds, config) {
242
242
  priority: domePriority
243
243
  });
244
244
  }
245
/**
 * DOME with reverse priority (high degree first).
 *
 * Negates each node's degree so higher-degree nodes receive a smaller
 * (i.e. earlier) priority value than lower-degree ones.
 *
 * @param graph - Source graph
 * @param seeds - Seed nodes for expansion
 * @param config - Expansion configuration
 * @returns Expansion result with discovered paths
 */
function domeHighDegree(graph, seeds, config) {
  const highDegreeFirst = (nodeId, _context) => -graph.degree(nodeId);
  return base(graph, seeds, {
    ...config,
    priority: highDegreeFirst
  });
}
245
257
  //#endregion
246
258
  //#region src/expansion/edge.ts
247
259
  /**
@@ -520,6 +532,80 @@ function maze(graph, seeds, config) {
520
532
  });
521
533
  }
522
534
  //#endregion
535
+ //#region src/expansion/standard-bfs.ts
536
/**
 * Run standard BFS expansion (FIFO discovery order).
 *
 * Uses the expansion iteration counter as the priority, so nodes are
 * processed in the order they were discovered.
 *
 * @param graph - Source graph
 * @param seeds - Seed nodes for expansion
 * @param config - Expansion configuration
 * @returns Expansion result with discovered paths
 */
function standardBfs(graph, seeds, config) {
  return base(graph, seeds, {
    ...config,
    priority: (_nodeId, context) => context.iteration
  });
}
553
+ //#endregion
554
+ //#region src/expansion/frontier-balanced.ts
555
/**
 * Run frontier-balanced expansion (round-robin across frontiers).
 *
 * The frontier index dominates the priority key (scaled by 1e9) while
 * the iteration counter breaks ties, so expansion interleaves work
 * across frontiers instead of draining one at a time.
 *
 * @param graph - Source graph
 * @param seeds - Seed nodes for expansion
 * @param config - Expansion configuration
 * @returns Expansion result with discovered paths
 */
function frontierBalanced(graph, seeds, config) {
  const roundRobin = (_nodeId, context) =>
    context.frontierIndex * 1e9 + context.iteration;
  return base(graph, seeds, {
    ...config,
    priority: roundRobin
  });
}
572
+ //#endregion
573
+ //#region src/expansion/random-priority.ts
574
/**
 * Deterministic seeded random number generator.
 * Uses FNV-1a-like hash for input → [0, 1] output.
 *
 * @param input - String to hash
 * @param seed - Random seed for reproducibility
 * @returns Deterministic random value in [0, 1]
 */
function seededRandom$1(input, seed = 0) {
  let state = seed;
  let position = 0;
  while (position < input.length) {
    // Mix each UTF-16 code unit in, then diffuse high bits downward.
    state = Math.imul(state ^ input.charCodeAt(position), 2654435769);
    state ^= state >>> 16;
    position += 1;
  }
  // `>>> 0` reinterprets as unsigned 32-bit; dividing by 2^32 - 1
  // maps the full range onto [0, 1] inclusive.
  return (state >>> 0) / 4294967295;
}
590
/**
 * Run random-priority expansion (null hypothesis baseline).
 *
 * Each node's priority is a deterministic pseudo-random value derived
 * from its ID and the configured seed, so runs are reproducible.
 *
 * @param graph - Source graph
 * @param seeds - Seed nodes for expansion
 * @param config - Expansion configuration (optional `seed`)
 * @returns Expansion result with discovered paths
 */
function randomPriority(graph, seeds, config) {
  const { seed = 0 } = config ?? {};
  return base(graph, seeds, {
    ...config,
    priority: (nodeId, _context) => seededRandom$1(nodeId, seed)
  });
}
608
+ //#endregion
523
609
  //#region src/ranking/parse.ts
524
610
  /**
525
611
  * Rank paths using PARSE (Path-Aware Ranking via Salience Estimation).
@@ -812,6 +898,712 @@ function shortest(_graph, paths, config) {
812
898
  };
813
899
  }
814
900
  //#endregion
901
+ //#region src/ranking/baselines/degree-sum.ts
902
/**
 * Rank paths by sum of node degrees.
 *
 * Scores are normalised by the maximum degree-sum, then sorted
 * descending. A graph whose paths all have degree-sum 0 yields
 * score 0 for every path, in original order.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * branches of its ternary were identical, so the option never changed
 * the output. The dead conditional (and the local shadowing the
 * function name) are removed; behaviour is unchanged. Confirm upstream
 * what `includeScores: false` was meant to do.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest degree-sum first)
 */
function degreeSum(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "degree-sum"
  };
  const scored = paths.map((path) => {
    let total = 0;
    for (const nodeId of path.nodes) total += graph.degree(nodeId);
    return { path, score: total };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "degree-sum"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "degree-sum"
  };
}
940
+ //#endregion
941
+ //#region src/ranking/baselines/widest-path.ts
942
/**
 * Rank paths by widest bottleneck (minimum edge similarity).
 *
 * Each path is scored by the smallest Jaccard similarity along its
 * consecutive edges; trivial paths (< 2 nodes) score 1. Scores are
 * normalised by the maximum and sorted descending.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * ternary branches produced the same value, so the option had no
 * effect. The dead conditional is removed; output is unchanged.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest bottleneck first)
 */
function widestPath(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "widest-path"
  };
  const scored = paths.map((path) => {
    if (path.nodes.length < 2) return { path, score: 1 };
    let bottleneck = Number.POSITIVE_INFINITY;
    for (let i = 0; i < path.nodes.length - 1; i++) {
      const source = path.nodes[i];
      const target = path.nodes[i + 1];
      if (source === undefined || target === undefined) continue;
      bottleneck = Math.min(bottleneck, jaccard(graph, source, target));
    }
    // No usable edge pair found → treat as trivially wide.
    return {
      path,
      score: bottleneck === Number.POSITIVE_INFINITY ? 1 : bottleneck
    };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "widest-path"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "widest-path"
  };
}
990
+ //#endregion
991
+ //#region src/ranking/baselines/jaccard-arithmetic.ts
992
/**
 * Rank paths by arithmetic mean of edge Jaccard similarities.
 *
 * Each path is scored by the mean Jaccard similarity over its
 * consecutive edges; trivial paths (< 2 nodes, or no usable edges)
 * score 1. Scores are normalised by the maximum and sorted descending.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * ternary branches produced the same value, so the option had no
 * effect. The dead conditional is removed; output is unchanged.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest arithmetic mean first)
 */
function jaccardArithmetic(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "jaccard-arithmetic"
  };
  const scored = paths.map((path) => {
    if (path.nodes.length < 2) return { path, score: 1 };
    let similaritySum = 0;
    let edgeCount = 0;
    for (let i = 0; i < path.nodes.length - 1; i++) {
      const source = path.nodes[i];
      const target = path.nodes[i + 1];
      if (source === undefined || target === undefined) continue;
      similaritySum += jaccard(graph, source, target);
      edgeCount++;
    }
    return {
      path,
      score: edgeCount > 0 ? similaritySum / edgeCount : 1
    };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "jaccard-arithmetic"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "jaccard-arithmetic"
  };
}
1042
+ //#endregion
1043
+ //#region src/ranking/baselines/pagerank.ts
1044
/**
 * Compute PageRank centrality for all nodes using power iteration.
 *
 * BUG FIX: the original flipped its double buffers only *after* the
 * convergence test, so exiting via the `break` returned the previous
 * iteration's map and discarded the freshest ranks. Buffers are now
 * swapped before the test, so the newest values are always returned.
 *
 * @param graph - Source graph
 * @param damping - Damping factor (default 0.85)
 * @param tolerance - Convergence tolerance (default 1e-6)
 * @param maxIterations - Maximum iterations (default 100)
 * @returns Map of node ID to PageRank value
 */
function computePageRank(graph, damping = .85, tolerance = 1e-6, maxIterations = 100) {
  const nodes = Array.from(graph.nodeIds());
  const n = nodes.length;
  if (n === 0) return /* @__PURE__ */ new Map();
  let current = /* @__PURE__ */ new Map();
  let next = /* @__PURE__ */ new Map();
  for (const nodeId of nodes) {
    current.set(nodeId, 1 / n); // uniform start distribution
    next.set(nodeId, 0);
  }
  for (let iteration = 0; iteration < maxIterations; iteration++) {
    let maxChange = 0;
    for (const nodeId of nodes) {
      let incomingSum = 0;
      for (const incomingId of graph.neighbours(nodeId, "in")) {
        const incomingRank = current.get(incomingId) ?? 0;
        // NOTE(review): `degree` is used as the out-degree of the
        // in-neighbour here — confirm its semantics in the Graph API.
        const outDegree = graph.degree(incomingId);
        if (outDegree > 0) incomingSum += incomingRank / outDegree;
      }
      const newRank = (1 - damping) / n + damping * incomingSum;
      next.set(nodeId, newRank);
      maxChange = Math.max(maxChange, Math.abs(newRank - (current.get(nodeId) ?? 0)));
    }
    // Swap buffers: `current` now holds the freshest ranks. Every key
    // is overwritten each iteration, so no clearing is needed.
    [current, next] = [next, current];
    if (maxChange < tolerance) break;
  }
  return current;
}
1086
/**
 * Rank paths by sum of PageRank scores.
 *
 * PageRank is computed once for the whole graph, each path is scored
 * by the sum over its nodes, then scores are normalised by the maximum
 * and sorted descending.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * ternary branches produced the same value, so the option had no
 * effect. The dead conditional is removed; output is unchanged.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest PageRank sum first)
 */
function pagerank(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "pagerank"
  };
  const ranks = computePageRank(graph);
  const scored = paths.map((path) => {
    let prSum = 0;
    for (const nodeId of path.nodes) prSum += ranks.get(nodeId) ?? 0;
    return { path, score: prSum };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "pagerank"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "pagerank"
  };
}
1125
+ //#endregion
1126
+ //#region src/ranking/baselines/betweenness.ts
1127
/**
 * Compute betweenness centrality for all nodes using Brandes' algorithm.
 *
 * BUG FIX: the original tested the distance of a neighbour against the
 * value read *before* discovery, so a freshly discovered node (stale
 * distance -1) never had its first predecessor credited. As a result
 * sigma counts stayed 0 and every centrality collapsed to 0. The
 * dependency check now re-reads the distance after it may have been set.
 * Also removed a redundant `w !== source` test already guaranteed by the
 * `continue` above it.
 *
 * @param graph - Source graph
 * @returns Map of node ID to betweenness value
 */
function computeBetweenness(graph) {
  const nodes = Array.from(graph.nodeIds());
  const betweenness = /* @__PURE__ */ new Map();
  for (const nodeId of nodes) betweenness.set(nodeId, 0);
  for (const source of nodes) {
    // BFS phase: shortest-path counts (sigma) and predecessor lists.
    const predecessors = /* @__PURE__ */ new Map();
    const distance = /* @__PURE__ */ new Map();
    const sigma = /* @__PURE__ */ new Map();
    const queue = [];
    for (const nodeId of nodes) {
      predecessors.set(nodeId, []);
      distance.set(nodeId, -1);
      sigma.set(nodeId, 0);
    }
    distance.set(source, 0);
    sigma.set(source, 1);
    queue.push(source);
    // `queue` grows while we iterate over it, giving FIFO BFS order.
    for (const v of queue) {
      const vDist = distance.get(v) ?? -1;
      for (const w of graph.neighbours(v)) {
        if ((distance.get(w) ?? -1) < 0) {
          distance.set(w, vDist + 1);
          queue.push(w);
        }
        // Re-read: w may have just been assigned vDist + 1 above.
        if ((distance.get(w) ?? -1) === vDist + 1) {
          sigma.set(w, (sigma.get(w) ?? 0) + (sigma.get(v) ?? 0));
          const wPred = predecessors.get(w) ?? [];
          wPred.push(v);
          predecessors.set(w, wPred);
        }
      }
    }
    // Dependency accumulation in order of non-increasing distance.
    const delta = /* @__PURE__ */ new Map();
    for (const nodeId of nodes) delta.set(nodeId, 0);
    const sorted = [...nodes].sort((a, b) => (distance.get(b) ?? -1) - (distance.get(a) ?? -1));
    for (const w of sorted) {
      if (w === source) continue;
      const wDelta = delta.get(w) ?? 0;
      const wSigma = sigma.get(w) ?? 0;
      for (const v of predecessors.get(w) ?? []) {
        if (wSigma > 0) {
          const contribution = (sigma.get(v) ?? 0) / wSigma * (1 + wDelta);
          delta.set(v, (delta.get(v) ?? 0) + contribution);
        }
      }
      betweenness.set(w, (betweenness.get(w) ?? 0) + wDelta);
    }
  }
  return betweenness;
}
1193
/**
 * Rank paths by sum of betweenness scores.
 *
 * Betweenness is computed once for the whole graph, each path is scored
 * by the sum over its nodes, then scores are normalised by the maximum
 * and sorted descending.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * ternary branches produced the same value, so the option had no
 * effect. The dead conditional is removed; output is unchanged.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest betweenness sum first)
 */
function betweenness(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "betweenness"
  };
  const bcMap = computeBetweenness(graph);
  const scored = paths.map((path) => {
    let bcSum = 0;
    for (const nodeId of path.nodes) bcSum += bcMap.get(nodeId) ?? 0;
    return { path, score: bcSum };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "betweenness"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "betweenness"
  };
}
1232
+ //#endregion
1233
+ //#region src/ranking/baselines/katz.ts
1234
/**
 * Compute truncated Katz centrality between two nodes.
 *
 * Uses iterative matrix-vector products to avoid full matrix powers.
 * score(s,t) = sum_{k=1}^{K} beta^k * walks_k(s,t)
 *
 * @param graph - Source graph
 * @param source - Source node ID
 * @param target - Target node ID
 * @param k - Truncation depth (default 5)
 * @param beta - Attenuation factor (default 0.005)
 * @returns Katz score
 */
function computeKatz(graph, source, target, k = 5, beta = .005) {
  const nodeList = Array.from(graph.nodeIds());
  const indexOf = /* @__PURE__ */ new Map();
  nodeList.forEach((nodeId, position) => {
    indexOf.set(nodeId, position);
  });
  const total = nodeList.length;
  if (total === 0) return 0;
  const sIdx = indexOf.get(source);
  const tIdx = indexOf.get(target);
  if (sIdx === undefined || tIdx === undefined) return 0;
  // reach[i] = number of length-d walks from node i to `target`.
  let reach = new Float64Array(total);
  reach[tIdx] = 1;
  let score = 0;
  for (let depth = 1; depth <= k; depth++) {
    const nextReach = new Float64Array(total);
    for (const nodeId of nodeList) {
      const i = indexOf.get(nodeId);
      if (i === undefined) continue;
      for (const neighbourId of graph.neighbours(nodeId)) {
        const j = indexOf.get(neighbourId);
        if (j === undefined) continue;
        nextReach[i] = (nextReach[i] ?? 0) + (reach[j] ?? 0);
      }
    }
    // Attenuate longer walks geometrically by beta^depth.
    score += Math.pow(beta, depth) * (nextReach[sIdx] ?? 0);
    reach = nextReach;
  }
  return score;
}
1279
/**
 * Rank paths by Katz centrality between endpoints.
 *
 * Each path is scored by the truncated Katz score between its first and
 * last nodes, then scores are normalised by the maximum and sorted
 * descending.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * ternary branches produced the same value, so the option had no
 * effect. The dead conditional is removed; output is unchanged.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest Katz score first)
 */
function katz(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "katz"
  };
  const scored = paths.map((path) => {
    const source = path.nodes[0];
    const target = path.nodes[path.nodes.length - 1];
    if (source === undefined || target === undefined) return { path, score: 0 };
    return { path, score: computeKatz(graph, source, target) };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "katz"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "katz"
  };
}
1321
+ //#endregion
1322
+ //#region src/ranking/baselines/communicability.ts
1323
/**
 * Compute truncated communicability between two nodes.
 *
 * Uses Taylor series expansion: (e^A)_{s,t} ≈ sum_{k=0}^{K} A^k_{s,t} / k!
 *
 * @param graph - Source graph
 * @param source - Source node ID
 * @param target - Target node ID
 * @param k - Truncation depth (default 15)
 * @returns Communicability score
 */
function computeCommunicability(graph, source, target, k = 15) {
  const nodeList = Array.from(graph.nodeIds());
  const indexOf = /* @__PURE__ */ new Map();
  nodeList.forEach((nodeId, position) => {
    indexOf.set(nodeId, position);
  });
  const total = nodeList.length;
  if (total === 0) return 0;
  const sIdx = indexOf.get(source);
  const tIdx = indexOf.get(target);
  if (sIdx === undefined || tIdx === undefined) return 0;
  // walkVec[i] = number of length-d walks from node i to `target`.
  let walkVec = new Float64Array(total);
  walkVec[tIdx] = 1;
  // k = 0 term: 1 when source === target, else 0.
  let commScore = walkVec[sIdx] ?? 0;
  let kFactorial = 1;
  for (let depth = 1; depth <= k; depth++) {
    const nextVec = new Float64Array(total);
    for (const nodeId of nodeList) {
      const i = indexOf.get(nodeId);
      if (i === undefined) continue;
      for (const neighbourId of graph.neighbours(nodeId)) {
        const j = indexOf.get(neighbourId);
        if (j === undefined) continue;
        nextVec[i] = (nextVec[i] ?? 0) + (walkVec[j] ?? 0);
      }
    }
    // Accumulate the depth-d Taylor term: walks_d(s,t) / d!.
    kFactorial *= depth;
    commScore += (nextVec[sIdx] ?? 0) / kFactorial;
    walkVec = nextVec;
  }
  return commScore;
}
1367
/**
 * Rank paths by communicability between endpoints.
 *
 * Each path is scored by the truncated communicability between its
 * first and last nodes, then scores are normalised by the maximum and
 * sorted descending.
 *
 * NOTE(review): the original read `config.includeScores` but both
 * ternary branches produced the same value, so the option had no
 * effect. The dead conditional is removed; output is unchanged.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest communicability first)
 */
function communicability(graph, paths, config) {
  if (paths.length === 0) return {
    paths: [],
    method: "communicability"
  };
  const scored = paths.map((path) => {
    const source = path.nodes[0];
    const target = path.nodes[path.nodes.length - 1];
    if (source === undefined || target === undefined) return { path, score: 0 };
    return { path, score: computeCommunicability(graph, source, target) };
  });
  const maxScore = Math.max(...scored.map((s) => s.score));
  if (maxScore === 0) return {
    paths: paths.map((path) => ({ ...path, score: 0 })),
    method: "communicability"
  };
  return {
    paths: scored
      .map(({ path, score }) => ({ ...path, score: score / maxScore }))
      .sort((a, b) => b.score - a.score),
    method: "communicability"
  };
}
1409
+ //#endregion
1410
+ //#region src/ranking/baselines/resistance-distance.ts
1411
/**
 * Compute effective resistance between two nodes via Laplacian pseudoinverse.
 *
 * Resistance = L^+_{s,s} + L^+_{t,t} - 2*L^+_{s,t}
 * where L^+ is the pseudoinverse of the Laplacian matrix.
 *
 * BUG FIX: an empty graph previously threw the misleading message
 * "graph too large (0 nodes)"; it now throws a dedicated empty-graph
 * error. An Error is still thrown in both cases, so callers' handling
 * is unaffected.
 *
 * @param graph - Source graph
 * @param source - Source node ID
 * @param target - Target node ID
 * @returns Effective resistance (clamped to >= 1e-10)
 * @throws {Error} If the graph is empty or has more than 5000 nodes
 */
function computeResistance(graph, source, target) {
  const nodes = Array.from(graph.nodeIds());
  const nodeToIdx = /* @__PURE__ */ new Map();
  nodes.forEach((nodeId, idx) => {
    nodeToIdx.set(nodeId, idx);
  });
  const n = nodes.length;
  if (n === 0) throw new Error("Cannot compute resistance distance: graph is empty.");
  if (n > 5e3) throw new Error(`Cannot compute resistance distance: graph too large (${String(n)} nodes). Maximum 5000.`);
  const sourceIdx = nodeToIdx.get(source);
  const targetIdx = nodeToIdx.get(target);
  if (sourceIdx === undefined || targetIdx === undefined) return 0;
  // Build the dense graph Laplacian L = D - A.
  const L = Array.from({ length: n }, () => Array.from({ length: n }, () => 0));
  for (let i = 0; i < n; i++) {
    const nodeId = nodes[i];
    if (nodeId === undefined) continue;
    const row = L[i];
    if (row !== undefined) row[i] = graph.degree(nodeId);
    for (const neighbourId of graph.neighbours(nodeId)) {
      const j = nodeToIdx.get(neighbourId);
      if (j !== undefined && row !== undefined) row[j] = -1;
    }
  }
  const Lpinv = pinv(L);
  const resistance = (Lpinv[sourceIdx]?.[sourceIdx] ?? 0) + (Lpinv[targetIdx]?.[targetIdx] ?? 0) - 2 * (Lpinv[sourceIdx]?.[targetIdx] ?? 0);
  // Clamp to a small positive value so callers can safely invert it.
  return Math.max(resistance, 1e-10);
}
1450
/**
 * Compute Moore-Penrose pseudoinverse of a matrix.
 * Simplified implementation for small dense matrices.
 *
 * Adds a tiny epsilon to the diagonal (ridge regularisation) so a
 * singular matrix becomes invertible, then inverts directly.
 *
 * @param A - Square matrix
 * @returns Pseudoinverse A^+
 */
function pinv(A) {
  const size = A.length;
  if (size === 0) return [];
  const epsilon = 1e-10;
  const regularised = A.map((row) => [...row]);
  for (let d = 0; d < size; d++) {
    const row = regularised[d];
    if (row !== undefined) row[d] = (row[d] ?? 0) + epsilon;
  }
  return gaussianInverse(regularised);
}
1468
/**
 * Invert a square matrix via Gauss-Jordan elimination with partial pivoting.
 *
 * Columns whose pivot magnitude falls below 1e-12 are skipped (left
 * uneliminated) rather than causing a division blow-up.
 *
 * @param A - Matrix to invert
 * @returns Inverted matrix
 */
function gaussianInverse(A) {
	const n = A.length;
	// Augment each row with the matching identity row: [A | I].
	const augmented = A.map((row, i) => [
		...row,
		...Array.from({ length: n }, (_, j) => i === j ? 1 : 0)
	]);
	const width = 2 * n;
	for (let col = 0; col < n; col++) {
		// Partial pivoting: find the largest-magnitude entry in this column
		// at or below the diagonal.
		let best = col;
		for (let r = col + 1; r < n; r++) {
			if (Math.abs(augmented[r]?.[col] ?? 0) > Math.abs(augmented[best]?.[col] ?? 0)) best = r;
		}
		const diagonalRow = augmented[col];
		const bestRow = augmented[best];
		if (diagonalRow !== void 0 && bestRow !== void 0) {
			augmented[col] = bestRow;
			augmented[best] = diagonalRow;
		}
		const pivotRow = augmented[col];
		const pivot = pivotRow?.[col];
		// Near-singular column: skip instead of dividing by ~0.
		if (pivot === void 0 || Math.abs(pivot) < 1e-12) continue;
		if (pivotRow !== void 0) {
			// Normalise the pivot row so the pivot becomes exactly 1.
			for (let j = col; j < width; j++) pivotRow[j] = (pivotRow[j] ?? 0) / pivot;
		}
		// Eliminate this column from every other row.
		for (let r = 0; r < n; r++) {
			if (r === col) continue;
			const row = augmented[r];
			if (row === void 0 || pivotRow === void 0) continue;
			const factor = row[col] ?? 0;
			for (let j = col; j < width; j++) row[j] = (row[j] ?? 0) - factor * (pivotRow[j] ?? 0);
		}
	}
	// The right half of each augmented row is the corresponding inverse row.
	return augmented.map((row) => row.slice(n));
}
1508
/**
 * Rank paths by reciprocal of resistance distance between endpoints.
 *
 * Scores are normalised so the best-conducting path scores 1.
 *
 * @param graph - Source graph
 * @param paths - Paths to rank
 * @param config - Configuration options
 * @returns Ranked paths (highest conductance first)
 * @throws If the graph exceeds 5000 nodes (O(n^3) pseudoinverse cost)
 */
function resistanceDistance(graph, paths, config) {
	// NOTE(review): config.includeScores was destructured previously, but both
	// branches of the score ternary were identical (`score / maxScore` either
	// way), so it never affected the output. It is intentionally ignored here
	// to preserve the observed behaviour; confirm the intended semantics.
	void config;
	if (paths.length === 0) return {
		paths: [],
		method: "resistance-distance"
	};
	const nodeCount = Array.from(graph.nodeIds()).length;
	if (nodeCount > 5e3) throw new Error(`Cannot rank paths: graph too large (${String(nodeCount)} nodes). Resistance distance requires O(n^3) computation; maximum 5000 nodes.`);
	// Score each path by the conductance (1/R) between its two endpoints.
	const scored = paths.map((path) => {
		const source = path.nodes[0];
		const target = path.nodes[path.nodes.length - 1];
		if (source === void 0 || target === void 0) return {
			path,
			score: 0
		};
		return {
			path,
			score: 1 / computeResistance(graph, source, target)
		};
	});
	const maxScore = Math.max(...scored.map((s) => s.score));
	// Degenerate all-zero scores: keep the original order, report zeroes.
	if (maxScore === 0) return {
		paths: paths.map((path) => ({
			...path,
			score: 0
		})),
		method: "resistance-distance"
	};
	return {
		paths: scored.map(({ path, score }) => ({
			...path,
			score: score / maxScore
		})).sort((a, b) => b.score - a.score),
		method: "resistance-distance"
	};
}
1552
+ //#endregion
1553
+ //#region src/ranking/baselines/random-ranking.ts
1554
/**
 * Deterministic seeded pseudo-random generator.
 *
 * Hashes the input string one UTF-16 code unit at a time with an
 * imul-based avalanche step, then maps the unsigned 32-bit result
 * into [0, 1].
 *
 * @param input - String to hash
 * @param seed - Random seed for reproducibility
 * @returns Deterministic value in [0, 1]
 */
function seededRandom(input, seed = 0) {
	const mixed = input.split("").reduce((acc, ch) => {
		const product = Math.imul(acc ^ ch.charCodeAt(0), 2654435769);
		return product ^ product >>> 16;
	}, seed);
	return (mixed >>> 0) / 4294967295;
}
1570
/**
 * Rank paths randomly (null hypothesis baseline).
 *
 * Scores are deterministic for a given seed (hash of each path's node
 * sequence), normalised so the top-ranked path scores 1.
 *
 * @param _graph - Source graph (unused)
 * @param paths - Paths to rank
 * @param config - Configuration options (seed controls the ordering)
 * @returns Ranked paths (randomly ordered, highest score first)
 */
function randomRanking(_graph, paths, config) {
	const { seed = 0 } = config ?? {};
	// NOTE(review): config.includeScores was destructured previously, but both
	// branches of the score ternary were identical, so it never affected the
	// output. It is intentionally ignored here to preserve behaviour; confirm
	// the intended semantics.
	if (paths.length === 0) return {
		paths: [],
		method: "random"
	};
	const scored = paths.map((path) => ({
		path,
		score: seededRandom(path.nodes.join(","), seed)
	}));
	const maxScore = Math.max(...scored.map((s) => s.score));
	// Degenerate all-zero scores: keep the original order, report zeroes.
	if (maxScore === 0) return {
		paths: paths.map((path) => ({
			...path,
			score: 0
		})),
		method: "random"
	};
	return {
		paths: scored.map(({ path, score }) => ({
			...path,
			score: score / maxScore
		})).sort((a, b) => b.score - a.score),
		method: "random"
	};
}
1606
+ //#endregion
815
1607
  //#region src/extraction/ego-network.ts
816
1608
  /**
817
1609
  * Extract the ego-network (k-hop neighbourhood) of a centre node.
@@ -1022,6 +1814,82 @@ function extractKTruss(graph, k) {
1022
1814
  }
1023
1815
  return result;
1024
1816
  }
1817
/**
 * Compute the truss number for each edge.
 *
 * The truss number of an edge is the largest k such that the edge
 * belongs to the k-truss.
 *
 * Uses support-based peeling: repeatedly remove a remaining edge of
 * minimum support (triangle count); its truss number is that support
 * plus 2, made monotonically non-decreasing over the peel sequence
 * (mirroring core-number peeling).
 *
 * @param graph - The source graph
 * @returns Map from edge key (canonical "u::v") to truss number
 *
 * @example
 * ```typescript
 * const trussNumbers = computeTrussNumbers(graph);
 * const edgeKey = 'A::B'; // where A < B lexicographically
 * console.log(`Edge A-B is in the ${trussNumbers.get(edgeKey)}-truss`);
 * ```
 */
function computeTrussNumbers(graph) {
	const canonicalKey = (u, v) => u < v ? `${u}::${v}` : `${v}::${u}`;
	const adjacency = /* @__PURE__ */ new Map();
	const edgeData = /* @__PURE__ */ new Map();
	const remainingEdges = /* @__PURE__ */ new Set();
	for (const nodeId of graph.nodeIds()) adjacency.set(nodeId, /* @__PURE__ */ new Set());
	for (const edge of graph.edges()) {
		const { source, target } = edge;
		adjacency.get(source)?.add(target);
		adjacency.get(target)?.add(source);
		const key = canonicalKey(source, target);
		edgeData.set(key, edge);
		remainingEdges.add(key);
	}
	// Initial support of each edge {u, v} = |N(u) ∩ N(v)|, computed from the
	// adjacency sets built above (one triangle per common neighbour).
	const supports = /* @__PURE__ */ new Map();
	for (const key of remainingEdges) {
		const edge = edgeData.get(key);
		if (edge === void 0) continue;
		const a = adjacency.get(edge.source) ?? /* @__PURE__ */ new Set();
		const b = adjacency.get(edge.target) ?? /* @__PURE__ */ new Set();
		const [smaller, larger] = a.size <= b.size ? [a, b] : [b, a];
		let count = 0;
		for (const w of smaller) if (larger.has(w)) count++;
		supports.set(key, count);
	}
	// Bucket edges by current support for cheap minimum-support lookup.
	const buckets = /* @__PURE__ */ new Map();
	for (const [key, count] of supports) {
		if (!buckets.has(count)) buckets.set(count, /* @__PURE__ */ new Set());
		buckets.get(count)?.add(key);
	}
	const trussNumbers = /* @__PURE__ */ new Map();
	// Monotone floor: truss numbers never decrease along the peel sequence,
	// so an edge whose support drops below the current level still inherits it.
	let floor = 0;
	while (remainingEdges.size > 0) {
		// BUG FIX: the previous implementation iterated a snapshot of the
		// initial bucket keys, so edges whose support dropped into a new or
		// already-passed bucket were never assigned a truss number (e.g. a
		// plain triangle labelled only one of its three edges). Re-select the
		// minimum non-empty bucket on every peel step instead.
		let minCount = Infinity;
		for (const [count, bucket] of buckets) if (bucket.size > 0 && count < minCount) minCount = count;
		if (minCount === Infinity) break;
		const bucket = buckets.get(minCount);
		if (bucket === void 0) break;
		const edgeKey = bucket.values().next().value;
		if (edgeKey === void 0) break;
		bucket.delete(edgeKey);
		if (!remainingEdges.has(edgeKey)) continue;
		floor = Math.max(floor, minCount);
		trussNumbers.set(edgeKey, floor + 2);
		remainingEdges.delete(edgeKey);
		const edge = edgeData.get(edgeKey);
		if (edge === void 0) continue;
		const { source, target } = edge;
		adjacency.get(source)?.delete(target);
		adjacency.get(target)?.delete(source);
		const sourceNeighbours = adjacency.get(source);
		if (sourceNeighbours === void 0) continue;
		// Every common neighbour w formed a triangle (source, target, w);
		// removing this edge lowers the support of the other two sides.
		for (const w of adjacency.get(target) ?? []) {
			if (!sourceNeighbours.has(w)) continue;
			for (const sideKey of [canonicalKey(source, w), canonicalKey(target, w)]) {
				if (!remainingEdges.has(sideKey)) continue;
				const oldCount = supports.get(sideKey) ?? 0;
				const newCount = oldCount - 1;
				supports.set(sideKey, newCount);
				buckets.get(oldCount)?.delete(sideKey);
				if (!buckets.has(newCount)) buckets.set(newCount, /* @__PURE__ */ new Set());
				buckets.get(newCount)?.add(sideKey);
			}
		}
	}
	return trussNumbers;
}
1025
1893
  //#endregion
1026
1894
  //#region src/extraction/motif.ts
1027
1895
  /**
@@ -1132,10 +2000,11 @@ function enumerate3NodeMotifs(graph, includeInstances) {
1132
2000
  }
1133
2001
  }
1134
2002
  }
1135
- return {
2003
+ if (instances !== void 0) return {
1136
2004
  counts,
1137
2005
  instances
1138
2006
  };
2007
+ return { counts };
1139
2008
  }
1140
2009
  /**
1141
2010
  * Enumerate all 4-node motifs in the graph.
@@ -1203,10 +2072,11 @@ function enumerate4NodeMotifs(graph, includeInstances) {
1203
2072
  }
1204
2073
  }
1205
2074
  }
1206
- return {
2075
+ if (instances !== void 0) return {
1207
2076
  counts,
1208
2077
  instances
1209
2078
  };
2079
+ return { counts };
1210
2080
  }
1211
2081
  /**
1212
2082
  * Human-readable names for common 3-node motifs.
@@ -1351,6 +2221,6 @@ function filterSubgraph(graph, options) {
1351
2221
  return result;
1352
2222
  }
1353
2223
  //#endregion
1354
- export { AdjacencyMapGraph, GPUContext, PriorityQueue, adamicAdar, adaptive, approximateClusteringCoefficient, base, batchClusteringCoefficients, bfs, bfsWithPath, csrToGPUBuffers, detectWebGPU, dfs, dfsWithPath, dome, edge, entropyFromCounts, enumerateMotifs, enumerateMotifsWithInstances, etch, extractEgoNetwork, extractInducedSubgraph, extractKCore, extractKTruss, filterSubgraph, getMotifName, graphToCSR, grasp, hae, jaccard, localClusteringCoefficient, localTypeEntropy, maze, miniBatchKMeans, normaliseFeatures, normaliseFeatures as zScoreNormalise, normalisedEntropy, notch, parse, pipe, reach, sage, scale, shannonEntropy, shortest, skew, span, stratified };
2224
+ export { AdjacencyMapGraph, GPUContext, GPUNotAvailableError, PriorityQueue, _computeMean, adamicAdar, adaptive, approximateClusteringCoefficient, assertWebGPUAvailable, base, batchClusteringCoefficients, betweenness, bfs, bfsWithPath, communicability, computeTrussNumbers, createGPUContext, createResultBuffer, csrToGPUBuffers, degreeSum, detectWebGPU, dfs, dfsWithPath, dome, domeHighDegree, edge, entropyFromCounts, enumerateMotifs, enumerateMotifsWithInstances, etch, extractEgoNetwork, extractInducedSubgraph, extractKCore, extractKTruss, filterSubgraph, frontierBalanced, getGPUContext, getMotifName, graphToCSR, grasp, hae, isWebGPUAvailable, jaccard, jaccardArithmetic, katz, localClusteringCoefficient, localTypeEntropy, maze, miniBatchKMeans, normaliseFeatures, normaliseFeatures as zScoreNormalise, normalisedEntropy, notch, pagerank, parse, pipe, randomPriority, randomRanking, reach, readBufferToCPU, resistanceDistance, sage, scale, shannonEntropy, shortest, skew, span, standardBfs, stratified, widestPath };
1355
2225
 
1356
2226
  //# sourceMappingURL=index.js.map