graphwise 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # graphwise
2
2
 
3
- Low-dependency TypeScript graph algorithms for citation network analysis.
3
+ Low-dependency TypeScript graph algorithms for citation network analysis: novel expansion, MI variants, and path ranking.
4
4
 
5
5
  ## Features
6
6
 
@@ -20,7 +20,7 @@ pnpm add graphwise
20
20
  ## Usage
21
21
 
22
22
  ```typescript
23
- import { AdjacencyMapGraph, dome, parse, jaccard } from 'graphwise';
23
+ import { AdjacencyMapGraph, dome, parse, jaccard } from "graphwise";
24
24
 
25
25
  const graph = AdjacencyMapGraph.undirected();
26
26
  // add nodes and edges...
@@ -29,6 +29,153 @@ const result = dome(graph, seeds);
29
29
  const ranked = parse(graph, result.paths, { mi: jaccard });
30
30
  ```
31
31
 
32
+ ## Algorithms
33
+
34
+ ### Expansion: BASE Framework
35
+
36
+ **Boundary-free Adaptive Seeded Expansion** (BASE) is a parameter-free graph expansion algorithm. Given a graph $G = (V, E)$ and seed nodes $S \subseteq V$, BASE produces the subgraph induced by all vertices visited during priority-ordered expansion until frontier exhaustion:
37
+
38
+ $$G_S = (V_S, E_S) \quad \text{where} \quad V_S = \bigcup_{v \in S} \text{Expand}(v, \pi)$$
39
+
40
+ Three key properties:
41
+
42
+ 1. **Priority-ordered exploration**: the vertex with globally minimum priority $\pi(v)$ is expanded next across all frontiers
43
+ 2. **Frontier collision detection**: when a vertex is reached by multiple frontiers, the connecting path is recorded
44
+ 3. **Implicit termination**: halts when all frontier queues are empty; no depth bound or size threshold
45
+
46
+ #### DOME: Degree-Ordered Multi-seed Expansion
47
+
48
+ The default priority function uses degree-based hub deferral:
49
+
50
+ $$\pi(v) = \frac{\deg^{+}(v) + \deg^{-}(v)}{w_V(v) + \varepsilon}$$
51
+
52
+ where $\deg^{+}(v)$ is weighted out-degree, $\deg^{-}(v)$ is weighted in-degree, $w_V(v)$ is node weight, and $\varepsilon > 0$ prevents division by zero.
53
+
54
+ #### Expansion Variants
55
+
56
+ | Algorithm | Priority Function | Phases |
57
+ | --------- | ------------------------------------------------------ | ------ |
58
+ | **DOME** | Degree (hub deferral) | 1 |
59
+ | **EDGE** | Local neighbourhood type entropy | 1 |
60
+ | **HAE** | User-supplied type entropy (generalises EDGE) | 1 |
61
+ | **PIPE** | Path potential (neighbours visited by other frontiers) | 1 |
62
+ | **SAGE** | Salience accumulation from discovered paths | 2 |
63
+ | **REACH** | Rolling MI estimates of discovered path quality | 2 |
64
+ | **MAZE** | PIPE + SAGE + adaptive termination | 3 |
65
+
66
+ ### Path Ranking: PARSE
67
+
68
+ **Path Aggregation Ranked by Salience Estimation** (PARSE) scores paths by the geometric mean of per-edge mutual information, eliminating length bias:
69
+
70
+ $$M(P) = \exp\left( \frac{1}{k} \sum_{i=1}^{k} \log I(u_i, v_i) \right)$$
71
+
72
+ where $k$ is the path length (number of edges) and $I(u_i, v_i)$ is the per-edge MI score from any variant below. The geometric mean eliminates length bias: a 10-hop path scores the same as a 2-hop path whenever both have the same geometric-mean per-edge MI.
73
+
74
+ ---
75
+
76
+ ### MI Variants
77
+
78
+ Seven MI variants serve as per-edge estimators within PARSE. All build on Jaccard neighbourhood overlap, then weight by domain-specific structural properties.
79
+
80
+ ---
81
+
82
+ #### Jaccard (baseline)
83
+
84
+ $$I_{\text{Jac}}(u, v) = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}$$
85
+
86
+ Standard neighbourhood overlap. Default MI estimator.
87
+
88
+ ---
89
+
90
+ #### Adamic-Adar
91
+
92
+ $$I_{\text{AA}}(u, v) = \sum_{w \in N(u) \cap N(v)} \frac{1}{\log(\deg(w) + 1)}$$
93
+
94
+ Downweights common neighbours with high degree. Shared hub neighbours are less informative than shared rare neighbours.
95
+
96
+ ---
97
+
98
+ #### SCALE: Structural Correction via Adjusted Local Estimation
99
+
100
+ $$I_{\text{SCALE}}(u, v) = \frac{J(N(u), N(v))}{\rho(G)}$$
101
+
102
+ where $\rho(G) = \frac{2|E|}{|V|(|V|-1)}$ is graph density. Normalises Jaccard by density so that overlap in dense subgraphs is not artificially inflated.
103
+
104
+ ---
105
+
106
+ #### SKEW: Sparse-weighted Knowledge Emphasis Weighting
107
+
108
+ $$I_{\text{SKEW}}(u, v) = J(N(u), N(v)) \cdot \log\!\left(\frac{N}{\deg(u) + 1}\right) \cdot \log\!\left(\frac{N}{\deg(v) + 1}\right)$$
109
+
110
+ where $N = |V|$. IDF-style rarity weighting on both endpoints. Paths through low-degree (rare) nodes score higher; paths through hubs score lower.
111
+
112
+ ---
113
+
114
+ #### SPAN: Spanning-community Penalty for Adjacent Nodes
115
+
116
+ $$I_{\text{SPAN}}(u, v) = J(N(u), N(v)) \cdot \bigl(1 - \max(C(u), C(v))\bigr)$$
117
+
118
+ where $C(v)$ is the local clustering coefficient. Penalises edges within tight clusters; rewards edges bridging communities (structural holes).
119
+
120
+ ---
121
+
122
+ #### ETCH: Edge Type Contrast Heuristic
123
+
124
+ $$I_{\text{ETCH}}(u, v) = J(N(u), N(v)) \cdot \log\!\left(\frac{|E|}{c(\tau_{uv})}\right)$$
125
+
126
+ where $c(\tau_{uv})$ is the count of edges sharing the type of edge $(u, v)$. Weights Jaccard by edge-type rarity: paths traversing rare edge types receive higher scores. Requires edge-type annotations; falls back to Jaccard when unavailable.
127
+
128
+ ---
129
+
130
+ #### NOTCH: Node Type Contrast Heuristic
131
+
132
+ $$I_{\text{NOTCH}}(u, v) = J(N(u), N(v)) \cdot \log\!\left(\frac{|V|}{c(\tau_u)}\right) \cdot \log\!\left(\frac{|V|}{c(\tau_v)}\right)$$
133
+
134
+ where $c(\tau_u)$ is the count of nodes with the same type as $u$. Weights Jaccard by node-type rarity for both endpoints.
135
+
136
+ ---
137
+
138
+ ### Ranking Baselines
139
+
140
+ | Measure | Formula |
141
+ | --------------------------- | --------------------------------------------------- |
142
+ | **Katz Index** | $\sum_{k=1}^{\infty} \beta^k (A^k)_{st}$ |
143
+ | **Communicability** | $(e^A)_{st}$ |
144
+ | **Resistance Distance** | $L^{+}_{ss} + L^{+}_{tt} - 2L^{+}_{st}$ |
145
+ | **Jaccard Arithmetic Mean** | $\frac{1}{k} \sum J(N(u), N(v))$ |
146
+ | **Degree-Sum** | $\sum_{v \in P} \deg(v)$ |
147
+ | **Widest Path** | $\min_{(u,v) \in P} w(u,v)$ |
148
+ | **PageRank** | Stationary distribution of random walk with damping |
149
+ | **Betweenness** | Fraction of shortest paths through node |
150
+ | **Random** | Uniform random score (null baseline) |
151
+
152
+ ### Seed Selection: GRASP
153
+
154
+ **Graph-agnostic Representative seed pAir Sampling**: selects structurally representative seed pairs from an unknown graph using reservoir sampling and structural feature clustering. Operates blind: no full graph loading, no ground-truth labels, no human-defined strata.
155
+
156
+ Three phases:
157
+
158
+ 1. **Reservoir sampling**: stream graph edges; maintain a reservoir of $N$ nodes
159
+ 2. **Structural features**: for each sampled node compute $\log(\deg + 1)$, clustering coefficient, approximate PageRank
160
+ 3. **Cluster and sample**: MiniBatchKMeans into $K$ groups; sample within-cluster and cross-cluster pairs
161
+
162
+ ## Module Exports
163
+
164
+ ```typescript
165
+ import { ... } from 'graphwise'; // Everything
166
+ import { ... } from 'graphwise/graph'; // Graph data structures
167
+ import { ... } from 'graphwise/expansion'; // Expansion algorithms
168
+ import { ... } from 'graphwise/ranking'; // PARSE + baselines
169
+ import { ... } from 'graphwise/ranking/mi'; // MI variants
170
+ import { ... } from 'graphwise/seeds'; // Seed selection
171
+ import { ... } from 'graphwise/traversal'; // Graph traversal
172
+ import { ... } from 'graphwise/structures'; // Data structures
173
+ import { ... } from 'graphwise/extraction'; // Subgraph extraction
174
+ import { ... } from 'graphwise/utils'; // Utilities
175
+ import { ... } from 'graphwise/gpu'; // WebGPU acceleration
176
+ import { ... } from 'graphwise/schemas'; // Zod schemas
177
+ ```
178
+
32
179
  ## Commands
33
180
 
34
181
  ```bash
@@ -3,8 +3,8 @@ const require_graph = require("../graph/index.cjs");
3
3
  const require_traversal = require("../traversal/index.cjs");
4
4
  const require_structures = require("../structures/index.cjs");
5
5
  const require_kmeans = require("../kmeans-BIgSyGKu.cjs");
6
- const require_seeds = require("../seeds/index.cjs");
7
6
  const require_utils = require("../utils/index.cjs");
7
+ const require_seeds = require("../seeds/index.cjs");
8
8
  const require_gpu = require("../gpu/index.cjs");
9
9
  //#region src/expansion/base.ts
10
10
  /**
@@ -299,15 +299,9 @@ function edge(graph, seeds, config) {
299
299
  */
300
300
  function jaccard(graph, source, target, config) {
301
301
  const { epsilon = 1e-10 } = config ?? {};
302
- const sourceNeighbours = new Set(graph.neighbours(source));
303
- const targetNeighbours = new Set(graph.neighbours(target));
304
- sourceNeighbours.delete(target);
305
- targetNeighbours.delete(source);
306
- let intersectionSize = 0;
307
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) intersectionSize++;
308
- const unionSize = sourceNeighbours.size + targetNeighbours.size - intersectionSize;
309
- if (unionSize === 0) return 0;
310
- const score = intersectionSize / unionSize;
302
+ const { intersection, union } = require_utils.neighbourOverlap(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
303
+ if (union === 0) return 0;
304
+ const score = intersection / union;
311
305
  return Math.max(epsilon, score);
312
306
  }
313
307
  //#endregion
@@ -687,19 +681,14 @@ function computePathSalience(graph, path, mi, epsilon) {
687
681
  */
688
682
  function adamicAdar(graph, source, target, config) {
689
683
  const { epsilon = 1e-10, normalise = true } = config ?? {};
690
- const sourceNeighbours = new Set(graph.neighbours(source));
691
- const targetNeighbours = new Set(graph.neighbours(target));
692
- sourceNeighbours.delete(target);
693
- targetNeighbours.delete(source);
684
+ const commonNeighbours = require_utils.neighbourIntersection(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
694
685
  let score = 0;
695
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) {
686
+ for (const neighbour of commonNeighbours) {
696
687
  const degree = graph.degree(neighbour);
697
- if (degree > 1) score += 1 / Math.log(degree);
688
+ score += 1 / Math.log(degree + 1);
698
689
  }
699
- if (normalise) {
700
- const commonCount = sourceNeighbours.size < targetNeighbours.size ? sourceNeighbours.size : targetNeighbours.size;
701
- if (commonCount === 0) return 0;
702
- const maxScore = commonCount / Math.log(2);
690
+ if (normalise && commonNeighbours.size > 0) {
691
+ const maxScore = commonNeighbours.size / Math.log(2);
703
692
  score = score / maxScore;
704
693
  }
705
694
  return Math.max(epsilon, score);
@@ -711,22 +700,15 @@ function adamicAdar(graph, source, target, config) {
711
700
  */
712
701
  function scale(graph, source, target, config) {
713
702
  const { epsilon = 1e-10 } = config ?? {};
714
- const sourceNeighbours = new Set(graph.neighbours(source));
715
- const targetNeighbours = new Set(graph.neighbours(target));
716
- sourceNeighbours.delete(target);
717
- targetNeighbours.delete(source);
718
- const sourceDegree = sourceNeighbours.size;
719
- const targetDegree = targetNeighbours.size;
720
- let intersectionSize = 0;
721
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) intersectionSize++;
722
- const unionSize = sourceDegree + targetDegree - intersectionSize;
723
- const jaccard = unionSize > 0 ? intersectionSize / unionSize : 0;
724
- const minDegree = Math.min(sourceDegree, targetDegree);
725
- const maxDegree = Math.max(sourceDegree, targetDegree);
726
- const degreeRatio = maxDegree > 0 ? minDegree / maxDegree : 0;
727
- if (jaccard + degreeRatio === 0) return epsilon;
728
- const score = 2 * jaccard * degreeRatio / (jaccard + degreeRatio);
729
- return Math.max(epsilon, Math.min(1, score));
703
+ const { intersection, union } = require_utils.neighbourOverlap(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
704
+ const jaccard = union > 0 ? intersection / union : 0;
705
+ const n = graph.nodeCount;
706
+ const m = graph.edgeCount;
707
+ const densityNormaliser = graph.directed ? n * (n - 1) : 2 * n * (n - 1);
708
+ const density = densityNormaliser > 0 ? m / densityNormaliser : 0;
709
+ if (density === 0) return epsilon;
710
+ const score = jaccard / density;
711
+ return Math.max(epsilon, score);
730
712
  }
731
713
  //#endregion
732
714
  //#region src/ranking/mi/skew.ts
@@ -735,23 +717,15 @@ function scale(graph, source, target, config) {
735
717
  */
736
718
  function skew(graph, source, target, config) {
737
719
  const { epsilon = 1e-10 } = config ?? {};
738
- const sourceNeighbours = new Set(graph.neighbours(source));
739
- const targetNeighbours = new Set(graph.neighbours(target));
740
- sourceNeighbours.delete(target);
741
- targetNeighbours.delete(source);
742
- let weightedIntersection = 0;
743
- let commonCount = 0;
744
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) {
745
- commonCount++;
746
- const degree = graph.degree(neighbour);
747
- if (degree > 1) weightedIntersection += 1 / Math.log(degree);
748
- }
749
- if (commonCount === 0) return epsilon;
750
- const sourceDegree = sourceNeighbours.size;
751
- const targetDegree = targetNeighbours.size;
752
- const maxScore = Math.min(sourceDegree, targetDegree) / Math.log(2);
753
- const score = weightedIntersection / maxScore;
754
- return Math.max(epsilon, Math.min(1, score));
720
+ const { intersection, union } = require_utils.neighbourOverlap(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
721
+ const jaccard = union > 0 ? intersection / union : 0;
722
+ const N = graph.nodeCount;
723
+ const sourceDegree = graph.degree(source);
724
+ const targetDegree = graph.degree(target);
725
+ const sourceIdf = Math.log(N / (sourceDegree + 1));
726
+ const targetIdf = Math.log(N / (targetDegree + 1));
727
+ const score = jaccard * sourceIdf * targetIdf;
728
+ return Math.max(epsilon, score);
755
729
  }
756
730
  //#endregion
757
731
  //#region src/ranking/mi/span.ts
@@ -760,21 +734,12 @@ function skew(graph, source, target, config) {
760
734
  */
761
735
  function span(graph, source, target, config) {
762
736
  const { epsilon = 1e-10 } = config ?? {};
763
- const sourceNeighbours = new Set(graph.neighbours(source));
764
- const targetNeighbours = new Set(graph.neighbours(target));
765
- sourceNeighbours.delete(target);
766
- targetNeighbours.delete(source);
767
- const sourceDegree = sourceNeighbours.size;
768
- const targetDegree = targetNeighbours.size;
769
- let intersectionSize = 0;
770
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) intersectionSize++;
771
- const unionSize = sourceDegree + targetDegree - intersectionSize;
772
- const jaccard = unionSize > 0 ? intersectionSize / unionSize : 0;
773
- const maxDegree = Math.max(sourceDegree, targetDegree);
774
- const degreeDiff = Math.abs(sourceDegree - targetDegree);
775
- const degreeSimilarity = maxDegree > 0 ? 1 - degreeDiff / maxDegree : 1;
776
- const score = Math.sqrt(jaccard * degreeSimilarity);
777
- return Math.max(epsilon, Math.min(1, score));
737
+ const { intersection, union } = require_utils.neighbourOverlap(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
738
+ const jaccard = union > 0 ? intersection / union : 0;
739
+ const sourceCc = require_utils.localClusteringCoefficient(graph, source);
740
+ const targetCc = require_utils.localClusteringCoefficient(graph, target);
741
+ const score = jaccard * (1 - Math.max(sourceCc, targetCc));
742
+ return Math.max(epsilon, score);
778
743
  }
779
744
  //#endregion
780
745
  //#region src/ranking/mi/etch.ts
@@ -783,30 +748,14 @@ function span(graph, source, target, config) {
783
748
  */
784
749
  function etch(graph, source, target, config) {
785
750
  const { epsilon = 1e-10 } = config ?? {};
786
- const sourceNeighbours = new Set(graph.neighbours(source));
787
- const targetNeighbours = new Set(graph.neighbours(target));
788
- sourceNeighbours.delete(target);
789
- targetNeighbours.delete(source);
790
- const commonNeighbours = [];
791
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) commonNeighbours.push(neighbour);
792
- if (commonNeighbours.length < 2) return epsilon;
793
- let jointEdges = 0;
794
- for (let i = 0; i < commonNeighbours.length; i++) for (let j = i + 1; j < commonNeighbours.length; j++) {
795
- const ni = commonNeighbours[i];
796
- const nj = commonNeighbours[j];
797
- if (ni !== void 0 && nj !== void 0 && graph.getEdge(ni, nj) !== void 0) jointEdges++;
798
- }
799
- const maxJointEdges = commonNeighbours.length * (commonNeighbours.length - 1) / 2;
800
- const jointDensity = maxJointEdges > 0 ? jointEdges / maxJointEdges : 0;
801
- let commonEdges = 0;
802
- for (const cn of commonNeighbours) {
803
- if (graph.getEdge(source, cn) !== void 0) commonEdges++;
804
- if (graph.getEdge(target, cn) !== void 0) commonEdges++;
805
- }
806
- const maxCommonEdges = commonNeighbours.length * 2;
807
- const commonDensity = maxCommonEdges > 0 ? commonEdges / maxCommonEdges : 0;
808
- const score = jointDensity * .7 + commonDensity * .3;
809
- return Math.max(epsilon, Math.min(1, score));
751
+ const { intersection, union } = require_utils.neighbourOverlap(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
752
+ const jaccard = union > 0 ? intersection / union : 0;
753
+ const edge = graph.getEdge(source, target);
754
+ if (edge?.type === void 0) return Math.max(epsilon, jaccard);
755
+ const edgeTypeCount = require_utils.countEdgesOfType(graph, edge.type);
756
+ if (edgeTypeCount === 0) return Math.max(epsilon, jaccard);
757
+ const score = jaccard * Math.log(graph.edgeCount / edgeTypeCount);
758
+ return Math.max(epsilon, score);
810
759
  }
811
760
  //#endregion
812
761
  //#region src/ranking/mi/notch.ts
@@ -815,20 +764,18 @@ function etch(graph, source, target, config) {
815
764
  */
816
765
  function notch(graph, source, target, config) {
817
766
  const { epsilon = 1e-10 } = config ?? {};
818
- const sourceNeighbours = new Set(graph.neighbours(source));
819
- const targetNeighbours = new Set(graph.neighbours(target));
820
- sourceNeighbours.delete(target);
821
- targetNeighbours.delete(source);
822
- const sourceDegree = sourceNeighbours.size;
823
- const targetDegree = targetNeighbours.size;
824
- let intersectionSize = 0;
825
- for (const neighbour of sourceNeighbours) if (targetNeighbours.has(neighbour)) intersectionSize++;
826
- const minDegree = Math.min(sourceDegree, targetDegree);
827
- const overlap = minDegree > 0 ? intersectionSize / minDegree : 0;
828
- const maxDegree = Math.max(sourceDegree, targetDegree);
829
- const correlation = maxDegree > 0 ? 1 - Math.abs(sourceDegree - targetDegree) / maxDegree : 1;
830
- const score = overlap * .6 + correlation * .4;
831
- return Math.max(epsilon, Math.min(1, score));
767
+ const { intersection, union } = require_utils.neighbourOverlap(require_utils.neighbourSet(graph, source, target), require_utils.neighbourSet(graph, target, source));
768
+ const jaccard = union > 0 ? intersection / union : 0;
769
+ const sourceNode = graph.getNode(source);
770
+ const targetNode = graph.getNode(target);
771
+ if (sourceNode?.type === void 0 || targetNode?.type === void 0) return Math.max(epsilon, jaccard);
772
+ const sourceTypeCount = require_utils.countNodesOfType(graph, sourceNode.type);
773
+ const targetTypeCount = require_utils.countNodesOfType(graph, targetNode.type);
774
+ if (sourceTypeCount === 0 || targetTypeCount === 0) return Math.max(epsilon, jaccard);
775
+ const sourceRarity = Math.log(graph.nodeCount / sourceTypeCount);
776
+ const targetRarity = Math.log(graph.nodeCount / targetTypeCount);
777
+ const score = jaccard * sourceRarity * targetRarity;
778
+ return Math.max(epsilon, score);
832
779
  }
833
780
  //#endregion
834
781
  //#region src/ranking/mi/adaptive.ts
@@ -836,7 +783,7 @@ function notch(graph, source, target, config) {
836
783
  * Compute unified adaptive MI between two connected nodes.
837
784
  *
838
785
  * Combines structural, degree, and overlap signals with
839
- * adaptive weighting based on graph density.
786
+ * configurable weighting.
840
787
  *
841
788
  * @param graph - Source graph
842
789
  * @param source - Source node ID
@@ -851,17 +798,13 @@ function adaptive(graph, source, target, config) {
851
798
  epsilon,
852
799
  normalise: true
853
800
  });
854
- const sourceNeighbours = new Set(graph.neighbours(source));
855
- const targetNeighbours = new Set(graph.neighbours(target));
856
- sourceNeighbours.delete(target);
857
- targetNeighbours.delete(source);
858
- const sourceDegree = sourceNeighbours.size;
859
- const targetDegree = targetNeighbours.size;
801
+ const sourceNeighbours = require_utils.neighbourSet(graph, source, target);
802
+ const targetNeighbours = require_utils.neighbourSet(graph, target, source);
860
803
  let overlap;
861
- if (sourceDegree > 0 && targetDegree > 0) {
862
- let commonCount = 0;
863
- for (const n of sourceNeighbours) if (targetNeighbours.has(n)) commonCount++;
864
- overlap = commonCount / Math.min(sourceDegree, targetDegree);
804
+ if (sourceNeighbours.size > 0 && targetNeighbours.size > 0) {
805
+ const { intersection } = require_utils.neighbourOverlap(sourceNeighbours, targetNeighbours);
806
+ const minDegree = Math.min(sourceNeighbours.size, targetNeighbours.size);
807
+ overlap = minDegree > 0 ? intersection / minDegree : epsilon;
865
808
  } else overlap = epsilon;
866
809
  const totalWeight = structuralWeight + degreeWeight + overlapWeight;
867
810
  const score = (structuralWeight * structural + degreeWeight * degreeComponent + overlapWeight * overlap) / totalWeight;
@@ -2238,6 +2181,8 @@ exports.bfs = require_traversal.bfs;
2238
2181
  exports.bfsWithPath = require_traversal.bfsWithPath;
2239
2182
  exports.communicability = communicability;
2240
2183
  exports.computeTrussNumbers = computeTrussNumbers;
2184
+ exports.countEdgesOfType = require_utils.countEdgesOfType;
2185
+ exports.countNodesOfType = require_utils.countNodesOfType;
2241
2186
  exports.createGPUContext = require_gpu.createGPUContext;
2242
2187
  exports.createResultBuffer = require_gpu.createResultBuffer;
2243
2188
  exports.csrToGPUBuffers = require_gpu.csrToGPUBuffers;
@@ -2271,6 +2216,9 @@ exports.localClusteringCoefficient = require_utils.localClusteringCoefficient;
2271
2216
  exports.localTypeEntropy = require_utils.localTypeEntropy;
2272
2217
  exports.maze = maze;
2273
2218
  exports.miniBatchKMeans = require_kmeans.miniBatchKMeans;
2219
+ exports.neighbourIntersection = require_utils.neighbourIntersection;
2220
+ exports.neighbourOverlap = require_utils.neighbourOverlap;
2221
+ exports.neighbourSet = require_utils.neighbourSet;
2274
2222
  exports.normaliseFeatures = require_kmeans.normaliseFeatures;
2275
2223
  exports.zScoreNormalise = require_kmeans.normaliseFeatures;
2276
2224
  exports.normalisedEntropy = require_utils.normalisedEntropy;