scikit-network 0.33.3 (cp313-cp313-macosx_10_13_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of scikit-network might be problematic.
- scikit_network-0.33.3.dist-info/METADATA +122 -0
- scikit_network-0.33.3.dist-info/RECORD +228 -0
- scikit_network-0.33.3.dist-info/WHEEL +6 -0
- scikit_network-0.33.3.dist-info/licenses/AUTHORS.rst +43 -0
- scikit_network-0.33.3.dist-info/licenses/LICENSE +34 -0
- scikit_network-0.33.3.dist-info/top_level.txt +1 -0
- sknetwork/__init__.py +21 -0
- sknetwork/base.py +67 -0
- sknetwork/classification/__init__.py +8 -0
- sknetwork/classification/base.py +142 -0
- sknetwork/classification/base_rank.py +133 -0
- sknetwork/classification/diffusion.py +134 -0
- sknetwork/classification/knn.py +139 -0
- sknetwork/classification/metrics.py +205 -0
- sknetwork/classification/pagerank.py +66 -0
- sknetwork/classification/propagation.py +152 -0
- sknetwork/classification/tests/__init__.py +1 -0
- sknetwork/classification/tests/test_API.py +30 -0
- sknetwork/classification/tests/test_diffusion.py +77 -0
- sknetwork/classification/tests/test_knn.py +23 -0
- sknetwork/classification/tests/test_metrics.py +53 -0
- sknetwork/classification/tests/test_pagerank.py +20 -0
- sknetwork/classification/tests/test_propagation.py +24 -0
- sknetwork/classification/vote.cpp +27581 -0
- sknetwork/classification/vote.cpython-313-darwin.so +0 -0
- sknetwork/classification/vote.pyx +56 -0
- sknetwork/clustering/__init__.py +8 -0
- sknetwork/clustering/base.py +172 -0
- sknetwork/clustering/kcenters.py +253 -0
- sknetwork/clustering/leiden.py +242 -0
- sknetwork/clustering/leiden_core.cpp +31572 -0
- sknetwork/clustering/leiden_core.cpython-313-darwin.so +0 -0
- sknetwork/clustering/leiden_core.pyx +124 -0
- sknetwork/clustering/louvain.py +286 -0
- sknetwork/clustering/louvain_core.cpp +31217 -0
- sknetwork/clustering/louvain_core.cpython-313-darwin.so +0 -0
- sknetwork/clustering/louvain_core.pyx +124 -0
- sknetwork/clustering/metrics.py +91 -0
- sknetwork/clustering/postprocess.py +66 -0
- sknetwork/clustering/propagation_clustering.py +104 -0
- sknetwork/clustering/tests/__init__.py +1 -0
- sknetwork/clustering/tests/test_API.py +38 -0
- sknetwork/clustering/tests/test_kcenters.py +60 -0
- sknetwork/clustering/tests/test_leiden.py +34 -0
- sknetwork/clustering/tests/test_louvain.py +135 -0
- sknetwork/clustering/tests/test_metrics.py +50 -0
- sknetwork/clustering/tests/test_postprocess.py +39 -0
- sknetwork/data/__init__.py +6 -0
- sknetwork/data/base.py +33 -0
- sknetwork/data/load.py +406 -0
- sknetwork/data/models.py +459 -0
- sknetwork/data/parse.py +644 -0
- sknetwork/data/test_graphs.py +84 -0
- sknetwork/data/tests/__init__.py +1 -0
- sknetwork/data/tests/test_API.py +30 -0
- sknetwork/data/tests/test_base.py +14 -0
- sknetwork/data/tests/test_load.py +95 -0
- sknetwork/data/tests/test_models.py +52 -0
- sknetwork/data/tests/test_parse.py +250 -0
- sknetwork/data/tests/test_test_graphs.py +29 -0
- sknetwork/data/tests/test_toy_graphs.py +68 -0
- sknetwork/data/timeout.py +38 -0
- sknetwork/data/toy_graphs.py +611 -0
- sknetwork/embedding/__init__.py +8 -0
- sknetwork/embedding/base.py +94 -0
- sknetwork/embedding/force_atlas.py +198 -0
- sknetwork/embedding/louvain_embedding.py +148 -0
- sknetwork/embedding/random_projection.py +135 -0
- sknetwork/embedding/spectral.py +141 -0
- sknetwork/embedding/spring.py +198 -0
- sknetwork/embedding/svd.py +359 -0
- sknetwork/embedding/tests/__init__.py +1 -0
- sknetwork/embedding/tests/test_API.py +49 -0
- sknetwork/embedding/tests/test_force_atlas.py +35 -0
- sknetwork/embedding/tests/test_louvain_embedding.py +33 -0
- sknetwork/embedding/tests/test_random_projection.py +28 -0
- sknetwork/embedding/tests/test_spectral.py +81 -0
- sknetwork/embedding/tests/test_spring.py +50 -0
- sknetwork/embedding/tests/test_svd.py +43 -0
- sknetwork/gnn/__init__.py +10 -0
- sknetwork/gnn/activation.py +117 -0
- sknetwork/gnn/base.py +181 -0
- sknetwork/gnn/base_activation.py +90 -0
- sknetwork/gnn/base_layer.py +109 -0
- sknetwork/gnn/gnn_classifier.py +305 -0
- sknetwork/gnn/layer.py +153 -0
- sknetwork/gnn/loss.py +180 -0
- sknetwork/gnn/neighbor_sampler.py +65 -0
- sknetwork/gnn/optimizer.py +164 -0
- sknetwork/gnn/tests/__init__.py +1 -0
- sknetwork/gnn/tests/test_activation.py +56 -0
- sknetwork/gnn/tests/test_base.py +75 -0
- sknetwork/gnn/tests/test_base_layer.py +37 -0
- sknetwork/gnn/tests/test_gnn_classifier.py +130 -0
- sknetwork/gnn/tests/test_layers.py +80 -0
- sknetwork/gnn/tests/test_loss.py +33 -0
- sknetwork/gnn/tests/test_neigh_sampler.py +23 -0
- sknetwork/gnn/tests/test_optimizer.py +43 -0
- sknetwork/gnn/tests/test_utils.py +41 -0
- sknetwork/gnn/utils.py +127 -0
- sknetwork/hierarchy/__init__.py +6 -0
- sknetwork/hierarchy/base.py +96 -0
- sknetwork/hierarchy/louvain_hierarchy.py +272 -0
- sknetwork/hierarchy/metrics.py +234 -0
- sknetwork/hierarchy/paris.cpp +37865 -0
- sknetwork/hierarchy/paris.cpython-313-darwin.so +0 -0
- sknetwork/hierarchy/paris.pyx +316 -0
- sknetwork/hierarchy/postprocess.py +350 -0
- sknetwork/hierarchy/tests/__init__.py +1 -0
- sknetwork/hierarchy/tests/test_API.py +24 -0
- sknetwork/hierarchy/tests/test_algos.py +34 -0
- sknetwork/hierarchy/tests/test_metrics.py +62 -0
- sknetwork/hierarchy/tests/test_postprocess.py +57 -0
- sknetwork/linalg/__init__.py +9 -0
- sknetwork/linalg/basics.py +37 -0
- sknetwork/linalg/diteration.cpp +27397 -0
- sknetwork/linalg/diteration.cpython-313-darwin.so +0 -0
- sknetwork/linalg/diteration.pyx +47 -0
- sknetwork/linalg/eig_solver.py +93 -0
- sknetwork/linalg/laplacian.py +15 -0
- sknetwork/linalg/normalizer.py +86 -0
- sknetwork/linalg/operators.py +225 -0
- sknetwork/linalg/polynome.py +76 -0
- sknetwork/linalg/ppr_solver.py +170 -0
- sknetwork/linalg/push.cpp +31069 -0
- sknetwork/linalg/push.cpython-313-darwin.so +0 -0
- sknetwork/linalg/push.pyx +71 -0
- sknetwork/linalg/sparse_lowrank.py +142 -0
- sknetwork/linalg/svd_solver.py +91 -0
- sknetwork/linalg/tests/__init__.py +1 -0
- sknetwork/linalg/tests/test_eig.py +44 -0
- sknetwork/linalg/tests/test_laplacian.py +18 -0
- sknetwork/linalg/tests/test_normalization.py +34 -0
- sknetwork/linalg/tests/test_operators.py +66 -0
- sknetwork/linalg/tests/test_polynome.py +38 -0
- sknetwork/linalg/tests/test_ppr.py +50 -0
- sknetwork/linalg/tests/test_sparse_lowrank.py +61 -0
- sknetwork/linalg/tests/test_svd.py +38 -0
- sknetwork/linkpred/__init__.py +2 -0
- sknetwork/linkpred/base.py +46 -0
- sknetwork/linkpred/nn.py +126 -0
- sknetwork/linkpred/tests/__init__.py +1 -0
- sknetwork/linkpred/tests/test_nn.py +27 -0
- sknetwork/log.py +19 -0
- sknetwork/path/__init__.py +5 -0
- sknetwork/path/dag.py +54 -0
- sknetwork/path/distances.py +98 -0
- sknetwork/path/search.py +31 -0
- sknetwork/path/shortest_path.py +61 -0
- sknetwork/path/tests/__init__.py +1 -0
- sknetwork/path/tests/test_dag.py +37 -0
- sknetwork/path/tests/test_distances.py +62 -0
- sknetwork/path/tests/test_search.py +40 -0
- sknetwork/path/tests/test_shortest_path.py +40 -0
- sknetwork/ranking/__init__.py +8 -0
- sknetwork/ranking/base.py +61 -0
- sknetwork/ranking/betweenness.cpp +9704 -0
- sknetwork/ranking/betweenness.cpython-313-darwin.so +0 -0
- sknetwork/ranking/betweenness.pyx +97 -0
- sknetwork/ranking/closeness.py +92 -0
- sknetwork/ranking/hits.py +94 -0
- sknetwork/ranking/katz.py +83 -0
- sknetwork/ranking/pagerank.py +110 -0
- sknetwork/ranking/postprocess.py +37 -0
- sknetwork/ranking/tests/__init__.py +1 -0
- sknetwork/ranking/tests/test_API.py +32 -0
- sknetwork/ranking/tests/test_betweenness.py +38 -0
- sknetwork/ranking/tests/test_closeness.py +30 -0
- sknetwork/ranking/tests/test_hits.py +20 -0
- sknetwork/ranking/tests/test_pagerank.py +62 -0
- sknetwork/ranking/tests/test_postprocess.py +26 -0
- sknetwork/regression/__init__.py +4 -0
- sknetwork/regression/base.py +61 -0
- sknetwork/regression/diffusion.py +210 -0
- sknetwork/regression/tests/__init__.py +1 -0
- sknetwork/regression/tests/test_API.py +32 -0
- sknetwork/regression/tests/test_diffusion.py +56 -0
- sknetwork/sknetwork.py +3 -0
- sknetwork/test_base.py +35 -0
- sknetwork/test_log.py +15 -0
- sknetwork/topology/__init__.py +8 -0
- sknetwork/topology/cliques.cpp +32562 -0
- sknetwork/topology/cliques.cpython-313-darwin.so +0 -0
- sknetwork/topology/cliques.pyx +149 -0
- sknetwork/topology/core.cpp +30648 -0
- sknetwork/topology/core.cpython-313-darwin.so +0 -0
- sknetwork/topology/core.pyx +90 -0
- sknetwork/topology/cycles.py +243 -0
- sknetwork/topology/minheap.cpp +27329 -0
- sknetwork/topology/minheap.cpython-313-darwin.so +0 -0
- sknetwork/topology/minheap.pxd +20 -0
- sknetwork/topology/minheap.pyx +109 -0
- sknetwork/topology/structure.py +194 -0
- sknetwork/topology/tests/__init__.py +1 -0
- sknetwork/topology/tests/test_cliques.py +28 -0
- sknetwork/topology/tests/test_core.py +19 -0
- sknetwork/topology/tests/test_cycles.py +65 -0
- sknetwork/topology/tests/test_structure.py +85 -0
- sknetwork/topology/tests/test_triangles.py +38 -0
- sknetwork/topology/tests/test_wl.py +72 -0
- sknetwork/topology/triangles.cpp +8891 -0
- sknetwork/topology/triangles.cpython-313-darwin.so +0 -0
- sknetwork/topology/triangles.pyx +151 -0
- sknetwork/topology/weisfeiler_lehman.py +133 -0
- sknetwork/topology/weisfeiler_lehman_core.cpp +27632 -0
- sknetwork/topology/weisfeiler_lehman_core.cpython-313-darwin.so +0 -0
- sknetwork/topology/weisfeiler_lehman_core.pyx +114 -0
- sknetwork/utils/__init__.py +7 -0
- sknetwork/utils/check.py +355 -0
- sknetwork/utils/format.py +221 -0
- sknetwork/utils/membership.py +82 -0
- sknetwork/utils/neighbors.py +115 -0
- sknetwork/utils/tests/__init__.py +1 -0
- sknetwork/utils/tests/test_check.py +190 -0
- sknetwork/utils/tests/test_format.py +63 -0
- sknetwork/utils/tests/test_membership.py +24 -0
- sknetwork/utils/tests/test_neighbors.py +41 -0
- sknetwork/utils/tests/test_tfidf.py +18 -0
- sknetwork/utils/tests/test_values.py +66 -0
- sknetwork/utils/tfidf.py +37 -0
- sknetwork/utils/values.py +76 -0
- sknetwork/visualization/__init__.py +4 -0
- sknetwork/visualization/colors.py +34 -0
- sknetwork/visualization/dendrograms.py +277 -0
- sknetwork/visualization/graphs.py +1039 -0
- sknetwork/visualization/tests/__init__.py +1 -0
- sknetwork/visualization/tests/test_dendrograms.py +53 -0
- sknetwork/visualization/tests/test_graphs.py +176 -0
sknetwork/linalg/diteration.pyx
@@ -0,0 +1,47 @@
+# distutils: language = c++
+# cython: language_level=3
+"""
+Created on Apr 2020
+@author: Nathan de Lara <nathan.delara@polytechnique.org>
+"""
+cimport cython
+from cython.parallel import prange
+
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def diffusion(int[:] indptr, int[:] indices, float[:] data, float[:] scores, float[:] fluid,
+              float damping_factor, int n_iter, float tol):
+    """One loop of fluid diffusion."""
+    cdef int n = fluid.shape[0]
+    cdef int i
+    cdef int j
+    cdef int j1
+    cdef int j2
+    cdef int jj
+    cdef float sent
+    cdef float tmp
+    cdef float removed
+    cdef float restart_prob = 1 - damping_factor
+    cdef float residu = restart_prob
+
+    for k in range(n_iter):
+        for i in prange(n, nogil=True, schedule='guided'):
+            sent = fluid[i]
+            if sent > 0:
+                scores[i] += sent
+                fluid[i] = 0
+                j1 = indptr[i]
+                j2 = indptr[i+1]
+                tmp = sent * damping_factor
+                if j2 != j1:
+                    for jj in range(j1, j2):
+                        j = indices[jj]
+                        fluid[j] += tmp * data[jj]
+                    removed = sent * restart_prob
+                else:
+                    removed = sent
+                residu -= removed
+        if residu < tol * restart_prob:
+            return
+    return
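For readers who prefer plain Python, the Cython routine above is a push-style diffusion (a damped, PageRank-like update over a CSR matrix). The following is an illustrative NumPy sketch of the same loop, not part of the package, assuming indptr, indices and data are NumPy arrays describing a row-stochastic CSR transition matrix and that scores and fluid are float arrays:

    import numpy as np

    def diffusion_numpy(indptr, indices, data, scores, fluid, damping_factor, n_iter, tol):
        # Each node pushes its current fluid to its neighbors, damped by
        # damping_factor, until the remaining residual drops below tolerance.
        restart_prob = 1 - damping_factor
        residu = restart_prob
        n = len(fluid)
        for _ in range(n_iter):
            for i in range(n):
                sent = fluid[i]
                if sent > 0:
                    scores[i] += sent
                    fluid[i] = 0
                    j1, j2 = indptr[i], indptr[i + 1]
                    if j2 != j1:
                        # Distribute the damped fluid along the outgoing edges of node i.
                        fluid[indices[j1:j2]] += sent * damping_factor * data[j1:j2]
                        residu -= sent * restart_prob
                    else:
                        residu -= sent
            if residu < tol * restart_prob:
                return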
sknetwork/linalg/eig_solver.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+# coding: utf-8
+"""
+Created on July 9 2019
+@author: Nathan De Lara <nathan.delara@polytechnique.org>
+"""
+from abc import ABC
+from typing import Union
+
+import numpy as np
+from scipy import sparse
+from scipy.sparse.linalg import eigsh
+from sknetwork.base import Algorithm
+
+
+class EigSolver(Algorithm, ABC):
+    """Generic class for eigensolvers.
+
+    Parameters
+    ----------
+    which: str
+        Which eigenvectors and eigenvalues to find:
+
+        * ``'LM'`` : Largest (in magnitude) eigenvalues.
+        * ``'SM'`` : Smallest (in magnitude) eigenvalues.
+
+    Attributes
+    ----------
+    eigenvectors_: np.ndarray
+        Two-dimensional array, each column is an eigenvector of the input.
+    eigenvalues_: np.ndarray
+        Eigenvalues associated to each eigenvector.
+    """
+    def __init__(self, which='LM'):
+        self.which = which
+
+        self.eigenvectors_ = None
+        self.eigenvalues_ = None
+
+
+class LanczosEig(EigSolver):
+    """Eigenvalue solver using Lanczos method.
+
+    Parameters
+    ----------
+    which : str
+        Which eigenvectors and eigenvalues to find:
+
+        * ``'LM'`` : Largest (in modulus) eigenvalues.
+        * ``'SM'`` : Smallest (in modulus) eigenvalues.
+        * ``'LA'`` : Largest (algebraic) eigenvalues.
+        * ``'SA'`` : Smallest (algebraic) eigenvalues.
+
+    n_iter : int
+        Maximum number of Arnoldi update iterations allowed.
+        Default = 10 * nb of rows.
+    tol : float
+        Relative accuracy for eigenvalues (stopping criterion).
+        Default = 0 (machine precision).
+    Attributes
+    ----------
+    eigenvectors_: np.ndarray
+        Two-dimensional array, each column is an eigenvector of the input.
+    eigenvalues_: np.ndarray
+        Eigenvalues associated to each eigenvector.
+
+    See Also
+    --------
+    scipy.sparse.linalg.eigsh
+    """
+    def __init__(self, which='LM', n_iter: int = None, tol: float = 0.):
+        super(LanczosEig, self).__init__(which=which)
+        self.n_iter = n_iter
+        self.tol = tol
+
+    def fit(self, matrix: Union[sparse.csr_matrix, sparse.linalg.LinearOperator], n_components: int = 2):
+        """Perform spectral decomposition on symmetric input matrix.
+
+        Parameters
+        ----------
+        matrix : sparse.csr_matrix or linear operator
+            Matrix to decompose.
+        n_components : int
+            Number of eigenvectors to compute
+
+        Returns
+        -------
+        self: :class:`EigSolver`
+        """
+        self.eigenvalues_, self.eigenvectors_ = eigsh(matrix.astype(float), n_components, which=self.which,
+                                                      maxiter=self.n_iter, tol=self.tol)
+
+        return self
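A minimal usage sketch of the solver above (illustrative only, assuming the wheel is installed; the input must be symmetric since fit delegates to scipy's eigsh):

    import numpy as np
    from scipy import sparse
    from sknetwork.linalg.eig_solver import LanczosEig

    # Symmetric adjacency matrix of a triangle graph.
    adjacency = sparse.csr_matrix(np.array([[0, 1, 1],
                                            [1, 0, 1],
                                            [1, 1, 0]], dtype=float))
    solver = LanczosEig(which='LM')
    solver.fit(adjacency, n_components=2)
    print(solver.eigenvalues_)         # two eigenvalues of largest magnitude
    print(solver.eigenvectors_.shape)  # (3, 2)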
sknetwork/linalg/laplacian.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created in July 2022
+@author: Thomas Bonald <thomas.bonald@telecom-paris.fr>
+"""
+
+import numpy as np
+from scipy import sparse
+
+
+def get_laplacian(adjacency: sparse.csr_matrix) -> sparse.csr_matrix:
+    """Return the Laplacian matrix of a graph."""
+    weights = adjacency.dot(np.ones(adjacency.shape[0]))
+    return sparse.diags(weights) - adjacency
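As a quick illustration (not from the package), the Laplacian returned above maps the all-ones vector to zero, since every row of D - A sums to zero:

    import numpy as np
    from scipy import sparse
    from sknetwork.linalg.laplacian import get_laplacian

    # Path graph on three nodes.
    adjacency = sparse.csr_matrix(np.array([[0, 1, 0],
                                            [1, 0, 1],
                                            [0, 1, 0]], dtype=float))
    laplacian = get_laplacian(adjacency)
    print(laplacian.dot(np.ones(3)))  # [0. 0. 0.]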
sknetwork/linalg/normalizer.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created in November 2019
+@author: Nathan de Lara <nathan.delara@polytechnique.org>
+"""
+from typing import Union
+
+import numpy as np
+from scipy import sparse
+from scipy.sparse.linalg import LinearOperator
+
+
+def diagonal_pseudo_inverse(weights: np.ndarray) -> sparse.csr_matrix:
+    """Compute :math:`\\text{diag}(w)^+`, the pseudo-inverse of the diagonal matrix
+    with diagonal elements given by the weights :math:`w`.
+
+    Parameters
+    ----------
+    weights:
+        The weights to invert.
+
+    Returns
+    -------
+    sparse.csr_matrix
+
+    """
+    diag: sparse.csr_matrix = sparse.diags(weights, format='csr')
+    diag.data = 1 / diag.data
+    return diag
+
+
+def get_norms(matrix: Union[sparse.csr_matrix, np.ndarray, LinearOperator], p=1):
+    """Get the norms of rows of a matrix.
+
+    Parameters
+    ----------
+    matrix : numpy array or sparse CSR matrix or LinearOperator, shape (n_rows, n_cols)
+        Input matrix.
+    p :
+        Order of the norm (1 or 2).
+    Returns
+    -------
+    norms : np.array, shape (n_rows,)
+        Vector norms
+    """
+    n_row, n_col = matrix.shape
+    if isinstance(matrix, np.ndarray):
+        input_matrix = sparse.csr_matrix(matrix)
+    elif isinstance(matrix, sparse.csr_matrix):
+        input_matrix = matrix.copy()
+    else:
+        input_matrix = matrix
+    if p == 1:
+        if not isinstance(matrix, LinearOperator):
+            input_matrix.data = np.abs(input_matrix.data)
+        return input_matrix.dot(np.ones(n_col))
+    elif p == 2:
+        if isinstance(matrix, LinearOperator):
+            raise ValueError('Only norm 1 is available for linear operators.')
+        input_matrix.data = input_matrix.data**2
+        return np.sqrt(input_matrix.dot(np.ones(n_col)))
+    else:
+        raise ValueError('Only norms 1 and 2 are available.')
+
+
+def normalize(matrix: Union[sparse.csr_matrix, np.ndarray, LinearOperator], p=1):
+    """Normalize the rows of a matrix so that all have norm 1 (or 0; null rows remain null).
+
+    Parameters
+    ----------
+    matrix :
+        Input matrix.
+    p :
+        Order of the norm.
+
+    Returns
+    -------
+    normalized matrix :
+        Normalized matrix (same format as input matrix).
+    """
+    norms = get_norms(matrix, p)
+    diag = diagonal_pseudo_inverse(norms)
+    if hasattr(matrix, 'left_sparse_dot') and callable(matrix.left_sparse_dot):
+        return matrix.left_sparse_dot(diag)
+    return diag.dot(matrix)
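An illustrative sketch of these helpers (not from the package docs): with p=1 each non-empty row of the result sums to 1, and per the docstring above, null rows stay null:

    import numpy as np
    from scipy import sparse
    from sknetwork.linalg.normalizer import get_norms, normalize

    matrix = sparse.csr_matrix(np.array([[1., 3.],
                                         [2., 2.]]))
    print(get_norms(matrix, p=1))            # [4. 4.]
    print(normalize(matrix, p=1).toarray())  # rows [0.25, 0.75] and [0.5, 0.5]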
sknetwork/linalg/operators.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python3
+# coding: utf-8
+"""
+Created in April 2020
+@author: Thomas Bonald <bonald@enst.fr>
+@author: Nathan de Lara <nathan.delara@polytechnique.org>
+"""
+from typing import Union
+
+import numpy as np
+from scipy import sparse
+from scipy.sparse.linalg import LinearOperator
+
+from sknetwork.linalg import diagonal_pseudo_inverse
+from sknetwork.linalg.normalizer import normalize
+from sknetwork.linalg.sparse_lowrank import SparseLR
+from sknetwork.utils.check import check_format
+
+
+class Regularizer(SparseLR):
+    """Regularized matrix as a Scipy LinearOperator.
+
+    Defined by :math:`A + \\alpha \\frac{11^T}n` where :math:`A` is the input matrix
+    and :math:`\\alpha` the regularization factor.
+
+    Parameters
+    ----------
+    input_matrix :
+        Input matrix.
+    regularization : float
+        Regularization factor.
+        Default value = 1.
+
+    Examples
+    --------
+    >>> from sknetwork.data import house
+    >>> adjacency = house()
+    >>> regularizer = Regularizer(adjacency)
+    >>> regularizer.dot(np.ones(5))
+    array([3., 4., 3., 3., 4.])
+    """
+    def __init__(self, input_matrix: Union[sparse.csr_matrix, np.ndarray], regularization: float = 1):
+        n_row, n_col = input_matrix.shape
+        u = regularization * np.ones(n_row)
+        v = np.ones(n_col) / n_col
+        super(Regularizer, self).__init__(input_matrix, (u, v))
+
+
+class Normalizer(LinearOperator):
+    """Normalized matrix as a Scipy LinearOperator.
+
+    Defined by :math:`D^{-1}A` where :math:`A` is the regularized adjacency matrix and :math:`D` the corresponding
+    diagonal matrix of degrees (sums over rows).
+
+    Parameters
+    ----------
+    adjacency :
+        :term:`Adjacency <adjacency>` matrix of the graph.
+    regularization : float
+        Regularization factor.
+        Default value = 0.
+
+    Examples
+    --------
+    >>> from sknetwork.data import house
+    >>> adjacency = house()
+    >>> normalizer = Normalizer(adjacency)
+    >>> normalizer.dot(np.ones(5))
+    array([1., 1., 1., 1., 1.])
+    """
+    def __init__(self, adjacency: Union[sparse.csr_matrix, np.ndarray], regularization: float = 0):
+        if adjacency.ndim == 1:
+            adjacency = adjacency.reshape(1, -1)
+        super(Normalizer, self).__init__(dtype=float, shape=adjacency.shape)
+        n_col = adjacency.shape[1]
+        self.regularization = regularization
+        self.adjacency = adjacency
+        self.norm_diag = diagonal_pseudo_inverse(adjacency.dot(np.ones(n_col)) + regularization)
+
+    def _matvec(self, matrix: np.ndarray):
+        prod = self.adjacency.dot(matrix)
+        if self.regularization > 0:
+            n_row = self.shape[0]
+            if matrix.ndim == 2:
+                prod += self.regularization * np.outer(np.ones(n_row), matrix.mean(axis=0))
+            else:
+                prod += self.regularization * matrix.mean() * np.ones(n_row)
+        return self.norm_diag.dot(prod)
+
+    def _transpose(self):
+        return self
+
+
+class Laplacian(LinearOperator):
+    """Laplacian matrix as a Scipy LinearOperator.
+
+    Defined by :math:`L = D - A` where :math:`A` is the regularized adjacency matrix and :math:`D` the corresponding
+    diagonal matrix of degrees.
+
+    If normalized, defined by :math:`L = I - D^{-1/2}AD^{-1/2}`.
+
+    Parameters
+    ----------
+    adjacency :
+        :term:`Adjacency <adjacency>` matrix of the graph.
+    regularization : float
+        Regularization factor.
+        Default value = 0.
+    normalized_laplacian : bool
+        If ``True``, use normalized Laplacian.
+        Default value = ``False``.
+
+    Examples
+    --------
+    >>> from sknetwork.data import house
+    >>> adjacency = house()
+    >>> laplacian = Laplacian(adjacency)
+    >>> laplacian.dot(np.ones(5))
+    array([0., 0., 0., 0., 0.])
+    """
+    def __init__(self, adjacency: Union[sparse.csr_matrix, np.ndarray], regularization: float = 0,
+                 normalized_laplacian: bool = False):
+        super(Laplacian, self).__init__(dtype=float, shape=adjacency.shape)
+        n = adjacency.shape[0]
+        self.regularization = regularization
+        self.normalized_laplacian = normalized_laplacian
+        self.weights = adjacency.dot(np.ones(n))
+        self.laplacian = sparse.diags(self.weights, format='csr') - adjacency
+        if self.normalized_laplacian:
+            self.norm_diag = diagonal_pseudo_inverse(np.sqrt(self.weights + regularization))
+
+    def _matvec(self, matrix: np.ndarray):
+        if self.normalized_laplacian:
+            matrix = self.norm_diag.dot(matrix)
+        prod = self.laplacian.dot(matrix)
+        if self.regularization > 0:
+            n = self.shape[0]
+            if matrix.ndim == 2:
+                prod += self.regularization * (matrix - np.outer(np.ones(n), matrix.mean(axis=0)))
+            else:
+                prod += self.regularization * (matrix - matrix.mean())
+        if self.normalized_laplacian:
+            prod = self.norm_diag.dot(prod)
+        return prod
+
+    def _transpose(self):
+        return self
+
+    def astype(self, dtype: Union[str, np.dtype]):
+        """Change dtype of the object."""
+        self.dtype = np.dtype(dtype)
+        self.laplacian = self.laplacian.astype(self.dtype)
+        return self
+
+
+class CoNeighbor(LinearOperator):
+    """Co-neighborhood adjacency as a LinearOperator.
+
+    :math:`\\tilde{A} = AF^{-1}A^T`, or :math:`\\tilde{B} = BF^{-1}B^T`.
+
+    where F is a weight matrix.
+
+    Parameters
+    ----------
+    adjacency:
+        Adjacency or biadjacency of the input graph.
+    normalized:
+        If ``True``, F is the diagonal in-degree matrix :math:`F = \\text{diag}(A^T1)`.
+        Otherwise, F is the identity matrix.
+
+    Examples
+    --------
+    >>> from sknetwork.data import star_wars
+    >>> biadjacency = star_wars(metadata=False)
+    >>> d_out = biadjacency.dot(np.ones(3))
+    >>> coneighbor = CoNeighbor(biadjacency)
+    >>> np.allclose(d_out, coneighbor.dot(np.ones(4)))
+    True
+    """
+    def __init__(self, adjacency: Union[sparse.csr_matrix, np.ndarray], normalized: bool = True):
+        adjacency = check_format(adjacency).astype(float)
+        n = adjacency.shape[0]
+        super(CoNeighbor, self).__init__(dtype=float, shape=(n, n))
+
+        if normalized:
+            self.forward = normalize(adjacency.T).tocsr()
+        else:
+            self.forward = adjacency.T
+
+        self.backward = adjacency
+
+    def __neg__(self):
+        self.backward *= -1
+        return self
+
+    def __mul__(self, other):
+        self.backward *= other
+        return self
+
+    def _matvec(self, matrix: np.ndarray):
+        return self.backward.dot(self.forward.dot(matrix))
+
+    def _transpose(self):
+        """Transposed operator"""
+        operator = CoNeighbor(self.backward)
+        operator.backward = self.forward.T.tocsr()
+        operator.forward = self.backward.T.tocsr()
+        return operator
+
+    def left_sparse_dot(self, matrix: sparse.csr_matrix):
+        """Left dot product with a sparse matrix"""
+        self.backward = matrix.dot(self.backward)
+        return self
+
+    def right_sparse_dot(self, matrix: sparse.csr_matrix):
+        """Right dot product with a sparse matrix"""
+        self.forward = self.forward.dot(matrix)
+        return self
+
+    def astype(self, dtype: Union[str, np.dtype]):
+        """Change dtype of the object."""
+        self.backward.astype(dtype)
+        self.forward.astype(dtype)
+        self.dtype = dtype
+        return self
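For orientation, an illustrative sketch (not from the package) checking that the lazy operators above agree with their explicit dense counterparts on a small graph:

    import numpy as np
    from scipy import sparse
    from sknetwork.linalg.operators import Laplacian, Normalizer

    adjacency = sparse.csr_matrix(np.array([[0, 1, 1],
                                            [1, 0, 0],
                                            [1, 0, 0]], dtype=float))
    degrees = adjacency.dot(np.ones(3))
    x = np.arange(3, dtype=float)

    # Laplacian operator vs explicit D - A.
    laplacian = Laplacian(adjacency)
    dense_laplacian = np.diag(degrees) - adjacency.toarray()
    print(np.allclose(laplacian.dot(x), dense_laplacian.dot(x)))  # True

    # Normalizer operator vs explicit row-normalized product A x / d.
    normalizer = Normalizer(adjacency)
    print(np.allclose(normalizer.dot(x), adjacency.toarray().dot(x) / degrees))  # True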
sknetwork/linalg/polynome.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created in April 2020
+@author: Nathan de Lara <nathan.delara@polytechnique.org>
+"""
+
+from typing import Union
+
+import numpy as np
+from scipy import sparse
+from scipy.sparse.linalg import LinearOperator
+
+from sknetwork.utils.check import check_format, check_square
+
+
+class Polynome(LinearOperator):
+    """Polynome of a matrix as a linear operator
+
+    :math:`P(A) = \\alpha_k A^k + ... + \\alpha_1 A + \\alpha_0`.
+
+    Parameters
+    ----------
+    matrix :
+        Square matrix
+    coeffs : np.ndarray
+        Coefficients of the polynome by increasing order of power.
+
+    Examples
+    --------
+    >>> from scipy import sparse
+    >>> from sknetwork.linalg import Polynome
+    >>> matrix = sparse.eye(2, format='csr')
+    >>> polynome = Polynome(matrix, np.arange(3))
+    >>> x = np.ones(2)
+    >>> polynome.dot(x)
+    array([3., 3.])
+    >>> polynome.T.dot(x)
+    array([3., 3.])
+
+    Notes
+    -----
+    The polynome is evaluated using the `Ruffini-Horner method
+    <https://en.wikipedia.org/wiki/Horner%27s_method>`_.
+    """
+
+    def __init__(self, matrix: Union[sparse.csr_matrix, np.ndarray], coeffs: np.ndarray):
+        if coeffs.shape[0] == 0:
+            raise ValueError('A polynome requires at least one coefficient.')
+        if not isinstance(matrix, LinearOperator):
+            matrix = check_format(matrix)
+        check_square(matrix)
+        shape = matrix.shape
+        dtype = matrix.dtype
+        super(Polynome, self).__init__(dtype=dtype, shape=shape)
+
+        self.matrix = matrix
+        self.coeffs = coeffs
+
+    def __neg__(self):
+        return Polynome(self.matrix, -self.coeffs)
+
+    def __mul__(self, other):
+        return Polynome(self.matrix, other * self.coeffs)
+
+    def _matvec(self, matrix: np.ndarray):
+        """Right dot product with a dense matrix.
+        """
+        y = self.coeffs[-1] * matrix
+        for a in self.coeffs[::-1][1:]:
+            y = self.matrix.dot(y) + a * matrix
+        return y
+
+    def _transpose(self):
+        """Transposed operator."""
+        return Polynome(self.matrix.T.tocsr(), self.coeffs)
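A short illustrative check (not from the package docs) that the Horner evaluation above matches the explicit polynomial, here P(A) = 2 A^2 + A with coefficients given in increasing order of power:

    import numpy as np
    from scipy import sparse
    from sknetwork.linalg import Polynome

    matrix = sparse.csr_matrix(np.array([[0., 1.],
                                         [1., 1.]]))
    coeffs = np.array([0., 1., 2.])   # alpha_0, alpha_1, alpha_2
    polynome = Polynome(matrix, coeffs)
    x = np.array([1., 2.])

    explicit = 2 * matrix.dot(matrix.dot(x)) + matrix.dot(x)
    print(np.allclose(polynome.dot(x), explicit))  # True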