nystrom-ncut 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

nystrom_ncut/ncut_pytorch.py CHANGED
@@ -173,9 +173,10 @@ class NCUT(OnlineNystrom):
         else:
             sampled_indices = run_subgraph_sampling(
                 features,
-                num_sample=self.num_sample,
+                self.num_sample,
                 sample_method=self.sample_method,
             )
+        sampled_indices = torch.sort(sampled_indices).values
         sampled_features = features[sampled_indices]
         OnlineNystrom.fit(self, sampled_features)
 
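Note on the sampling change above: sorting the sampled indices before gathering keeps the anchor rows in the same relative order as the input features. A minimal sketch of the effect; the index values and tensor sizes below are illustrative, not taken from the package:

```python
import torch

indices = torch.tensor([42, 7, 19, 3])        # e.g. what a farthest-point sampler might return
sorted_indices = torch.sort(indices).values   # tensor([ 3,  7, 19, 42])

features = torch.randn(100, 16)
anchors = features[sorted_indices]            # [4 x 16]; anchor rows follow the input row order
```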
nystrom_ncut/nystrom.py CHANGED
@@ -52,6 +52,18 @@ class OnlineNystrom:
         self.transform_matrix: torch.Tensor = None          # [n x n_components]
         self.LS: torch.Tensor = None                        # [n]
 
+    def _update_to_kernel(self) -> Tuple[torch.Tensor, torch.Tensor]:
+        self.A = self.S = self.kernel.transform()
+        U, L = solve_eig(
+            self.A,
+            num_eig=self.inverse_approximation_dim,
+            eig_solver=self.eig_solver,
+        )                                                    # [n x (? + 1)], [? + 1]
+        self.Ahinv_UL = U * (L ** -0.5)                      # [n x (? + 1)]
+        self.Ahinv_VT = U.mT                                 # [(? + 1) x n]
+        self.Ahinv = self.Ahinv_UL @ self.Ahinv_VT           # [n x n]
+        return U, L
+
     def fit(self, features: torch.Tensor):
         OnlineNystrom.fit_transform(self, features)
         return self
@@ -60,17 +72,8 @@ class OnlineNystrom:
         self.anchor_features = features
 
         self.kernel.fit(self.anchor_features)
-        self.A = self.S = self.kernel.transform()            # [n x n]
-
         self.inverse_approximation_dim = max(self.n_components, features.shape[-1]) + 1
-        U, L = solve_eig(
-            self.A,
-            num_eig=self.inverse_approximation_dim,
-            eig_solver=self.eig_solver,
-        )                                                    # [n x (? + 1)], [? + 1]
-        self.Ahinv_UL = U * (L ** -0.5)                      # [n x (? + 1)]
-        self.Ahinv_VT = U.mT                                 # [(? + 1) x n]
-        self.Ahinv = self.Ahinv_UL @ self.Ahinv_VT           # [n x n]
+        U, L = self._update_to_kernel()                      # [n x (? + 1)], [? + 1]
 
         self.transform_matrix = (U / L)[:, :self.n_components]  # [n x n_components]
         self.LS = L[:self.n_components]                      # [n_components]
@@ -83,6 +86,7 @@ class OnlineNystrom:
             chunks = torch.chunk(features, n_chunks, dim=0)
             for chunk in chunks:
                 self.kernel.update(chunk)
+            self._update_to_kernel()
 
             compressed_BBT = torch.zeros((self.inverse_approximation_dim, self.inverse_approximation_dim))  # [(? + 1) x (? + 1))]
             for i, chunk in enumerate(chunks):
@@ -101,6 +105,7 @@ class OnlineNystrom:
         else:
             """ Unchunked version """
             B = self.kernel.update(features).mT              # [n x m]
+            self._update_to_kernel()
             compressed_B = self.Ahinv_VT @ B                 # [indirect_pca_dim x m]
 
             self.S = self.S + self.Ahinv_UL @ (compressed_B @ compressed_B.mT) @ self.Ahinv_UL.mT  # [n x n]
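For context on the refactor above: `_update_to_kernel` rebuilds the anchor kernel matrix and the factored inverse square root `Ahinv = U diag(L)^(-1/2) U^T` whenever the kernel changes, so `fit` and both branches of `update` share one code path. Below is a hedged sketch of that factorization, with `torch.linalg.eigh` standing in for the package's `solve_eig` helper (an assumption; `solve_eig` itself is not shown in this diff):

```python
import torch

def half_inverse_factors(A: torch.Tensor, k: int):
    # Top-k eigendecomposition of a symmetric kernel matrix A.
    L, U = torch.linalg.eigh(A)                # eigenvalues in ascending order
    L, U = L[-k:], U[:, -k:]                   # keep the k largest eigenpairs
    Ahinv_UL = U * (L ** -0.5)                 # [n x k]
    Ahinv_VT = U.mT                            # [k x n]
    Ahinv = Ahinv_UL @ Ahinv_VT                # U diag(L)^(-1/2) U^T: a rank-k A^(-1/2)
    return Ahinv_UL, Ahinv_VT, Ahinv

A = torch.randn(8, 8)
A = A @ A.mT + 8.0 * torch.eye(8)              # symmetric positive-definite test kernel
_, _, Ahinv = half_inverse_factors(A, k=8)
print(torch.allclose(Ahinv @ A @ Ahinv, torch.eye(8), atol=1e-4))  # True at full rank
```

Keeping `Ahinv_UL` and `Ahinv_VT` as separate factors lets `update` work with the small `compressed_B` (`[indirect_pca_dim x m]`, per the comment above) before forming the n × n correction to `S`.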
nystrom_ncut/propagation_utils.py CHANGED
@@ -11,7 +11,7 @@ from .common import ceildiv, lazy_normalize
 @torch.no_grad()
 def run_subgraph_sampling(
     features: torch.Tensor,
-    num_sample: int = 300,
+    num_sample: int,
     max_draw: int = 1000000,
     sample_method: Literal["farthest", "random"] = "farthest",
 ):
@@ -272,7 +272,7 @@ def propagate_eigenvectors(
     # sample subgraph
     subgraph_indices = run_subgraph_sampling(
         features,
-        num_sample=num_sample,
+        num_sample,
         sample_method=sample_method,
     )
 
nystrom_ncut/visualize_utils.py CHANGED
@@ -34,7 +34,7 @@ def _rgb_with_dimensionality_reduction(
 ) -> Tuple[torch.Tensor, torch.Tensor]:
     subgraph_indices = run_subgraph_sampling(
         features,
-        num_sample=num_sample,
+        num_sample,
         sample_method="farthest",
     )
 
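The call-site updates in this diff (`NCUT.fit`, `propagate_eigenvectors`, `_rgb_with_dimensionality_reduction`) track the signature change to `run_subgraph_sampling`: `num_sample` no longer defaults to 300 and the package now passes it positionally. A sketch of how a caller adapts, assuming `run_subgraph_sampling` is importable from `nystrom_ncut.propagation_utils` (the module path is inferred from the RECORD below; the feature tensor is illustrative):

```python
import torch
from nystrom_ncut.propagation_utils import run_subgraph_sampling

features = torch.randn(10_000, 64)

# 0.0.3: num_sample was a keyword argument with a default of 300
# indices = run_subgraph_sampling(features, num_sample=300, sample_method="farthest")

# 0.0.5: num_sample is required; the call sites in this diff pass it positionally
indices = run_subgraph_sampling(features, 300, sample_method="farthest")
```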
{nystrom_ncut-0.0.3.dist-info → nystrom_ncut-0.0.5.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: nystrom_ncut
-Version: 0.0.3
+Version: 0.0.5
 Summary: Normalized Cut and Nyström Approximation
 Author-email: Huzheng Yang <huze.yann@gmail.com>, Wentinn Liao <wentinn.liao@gmail.com>
 Project-URL: Documentation, https://github.com/JophiArcana/Nystrom-NCUT/
nystrom_ncut-0.0.5.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
+nystrom_ncut/__init__.py,sha256=Cww-_OsyQHLKpgw_Wh28_tUOvIMMr7Ey8w-tH7v99xQ,452
+nystrom_ncut/common.py,sha256=qdR_JwknT9H1Cv5LopwdwZfORFx-O8MLiRI6ZF1Qohc,558
+nystrom_ncut/ncut_pytorch.py,sha256=wRQXUPBOW2_vutocKf0J19HrFVkBYQePAYUEfotLfx4,11701
+nystrom_ncut/nystrom.py,sha256=HbwON9pLW3gEZvOmbDJwkQNHolOo1EBvwBPeh2p2uJE,8833
+nystrom_ncut/propagation_utils.py,sha256=mD6rZ_mwYjYXs1cp5ZaTK0FrJ4YhyCdoIUrdGRP9k-M,12119
+nystrom_ncut/visualize_utils.py,sha256=QmBatlX7Q-ZWF_iJ1zFDnPHFuofz3tCmtoNeeoMPw3U,18558
+nystrom_ncut-0.0.5.dist-info/LICENSE,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
+nystrom_ncut-0.0.5.dist-info/METADATA,sha256=n9zlRYBD02k478INScrj9V9rZ1mhXTylcMjkmQDgl1A,6058
+nystrom_ncut-0.0.5.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
+nystrom_ncut-0.0.5.dist-info/top_level.txt,sha256=j7g_j0S048EvguFFnGgD5Ewd3r2H6klsxd5A4dd-wHw,13
+nystrom_ncut-0.0.5.dist-info/RECORD,,
nystrom_ncut-0.0.3.dist-info/RECORD DELETED
@@ -1,11 +0,0 @@
-nystrom_ncut/__init__.py,sha256=Cww-_OsyQHLKpgw_Wh28_tUOvIMMr7Ey8w-tH7v99xQ,452
-nystrom_ncut/common.py,sha256=qdR_JwknT9H1Cv5LopwdwZfORFx-O8MLiRI6ZF1Qohc,558
-nystrom_ncut/ncut_pytorch.py,sha256=8LfznDwhq-WL_vQxbFBFLSzymg9SEDti_zzf9QQLnrA,11651
-nystrom_ncut/nystrom.py,sha256=Jo-P-2vnYk8yEZinGZnN3jHMiiB5AueoaLAYK4OmRqE,8604
-nystrom_ncut/propagation_utils.py,sha256=pigecB0rAmlbCoMNb8zhCyyNwh3QzkxXEnaBsDRE_ns,12136
-nystrom_ncut/visualize_utils.py,sha256=oNaDz_Xn12g3knEZZTb-QWVN-wTrnCNE5gn9cu8Xl_U,18569
-nystrom_ncut-0.0.3.dist-info/LICENSE,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
-nystrom_ncut-0.0.3.dist-info/METADATA,sha256=yh1pDFHUL2Z4WPVnuQyeHjgSEokUtpB6OPzPMxEClsM,6058
-nystrom_ncut-0.0.3.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
-nystrom_ncut-0.0.3.dist-info/top_level.txt,sha256=j7g_j0S048EvguFFnGgD5Ewd3r2H6klsxd5A4dd-wHw,13
-nystrom_ncut-0.0.3.dist-info/RECORD,,