nystrom-ncut 0.0.4__tar.gz → 0.0.5__tar.gz

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: nystrom_ncut
-Version: 0.0.4
+Version: 0.0.5
 Summary: Normalized Cut and Nyström Approximation
 Author-email: Huzheng Yang <huze.yann@gmail.com>, Wentinn Liao <wentinn.liao@gmail.com>
 Project-URL: Documentation, https://github.com/JophiArcana/Nystrom-NCUT/
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "nystrom_ncut"
-version = "0.0.4"
+version = "0.0.5"
 authors = [
     { name = "Huzheng Yang", email = "huze.yann@gmail.com" },
     { name = "Wentinn Liao", email = "wentinn.liao@gmail.com" },
@@ -173,9 +173,10 @@ class NCUT(OnlineNystrom):
         else:
             sampled_indices = run_subgraph_sampling(
                 features,
-                num_sample=self.num_sample,
+                self.num_sample,
                 sample_method=self.sample_method,
             )
+        sampled_indices = torch.sort(sampled_indices).values
         sampled_features = features[sampled_indices]
         OnlineNystrom.fit(self, sampled_features)
 
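The new torch.sort call returns the sampled indices in ascending order before they are used to slice the feature matrix. A minimal standalone sketch of the same pattern (the feature tensor and sample size below are illustrative placeholders, not values from the package):

    import torch

    features = torch.rand(1000, 12)   # placeholder feature matrix
    num_sample = 100                   # placeholder sample size

    # e.g. random index sampling; torch.randperm returns indices in arbitrary order
    sampled_indices = torch.randperm(features.shape[0])[:num_sample]

    # sort the indices so the sampled rows keep the original row order of `features`
    sampled_indices = torch.sort(sampled_indices).values
    sampled_features = features[sampled_indices]

    assert torch.all(sampled_indices[:-1] < sampled_indices[1:])  # strictly increasing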
@@ -11,7 +11,7 @@ from .common import ceildiv, lazy_normalize
 @torch.no_grad()
 def run_subgraph_sampling(
     features: torch.Tensor,
-    num_sample: int = 300,
+    num_sample: int,
     max_draw: int = 1000000,
     sample_method: Literal["farthest", "random"] = "farthest",
 ):
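With the default of 300 removed, every caller of run_subgraph_sampling must now pass num_sample explicitly; in this release the internal call sites pass it positionally. A hedged usage sketch (the import path and the feature tensor are assumptions for illustration only):

    import torch
    from nystrom_ncut.propagation_utils import run_subgraph_sampling  # import path is an assumption

    features = torch.rand(5000, 12)  # placeholder features

    indices = run_subgraph_sampling(
        features,
        300,                          # the former default, now supplied explicitly
        sample_method="farthest",
    )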
@@ -272,7 +272,7 @@ def propagate_eigenvectors(
     # sample subgraph
     subgraph_indices = run_subgraph_sampling(
         features,
-        num_sample=num_sample,
+        num_sample,
         sample_method=sample_method,
     )
 
@@ -34,7 +34,7 @@ def _rgb_with_dimensionality_reduction(
 ) -> Tuple[torch.Tensor, torch.Tensor]:
     subgraph_indices = run_subgraph_sampling(
         features,
-        num_sample=num_sample,
+        num_sample,
         sample_method="farthest",
     )
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: nystrom_ncut
-Version: 0.0.4
+Version: 0.0.5
 Summary: Normalized Cut and Nyström Approximation
 Author-email: Huzheng Yang <huze.yann@gmail.com>, Wentinn Liao <wentinn.liao@gmail.com>
 Project-URL: Documentation, https://github.com/JophiArcana/Nystrom-NCUT/
@@ -39,26 +39,26 @@ if __name__ == "__main__":
 
     torch.set_printoptions(precision=8, sci_mode=False, linewidth=400)
     torch.set_default_dtype(torch.float64)
-    # torch.manual_seed(1212)
-    # np.random.seed(1212)
+    torch.manual_seed(1212)
+    np.random.seed(1212)
 
-    M = torch.rand((200, 12))
-    NC = NCUT(n_components=12, num_sample=80, sample_method="random", chunk_size=20)
+    M = torch.rand((12000, 12))
+    NC = NCUT(n_components=12, num_sample=10000, sample_method="farthest")
 
     torch.manual_seed(1212)
     np.random.seed(1212)
     X, eigs = NC.fit_transform(M)
     print(eigs)
-    raise Exception()
 
     normalized_M = Fn.normalize(M, p=2, dim=-1)
     A = torch.exp(-(1 - normalized_M @ normalized_M.mT))
     R = torch.diag(torch.sum(A, dim=-1) ** -0.5)
     L = R @ A @ R
     # print(L)
-    print(X @ torch.diag(eigs) @ X.mT)
-    print(L)
-    print(torch.abs(X @ torch.diag(eigs) @ X.mT / L - 1))
+    # print(X @ torch.diag(eigs) @ X.mT)
+    # print(L)
+    RE = torch.abs(X @ torch.diag(eigs) @ X.mT / L - 1)
+    print(RE.max().item(), RE.mean().item())
 
     # torch.manual_seed(1212)
     # np.random.seed(1212)
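The quantity printed at the end of the updated test is the elementwise relative error between the low-rank reconstruction X·diag(eigs)·Xᵀ and the symmetrically normalized affinity L = D^(-1/2) A D^(-1/2). A smaller self-contained sketch of the same check, assuming NCUT is importable from the package root and that fit_transform returns (eigenvectors, eigenvalues) as in the test above:

    import torch
    import torch.nn.functional as Fn
    from nystrom_ncut import NCUT   # import path is an assumption

    torch.manual_seed(0)
    M = torch.rand(500, 12)                          # small placeholder input

    X, eigs = NCUT(n_components=12, num_sample=300).fit_transform(M)

    # cosine-similarity affinity and its symmetric normalization
    normalized_M = Fn.normalize(M, p=2, dim=-1)
    A = torch.exp(-(1 - normalized_M @ normalized_M.mT))
    R = torch.diag(torch.sum(A, dim=-1) ** -0.5)
    L = R @ A @ R

    # elementwise relative error of the rank-12 reconstruction
    RE = torch.abs(X @ torch.diag(eigs) @ X.mT / L - 1)
    print(RE.max().item(), RE.mean().item())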