nystrom-ncut 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -94,20 +94,19 @@ class NCUT(OnlineNystrom):
 
  def __init__(
  self,
- num_eig: int = 100,
+ n_components: int = 100,
  affinity_focal_gamma: float = 1.0,
  num_sample: int = 10000,
  sample_method: Literal["farthest", "random"] = "farthest",
  distance: DistanceOptions = "cosine",
  eig_solver: EigSolverOptions = "svd_lowrank",
  normalize_features: bool = None,
- device: str = None,
  move_output_to_cpu: bool = False,
- matmul_chunk_size: int = 8096,
+ chunk_size: int = 8192,
  ):
  """
  Args:
- num_eig (int): number of top eigenvectors to return
+ n_components (int): number of top eigenvectors to return
  affinity_focal_gamma (float): affinity matrix temperature, lower t reduce the not-so-connected edge weights,
  smaller t result in more sharp eigenvectors.
  num_sample (int): number of samples for Nystrom-like approximation,
@@ -118,17 +117,15 @@ class NCUT(OnlineNystrom):
  eig_solver (str): eigen decompose solver, ['svd_lowrank', 'lobpcg', 'svd', 'eigh'].
  normalize_features (bool): normalize input features before computing affinity matrix,
  default 'None' is True for cosine distance, False for euclidean distance and rbf
- device (str): device to use for eigen computation,
- move to GPU to speeds up a bit (~5x faster)
  move_output_to_cpu (bool): move output to CPU, set to True if you have memory issue
- matmul_chunk_size (int): chunk size for large-scale matrix multiplication
+ chunk_size (int): chunk size for large-scale matrix multiplication
  """
  OnlineNystrom.__init__(
  self,
- n_components=num_eig,
+ n_components=n_components,
  kernel=LaplacianKernel(affinity_focal_gamma, distance, eig_solver),
  eig_solver=eig_solver,
- chunk_size=matmul_chunk_size,
+ chunk_size=chunk_size,
  )
  self.num_sample = num_sample
  self.sample_method = sample_method
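
A minimal before/after sketch of the constructor rename above, using only keyword names that appear in this diff; the `from nystrom_ncut import NCUT` import path is an assumption based on the module layout in the RECORD at the end of this diff:

    from nystrom_ncut import NCUT  # assumed import path

    # 0.0.2 keywords (removed in 0.0.3):
    # ncut = NCUT(num_eig=50, matmul_chunk_size=8096, device="cuda")

    # 0.0.3 equivalents:
    ncut = NCUT(
        n_components=50,   # renamed from num_eig
        chunk_size=8192,   # renamed from matmul_chunk_size; default corrected from 8096 to 8192
        distance="cosine",
        eig_solver="svd_lowrank",
    )
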
@@ -140,19 +137,14 @@ class NCUT(OnlineNystrom):
  if distance in ["euclidean", "rbf"]:
  self.normalize_features = False

- self.device = device
  self.move_output_to_cpu = move_output_to_cpu
- self.matmul_chunk_size = matmul_chunk_size
+ self.chunk_size = chunk_size

  def _fit_helper(
  self,
  features: torch.Tensor,
  precomputed_sampled_indices: torch.Tensor,
  ) -> Tuple[torch.Tensor, torch.Tensor]:
- # move subgraph gpu to speed up
- original_device = features.device
- device = original_device if self.device is None else self.device
-
  _n = features.shape[0]
  if self.num_sample >= _n:
  logging.info(
@@ -184,13 +176,13 @@ class NCUT(OnlineNystrom):
  num_sample=self.num_sample,
  sample_method=self.sample_method,
  )
- sampled_features = features[sampled_indices].to(device)
+ sampled_features = features[sampled_indices]
  OnlineNystrom.fit(self, sampled_features)

  _n_not_sampled = _n - len(sampled_features)
  if _n_not_sampled > 0:
- unsampled_indices = torch.full((_n,), True).scatter(0, sampled_indices, False)
- unsampled_features = features[unsampled_indices].to(device)
+ unsampled_indices = torch.full((_n,), True, device=features.device).scatter_(0, sampled_indices, False)
+ unsampled_features = features[unsampled_indices]
  V_unsampled, _ = OnlineNystrom.update(self, unsampled_features)
  else:
  unsampled_indices = V_unsampled = None
@@ -231,7 +223,7 @@ class NCUT(OnlineNystrom):
  V_sampled, L = OnlineNystrom.transform(self)

  if unsampled_indices is not None:
- V = torch.zeros((len(unsampled_indices), self.n_components))
+ V = torch.zeros((len(unsampled_indices), self.n_components), device=features.device)
  V[~unsampled_indices] = V_sampled
  V[unsampled_indices] = V_unsampled
  else:
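
A hedged sketch of the device behavior implied by the changes above: with `device=` and the internal `.to(device)` calls removed, the Nyström fit runs on whatever device the input features already occupy, so callers move the features themselves. The `fit_transform` call and its return signature below are assumptions; this diff only shows the internal `OnlineNystrom.fit`/`update`/`transform` calls.

    import torch
    from nystrom_ncut import NCUT  # assumed import path

    features = torch.randn(10000, 768)
    if torch.cuda.is_available():
        features = features.cuda()  # in 0.0.2 this was requested via device="cuda"

    ncut = NCUT(n_components=100, num_sample=10000,
                move_output_to_cpu=True)  # still available if GPU memory is tight
    eigenvectors, eigenvalues = ncut.fit_transform(features)  # assumed public entry point
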
@@ -43,7 +43,7 @@ def run_subgraph_sampling(
  sampled_indices = torch.randperm(features.shape[0])[:num_sample]
  else:
  raise ValueError("sample_method should be 'farthest' or 'random'")
- return sampled_indices
+ return sampled_indices.to(features.device)


  def farthest_point_sampling(
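
The `.to(features.device)` added here pairs with the `scatter_` change in `_fit_helper` above: `torch.randperm` allocates its indices on the CPU by default, and `Tensor.scatter_` requires the index tensor to be on the same device as the tensor being written. A small standalone illustration (sizes are arbitrary; the GPU case is only indicated in comments):

    import torch

    n, num_sample = 8, 3
    features = torch.randn(n, 4)                      # imagine this on "cuda" in practice
    sampled_indices = torch.randperm(n)[:num_sample]  # created on the CPU by default

    # scatter_ needs the index on the same device as the mask, so run_subgraph_sampling
    # now returns the indices on features.device before they reach this point.
    mask = torch.full((n,), True, device=features.device)
    mask.scatter_(0, sampled_indices.to(features.device), False)
    print(mask)  # False at the sampled positions, True elsewhere
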
@@ -139,7 +139,7 @@ def propagate_knn(
  knn: int = 10,
  distance: Literal["cosine", "euclidean", "rbf"] = "cosine",
  affinity_focal_gamma: float = 1.0,
- chunk_size: int = 8096,
+ chunk_size: int = 8192,
  device: str = None,
  move_output_to_cpu: bool = False,
  ):
@@ -206,7 +206,7 @@ def propagate_nearest(
  inp_features: torch.Tensor,
  subgraph_features: torch.Tensor,
  distance: Literal["cosine", "euclidean", "rbf"] = "cosine",
- chunk_size: int = 8096,
+ chunk_size: int = 8192,
  device: str = None,
  move_output_to_cpu: bool = False,
  ):
@@ -254,7 +254,7 @@ def propagate_eigenvectors(
  knn (int): number of KNN to propagate eigenvectors, default 3
  num_sample (int): number of samples for subgraph sampling, default 50000
  sample_method (str): sample method, 'farthest' (default) or 'random'
- chunk_size (int): chunk size for matrix multiplication, default 8096
+ chunk_size (int): chunk size for matrix multiplication, default 8192
  device (str): device to use for computation, if None, will not change device
  Returns:
  torch.Tensor: propagated eigenvectors, shape (n_new_samples, num_eig)
@@ -420,7 +420,7 @@ def propagate_rgb_color(
  knn: int = 10,
  num_sample: int = 1000,
  sample_method: Literal["farthest", "random"] = "farthest",
- chunk_size: int = 8096,
+ chunk_size: int = 8192,
  device: str = None,
  ):
  """Propagate RGB color to new nodes using KNN.
@@ -431,7 +431,7 @@ def propagate_rgb_color(
  knn (int): number of KNN to propagate RGB color, default 1
  num_sample (int): number of samples for subgraph sampling, default 50000
  sample_method (str): sample method, 'farthest' (default) or 'random'
- chunk_size (int): chunk size for matrix multiplication, default 8096
+ chunk_size (int): chunk size for matrix multiplication, default 8192
  device (str): device to use for computation, if None, will not change device
  Returns:
  torch.Tensor: propagated RGB color for each data sample, shape (n_new_samples, 3)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: nystrom_ncut
- Version: 0.0.2
+ Version: 0.0.3
  Summary: Normalized Cut and Nyström Approximation
  Author-email: Huzheng Yang <huze.yann@gmail.com>, Wentinn Liao <wentinn.liao@gmail.com>
  Project-URL: Documentation, https://github.com/JophiArcana/Nystrom-NCUT/
@@ -0,0 +1,11 @@
+ nystrom_ncut/__init__.py,sha256=Cww-_OsyQHLKpgw_Wh28_tUOvIMMr7Ey8w-tH7v99xQ,452
+ nystrom_ncut/common.py,sha256=qdR_JwknT9H1Cv5LopwdwZfORFx-O8MLiRI6ZF1Qohc,558
+ nystrom_ncut/ncut_pytorch.py,sha256=8LfznDwhq-WL_vQxbFBFLSzymg9SEDti_zzf9QQLnrA,11651
+ nystrom_ncut/nystrom.py,sha256=Jo-P-2vnYk8yEZinGZnN3jHMiiB5AueoaLAYK4OmRqE,8604
+ nystrom_ncut/propagation_utils.py,sha256=pigecB0rAmlbCoMNb8zhCyyNwh3QzkxXEnaBsDRE_ns,12136
+ nystrom_ncut/visualize_utils.py,sha256=oNaDz_Xn12g3knEZZTb-QWVN-wTrnCNE5gn9cu8Xl_U,18569
+ nystrom_ncut-0.0.3.dist-info/LICENSE,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
+ nystrom_ncut-0.0.3.dist-info/METADATA,sha256=yh1pDFHUL2Z4WPVnuQyeHjgSEokUtpB6OPzPMxEClsM,6058
+ nystrom_ncut-0.0.3.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
+ nystrom_ncut-0.0.3.dist-info/top_level.txt,sha256=j7g_j0S048EvguFFnGgD5Ewd3r2H6klsxd5A4dd-wHw,13
+ nystrom_ncut-0.0.3.dist-info/RECORD,,
@@ -1,11 +0,0 @@
- nystrom_ncut/__init__.py,sha256=Cww-_OsyQHLKpgw_Wh28_tUOvIMMr7Ey8w-tH7v99xQ,452
- nystrom_ncut/common.py,sha256=qdR_JwknT9H1Cv5LopwdwZfORFx-O8MLiRI6ZF1Qohc,558
- nystrom_ncut/ncut_pytorch.py,sha256=Lz0aQwZMOUnxQRvf6m9-eWZG8Zha71sRikp7sDuvNHo,11980
- nystrom_ncut/nystrom.py,sha256=Jo-P-2vnYk8yEZinGZnN3jHMiiB5AueoaLAYK4OmRqE,8604
- nystrom_ncut/propagation_utils.py,sha256=rBya8WnspnvhF_sGDAprIHEmerw_93td7ddRG3lUQHA,12116
- nystrom_ncut/visualize_utils.py,sha256=1-eoF2FlMKJSjqHacuBiJ9IcEcvV-WJkBtKp_PoIg-0,18569
- nystrom_ncut-0.0.2.dist-info/LICENSE,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
- nystrom_ncut-0.0.2.dist-info/METADATA,sha256=x040uvrRFlXh9iXvPEyNcymw2rGmkYnCOGp4eIF-pKQ,6058
- nystrom_ncut-0.0.2.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
- nystrom_ncut-0.0.2.dist-info/top_level.txt,sha256=j7g_j0S048EvguFFnGgD5Ewd3r2H6klsxd5A4dd-wHw,13
- nystrom_ncut-0.0.2.dist-info/RECORD,,