artlib 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32) hide show
  1. artlib-0.1.0/LICENSE +21 -0
  2. artlib-0.1.0/PKG-INFO +119 -0
  3. artlib-0.1.0/README.md +89 -0
  4. artlib-0.1.0/artlib/__init__.py +47 -0
  5. artlib-0.1.0/artlib/biclustering/BARTMAP.py +344 -0
  6. artlib-0.1.0/artlib/biclustering/__init__.py +0 -0
  7. artlib-0.1.0/artlib/common/BaseART.py +473 -0
  8. artlib-0.1.0/artlib/common/BaseARTMAP.py +160 -0
  9. artlib-0.1.0/artlib/common/__init__.py +0 -0
  10. artlib-0.1.0/artlib/common/utils.py +65 -0
  11. artlib-0.1.0/artlib/common/visualization.py +131 -0
  12. artlib-0.1.0/artlib/elementary/ART1.py +150 -0
  13. artlib-0.1.0/artlib/elementary/ART2.py +165 -0
  14. artlib-0.1.0/artlib/elementary/BayesianART.py +202 -0
  15. artlib-0.1.0/artlib/elementary/DualVigilanceART.py +216 -0
  16. artlib-0.1.0/artlib/elementary/EllipsoidART.py +228 -0
  17. artlib-0.1.0/artlib/elementary/FuzzyART.py +227 -0
  18. artlib-0.1.0/artlib/elementary/GaussianART.py +168 -0
  19. artlib-0.1.0/artlib/elementary/HypersphereART.py +195 -0
  20. artlib-0.1.0/artlib/elementary/QuadraticNeuronART.py +197 -0
  21. artlib-0.1.0/artlib/elementary/__init__.py +0 -0
  22. artlib-0.1.0/artlib/fusion/FusionART.py +262 -0
  23. artlib-0.1.0/artlib/fusion/__init__.py +0 -0
  24. artlib-0.1.0/artlib/hierarchical/DeepARTMAP.py +235 -0
  25. artlib-0.1.0/artlib/hierarchical/SMART.py +106 -0
  26. artlib-0.1.0/artlib/hierarchical/__init__.py +0 -0
  27. artlib-0.1.0/artlib/supervised/ARTMAP.py +135 -0
  28. artlib-0.1.0/artlib/supervised/SimpleARTMAP.py +282 -0
  29. artlib-0.1.0/artlib/supervised/__init__.py +0 -0
  30. artlib-0.1.0/artlib/topological/TopoART.py +282 -0
  31. artlib-0.1.0/artlib/topological/__init__.py +0 -0
  32. artlib-0.1.0/pyproject.toml +39 -0
artlib-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) [2024] [Niklas M. Melton]
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
artlib-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,119 @@
1
+ Metadata-Version: 2.1
2
+ Name: artlib
3
+ Version: 0.1.0
4
+ Summary: A Python library for Adaptive Resonance Theory (ART) algorithms.
5
+ Home-page: https://github.com/NiklasMelton/AdaptiveResonanceLib
6
+ License: MIT
7
+ Keywords: adaptive resonance theory,ART,machine learning,neural networks,clustering
8
+ Author: Niklas M. Melton
9
+ Author-email: niklasmelton@gmail.com
10
+ Requires-Python: >=3.9,<4.0
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: Intended Audience :: Science/Research
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Operating System :: OS Independent
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
22
+ Requires-Dist: matplotlib (>=3.3.3)
23
+ Requires-Dist: numpy (>=1.19.4)
24
+ Requires-Dist: scikit-learn (>=1.0)
25
+ Requires-Dist: scipy (>=1.5.4)
26
+ Project-URL: Documentation, https://github.com/NiklasMelton/AdaptiveResonanceLib
27
+ Project-URL: Repository, https://github.com/NiklasMelton/AdaptiveResonanceLib
28
+ Description-Content-Type: text/markdown
29
+
30
+
31
+ # AdaptiveResonanceLib
32
+
33
+ Welcome to AdaptiveResonanceLib, a comprehensive and modular Python library for Adaptive Resonance Theory (ART) algorithms. Based on scikit-learn, our library offers a wide range of ART models designed for both researchers and practitioners in the field of machine learning and neural networks. Whether you're working on classification, clustering, or pattern recognition, AdaptiveResonanceLib provides the tools you need to implement ART algorithms efficiently and effectively.
34
+
35
+ ## Available Models
36
+
37
+ AdaptiveResonanceLib includes implementations for the following ART models:
38
+
39
+ - ART1
40
+ - ART2
41
+ - Bayesian ART
42
+ - Gaussian ART
43
+ - Hypersphere ART
44
+ - Ellipsoidal ART
45
+ - Fuzzy ART
46
+ - Quadratic Neuron ART
47
+ - Dual Vigilance ART
48
+ - Topo ART
49
+ - Simple ARTMAP
50
+ - ARTMAP
51
+ - DeepARTMAP
52
+ - SMART
53
+ - Fusion ART
54
+ - Biclustering ARTMAP
55
+
56
+ ## Installation
57
+
58
+ To install AdaptiveResonanceLib, simply use pip:
59
+
60
+ [comment]: <> (```bash)
61
+
62
+ [comment]: <> (pip install AdaptiveResonanceLib)
63
+
64
+ [comment]: <> (```)
65
+
66
+ ```bash
67
+ pip install artlib
68
+ ```
69
+
70
+ Ensure you have Python 3.9 or newer installed.
71
+
72
+ ## Quick Start
73
+
74
+ Here's a quick example of how to use AdaptiveResonanceLib with the Fuzzy ART model:
75
+
76
+ ```python
77
+ from artlib import FuzzyART
78
+ import numpy as np
79
+
80
+ # Your dataset
81
+ train_X = np.array([...])
82
+ test_X = np.array([...])
83
+
84
+ # Initialize the Fuzzy ART model
85
+ model = FuzzyART(rho=0.7, alpha=0.0, beta=1.0)
86
+
87
+ # Fit the model
88
+ model.fit(train_X)
89
+
90
+ # Predict new data points
91
+ predictions = model.predict(test_X)
92
+ ```
93
+
94
+ Replace the example parameter values with those appropriate for your use case.
95
+
96
+ ## Documentation
97
+
98
+ For more detailed documentation, including the full list of parameters for each model, visit our [documentation page](https://github.com/NiklasMelton/AdaptiveResonanceLib).
99
+
100
+ ## Examples
101
+
102
+ For examples of how to use each model in AdaptiveResonanceLib, check out the `/examples` directory in our repository.
103
+
104
+ ## Contributing
105
+
106
+ We welcome contributions to AdaptiveResonanceLib! If you have suggestions for improvements, or if you'd like to add more ART models, please see our `CONTRIBUTING.md` file for guidelines on how to contribute.
107
+
108
+ You can also join our [Discord server](https://discord.gg/45FjtRCt) and participate directly in the discussion.
109
+
110
+ ## License
111
+
112
+ AdaptiveResonanceLib is open source and available under the MIT license. See the `LICENSE` file for more info.
113
+
114
+ ## Contact
115
+
116
+ For questions and support, please open an issue in the GitHub issue tracker or message us on our [Discord server](https://discord.gg/45FjtRCt). We'll do our best to assist you.
117
+
118
+ Happy Modeling with AdaptiveResonanceLib!
119
+
artlib-0.1.0/README.md ADDED
@@ -0,0 +1,89 @@
1
+
2
+ # AdaptiveResonanceLib
3
+
4
+ Welcome to AdaptiveResonanceLib, a comprehensive and modular Python library for Adaptive Resonance Theory (ART) algorithms. Based on scikit-learn, our library offers a wide range of ART models designed for both researchers and practitioners in the field of machine learning and neural networks. Whether you're working on classification, clustering, or pattern recognition, AdaptiveResonanceLib provides the tools you need to implement ART algorithms efficiently and effectively.
5
+
6
+ ## Available Models
7
+
8
+ AdaptiveResonanceLib includes implementations for the following ART models:
9
+
10
+ - ART1
11
+ - ART2
12
+ - Bayesian ART
13
+ - Gaussian ART
14
+ - Hypersphere ART
15
+ - Ellipsoidal ART
16
+ - Fuzzy ART
17
+ - Quadratic Neuron ART
18
+ - Dual Vigilance ART
19
+ - Topo ART
20
+ - Simple ARTMAP
21
+ - ARTMAP
22
+ - DeepARTMAP
23
+ - SMART
24
+ - Fusion ART
25
+ - Biclustering ARTMAP
26
+
27
+ ## Installation
28
+
29
+ To install AdaptiveResonanceLib, simply use pip:
30
+
31
+ [comment]: <> (```bash)
32
+
33
+ [comment]: <> (pip install AdaptiveResonanceLib)
34
+
35
+ [comment]: <> (```)
36
+
37
+ ```bash
38
+ pip install artlib
39
+ ```
40
+
41
+ Ensure you have Python 3.9 or newer installed.
42
+
43
+ ## Quick Start
44
+
45
+ Here's a quick example of how to use AdaptiveResonanceLib with the Fuzzy ART model:
46
+
47
+ ```python
48
+ from artlib import FuzzyART
49
+ import numpy as np
50
+
51
+ # Your dataset
52
+ train_X = np.array([...])
53
+ test_X = np.array([...])
54
+
55
+ # Initialize the Fuzzy ART model
56
+ model = FuzzyART(rho=0.7, alpha=0.0, beta=1.0)
57
+
58
+ # Fit the model
59
+ model.fit(train_X)
60
+
61
+ # Predict new data points
62
+ predictions = model.predict(test_X)
63
+ ```
64
+
65
+ Replace the example parameter values with those appropriate for your use case.
66
+
67
+ ## Documentation
68
+
69
+ For more detailed documentation, including the full list of parameters for each model, visit our [documentation page](https://github.com/NiklasMelton/AdaptiveResonanceLib).
70
+
71
+ ## Examples
72
+
73
+ For examples of how to use each model in AdaptiveResonanceLib, check out the `/examples` directory in our repository.
74
+
75
+ ## Contributing
76
+
77
+ We welcome contributions to AdaptiveResonanceLib! If you have suggestions for improvements, or if you'd like to add more ART models, please see our `CONTRIBUTING.md` file for guidelines on how to contribute.
78
+
79
+ You can also join our [Discord server](https://discord.gg/45FjtRCt) and participate directly in the discussion.
80
+
81
+ ## License
82
+
83
+ AdaptiveResonanceLib is open source and available under the MIT license. See the `LICENSE` file for more info.
84
+
85
+ ## Contact
86
+
87
+ For questions and support, please open an issue in the GitHub issue tracker or message us on our [Discord server](https://discord.gg/45FjtRCt). We'll do our best to assist you.
88
+
89
+ Happy Modeling with AdaptiveResonanceLib!
@@ -0,0 +1,47 @@
1
"""Top-level public API for the artlib package.

Re-exports the base classes, utilities, and every ART / ARTMAP model so that
users can write ``from artlib import FuzzyART`` etc.
"""

# Common base classes and data-preparation utilities
from artlib.common.BaseART import BaseART
from artlib.common.BaseARTMAP import BaseARTMAP
from artlib.common.utils import normalize, compliment_code

# Elementary (unsupervised) ART modules
from artlib.elementary.ART1 import ART1
from artlib.elementary.ART2 import ART2A
from artlib.elementary.BayesianART import BayesianART
from artlib.elementary.DualVigilanceART import DualVigilanceART
from artlib.elementary.EllipsoidART import EllipsoidART
from artlib.elementary.GaussianART import GaussianART
from artlib.elementary.FuzzyART import FuzzyART
from artlib.elementary.HypersphereART import HypersphereART
from artlib.elementary.QuadraticNeuronART import QuadraticNeuronART

# Supervised ARTMAP variants
from artlib.supervised.ARTMAP import ARTMAP, SimpleARTMAP

# Hierarchical models
from artlib.hierarchical.SMART import SMART
from artlib.hierarchical.DeepARTMAP import DeepARTMAP

# Multi-channel data fusion
from artlib.fusion.FusionART import FusionART

# Biclustering
from artlib.biclustering.BARTMAP import BARTMAP

# Topology-preserving clustering
from artlib.topological.TopoART import TopoART

# Explicit public API; note "compliment_code" is the established exported
# name (kept as-is for backward compatibility despite the spelling).
__all__ = [
    "BaseART",
    "BaseARTMAP",
    "normalize",
    "compliment_code",
    "ART1",
    "ART2A",
    "BayesianART",
    "GaussianART",
    "EllipsoidART",
    "HypersphereART",
    "QuadraticNeuronART",
    "FuzzyART",
    "TopoART",
    "DualVigilanceART",
    "ARTMAP",
    "SimpleARTMAP",
    "DeepARTMAP",
    "SMART",
    "FusionART",
    "BARTMAP",
]
@@ -0,0 +1,344 @@
1
+ """
2
+ Xu, R., & Wunsch II, D. C. (2011).
3
+ BARTMAP: A viable structure for biclustering.
4
+ Neural Networks, 24, 709–716. doi:10.1016/j.neunet.2011.03.020.
5
+
6
+ Xu, R., Wunsch II, D. C., & Kim, S. (2012).
7
+ Methods and systems for biclustering algorithm.
8
+ U.S. Patent 9,043,326 Filed January 28, 2012,
9
+ claiming priority to Provisional U.S. Patent Application,
10
+ January 28, 2011, issued May 26, 2015.
11
+ """
12
+
13
+ import numpy as np
14
+ from typing import Optional
15
+ from collections import defaultdict
16
+ from matplotlib.colors import Colormap
17
+ from artlib.common.BaseART import BaseART
18
+ from sklearn.base import BaseEstimator, BiclusterMixin
19
+ from scipy.stats import pearsonr
20
+
21
class BARTMAP(BaseEstimator, BiclusterMixin):
    """Biclustering ARTMAP (BARTMAP).

    Clusters the rows (samples) of a data matrix with an a-side ART module
    and the columns (features) with a b-side ART module. A sample may only
    join an a-side cluster when its average Pearson correlation with the
    data, restricted to the features of some b-side cluster, meets the
    minimum threshold ``eta``.
    """

    # Boolean bicluster membership matrices exposed for sklearn's
    # BiclusterMixin: one row per (row-cluster, column-cluster) pair.
    rows_: np.ndarray
    columns_: np.ndarray

    def __init__(self, module_a: BaseART, module_b: BaseART, eta: float):
        """
        Parameters:
        - module_a: a-side ART module (clusters rows / samples)
        - module_b: b-side ART module (clusters columns / features)
        - eta: minimum average Pearson correlation required for a match

        """
        params: dict = {"eta": eta}
        self.validate_params(params)
        self.params = params
        self.module_a = module_a
        self.module_b = module_b

    def __getattr__(self, key):
        # Read "params" through __dict__ rather than self.params: if "params"
        # is not set yet (e.g. during unpickling), referencing self.params
        # here would re-enter __getattr__ and recurse infinitely.
        params = self.__dict__.get("params", {})
        if key in params:
            return params[key]
        raise AttributeError(
            f"'{type(self).__name__}' object has no attribute '{key}'"
        )

    def __setattr__(self, key, value):
        if key in self.__dict__.get('params', {}):
            # Algorithm parameters are routed into the params dict
            self.params[key] = value
        else:
            # Otherwise, proceed with normal attribute setting
            super().__setattr__(key, value)

    def get_params(self, deep: bool = True) -> dict:
        """
        Parameters:
        - deep: If True, will return the parameters for this class and
          contained subobjects that are estimators (prefixed with
          "module_a__" / "module_b__").

        Returns:
        Parameter names mapped to their values.

        """
        # Copy self.params: the original returned it by reference, so the
        # nested "module_x__*" entries added below leaked into the
        # estimator's own parameter dict.
        out = dict(self.params)

        if deep:
            for k, val in self.module_a.get_params().items():
                out["module_a" + "__" + k] = val
            for k, val in self.module_b.get_params().items():
                out["module_b" + "__" + k] = val

        out["module_a"] = self.module_a
        out["module_b"] = self.module_b
        return out

    def set_params(self, **params):
        """Set the parameters of this estimator.

        Specific redefinition of sklearn.BaseEstimator.set_params for ART classes

        Parameters:
        - **params : Estimator parameters, possibly nested
          (e.g. ``module_a__rho``).

        Returns:
        - self : estimator instance
        """
        if not params:
            # Simple optimization to gain speed (inspect is slow)
            return self
        valid_params = self.get_params(deep=True)
        local_params = dict()

        nested_params = defaultdict(dict)  # grouped by module prefix
        for key, value in params.items():
            key, delim, sub_key = key.partition("__")
            if key not in valid_params:
                local_valid_params = list(valid_params.keys())
                raise ValueError(
                    f"Invalid parameter {key!r} for estimator {self}. "
                    f"Valid parameters are: {local_valid_params!r}."
                )

            if delim:
                nested_params[key][sub_key] = value
            else:
                setattr(self, key, value)
                valid_params[key] = value
                local_params[key] = value

        for key, sub_params in nested_params.items():
            valid_params[key].set_params(**sub_params)

        # Only re-validate when a local parameter actually changed; the
        # original validated local_params unconditionally, so a call that set
        # only nested (module_x__*) parameters tripped the "eta" assertion.
        # setattr above routed new values into self.params, so validate that.
        if local_params:
            self.validate_params(self.params)
        return self

    @staticmethod
    def validate_params(params):
        """
        validate clustering parameters

        Parameters:
        - params: dict containing parameters for the algorithm

        """
        assert "eta" in params
        assert isinstance(params["eta"], float)

    @property
    def column_labels_(self):
        # b-side (column / feature) cluster labels
        return self.module_b.labels_

    @property
    def row_labels_(self):
        # a-side (row / sample) cluster labels
        return self.module_a.labels_

    @property
    def n_row_clusters(self):
        return self.module_a.n_clusters

    @property
    def n_column_clusters(self):
        return self.module_b.n_clusters

    def _get_x_cb(self, x: np.ndarray, c_b: int):
        """
        get the components of a vector belonging to a b-side cluster

        Parameters:
        - x: a sample vector
        - c_b: b-side cluster label

        Returns:
        x filtered to features belonging to the b-side cluster c_b

        """
        b_components = self.module_b.labels_ == c_b
        return x[b_components]

    @staticmethod
    def _pearsonr(a: np.ndarray, b: np.ndarray):
        """
        get the correlation between two vectors

        Parameters:
        - a: some vector
        - b: some vector

        Returns:
        Pearson correlation

        """
        r, _ = pearsonr(a, b)
        return r

    def _average_pearson_corr(self, X: np.ndarray, k: int, c_b: int) -> float:
        """
        get the average correlation between a sample and the data for all
        features in b-side cluster c_b

        Parameters:
        - X: data set A
        - k: sample index
        - c_b: b-side cluster to check

        Returns:
        mean Pearson correlation

        """
        # NOTE(review): rows of X are selected by the b-side (column) label
        # vector here; confirm this indexing is intended — it only lines up
        # when the label vector length matches the number of rows.
        X_a = X[self.column_labels_ == c_b, :]
        if len(X_a) == 0:
            # Replaced the placeholder "HERE" message with a descriptive one.
            raise ValueError(
                f"No samples available for b-side cluster {c_b}; "
                "cannot compute average Pearson correlation"
            )
        X_k_cb = self._get_x_cb(X[k, :], c_b)
        mean_r = np.mean(
            [
                self._pearsonr(X_k_cb, self._get_x_cb(x_a_l, c_b))
                for x_a_l in X_a
            ]
        )

        return float(mean_r)

    def validate_data(self, X_a: np.ndarray, X_b: np.ndarray):
        """
        validates the data prior to clustering

        Parameters:
        - X_a: data set A (samples x features)
        - X_b: data set B (features x samples)

        """
        self.module_a.validate_data(X_a)
        self.module_b.validate_data(X_b)

    def match_criterion_bin(self, X: np.ndarray, k: int, c_b: int, params: dict) -> bool:
        """
        get the binary match criterion of the cluster

        Parameters:
        - X: data set
        - k: sample index
        - c_b: b-side cluster to check
        - params: dict containing parameters for the algorithm

        Returns:
        cluster match criterion binary

        """
        M = self._average_pearson_corr(X, k, c_b)
        # self.params (not the passed params) holds eta: the params argument
        # carries the a-side module's parameters, which do not include eta.
        return M >= self.params["eta"]

    def match_reset_func(
            self,
            i: np.ndarray,
            w: np.ndarray,
            cluster_a,
            params: dict,
            extra: dict,
            cache: Optional[dict] = None
    ) -> bool:
        """
        Permits external factors to influence cluster creation.

        Parameters:
        - i: data sample
        - w: cluster weight / info
        - cluster_a: a-side cluster label
        - params: dict containing parameters for the algorithm
        - extra: additional parameters for the algorithm (expects key "k")
        - cache: dict containing values cached from previous calculations

        Returns:
        true if match is permitted

        """
        k = extra["k"]
        # Accept the match if the sample correlates sufficiently with the
        # data over the features of at least one b-side cluster.
        for cluster_b in range(len(self.module_b.W)):
            if self.match_criterion_bin(self.X, k, cluster_b, params):
                return True
        return False

    def step_fit(self, X: np.ndarray, k: int) -> int:
        """
        fit the model to a single sample

        Parameters:
        - X: data set
        - k: sample index

        Returns:
        cluster label of the input sample

        """
        match_reset_func = lambda i, w, cluster, params, cache: self.match_reset_func(
            i, w, cluster, params=params, extra={"k": k}, cache=cache
        )
        c_a = self.module_a.step_fit(X[k, :], match_reset_func=match_reset_func)
        return c_a

    def fit(self, X: np.ndarray, max_iter=1):
        """
        Fit the model to the data

        Parameters:
        - X: data set (rows = samples, columns = features)
        - max_iter: number of iterations to fit the model on the same data set

        Returns:
        - self

        """
        # Keep a reference to the raw data for match_reset_func
        self.X = X

        n = X.shape[0]
        # Bug fix: the a-side data must be prepared by module_a; the original
        # called module_b.prepare_data for both sides.
        X_a = self.module_a.prepare_data(X)
        X_b = self.module_b.prepare_data(X.T)
        self.validate_data(X_a, X_b)

        # Cluster the columns (features) first
        self.module_b = self.module_b.fit(X_b, max_iter=max_iter)

        # init module A
        self.module_a.W = []
        self.module_a.labels_ = np.zeros((X.shape[0],), dtype=int)

        for _ in range(max_iter):
            for k in range(n):
                # (debug print of the cluster count removed)
                self.module_a.pre_step_fit(X)
                c_a = self.step_fit(X_a, k)
                self.module_a.labels_[k] = c_a
                self.module_a.post_step_fit(X)

        # Build the BiclusterMixin membership matrices: one row per
        # (row-cluster, column-cluster) pair, in matching order.
        self.rows_ = np.vstack(
            [
                self.row_labels_ == label
                for label in range(self.module_a.n_clusters)
                for _ in range(self.module_b.n_clusters)
            ]
        )
        self.columns_ = np.vstack(
            [
                self.column_labels_ == label
                for _ in range(self.module_a.n_clusters)
                for label in range(self.module_b.n_clusters)
            ]
        )
        return self

    def visualize(
            self,
            cmap: Optional[Colormap] = None
    ):
        """
        Visualize the clustering of the data

        Parameters:
        - cmap: some colormap (defaults to matplotlib's Blues)

        """
        import matplotlib.pyplot as plt

        if cmap is None:
            # (removed unused "from matplotlib.pyplot import cm" import)
            cmap = plt.cm.Blues

        plt.matshow(
            np.outer(np.sort(self.row_labels_) + 1, np.sort(self.column_labels_) + 1),
            cmap=cmap,
        )
File without changes