rumale-manifold 0.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 6c0ef66c09b01569728f5b856236aac366cd0048055edb91e26f4d206fe44033
4
+ data.tar.gz: dc5fc6ad6423e6c9d65adbbee74cb79817b94001a1c8e0753b62f3fbfd5d8ded
5
+ SHA512:
6
+ metadata.gz: 4601ea832a9583c8987a457ea7b4c9e4d9d094e041041903d05f554c8a4ec34324ed9d7c34282f9825a68eca67d5bc89e0082489362b93f173f4f944ddc896f5
7
+ data.tar.gz: 20eaeed3f214929b208e4faa04929270b6a28c0b89b835f0098ce90a8877f912a5a476fa2dcebd2f62a06228a783cd0eda55fa955982cbb70dbcfaf165cf1042
data/LICENSE.txt ADDED
@@ -0,0 +1,27 @@
1
+ Copyright (c) 2022 Atsushi Tatsuma
2
+ All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ * Redistributions of source code must retain the above copyright notice, this
8
+ list of conditions and the following disclaimer.
9
+
10
+ * Redistributions in binary form must reproduce the above copyright notice,
11
+ this list of conditions and the following disclaimer in the documentation
12
+ and/or other materials provided with the distribution.
13
+
14
+ * Neither the name of the copyright holder nor the names of its
15
+ contributors may be used to endorse or promote products derived from
16
+ this software without specific prior written permission.
17
+
18
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
data/README.md ADDED
@@ -0,0 +1,34 @@
1
+ # Rumale::Manifold
2
+
3
+ [![Gem Version](https://badge.fury.io/rb/rumale-manifold.svg)](https://badge.fury.io/rb/rumale-manifold)
4
+ [![BSD 3-Clause License](https://img.shields.io/badge/License-BSD%203--Clause-orange.svg)](https://gitlab.com/yoshoku/rumale/-/blob/main/rumale-manifold/LICENSE.txt)
5
+ [![Documentation](https://img.shields.io/badge/api-reference-blue.svg)](https://yoshoku.github.io/rumale/doc/Rumale/Manifold.html)
6
+
7
+ Rumale is a machine learning library in Ruby.
8
+ Rumale::Manifold provides data embedding algorithms,
9
+ such as Multi-dimensional Scaling and t-distributed Stochastic Neighbor Embedding,
10
+ with Rumale interface.
11
+
12
+ ## Installation
13
+
14
+ Add this line to your application's Gemfile:
15
+
16
+ ```ruby
17
+ gem 'rumale-manifold'
18
+ ```
19
+
20
+ And then execute:
21
+
22
+ $ bundle install
23
+
24
+ Or install it yourself as:
25
+
26
+ $ gem install rumale-manifold
27
+
28
+ ## Documentation
29
+
30
+ - [Rumale API Documentation - Manifold](https://yoshoku.github.io/rumale/doc/Rumale/Manifold.html)
31
+
32
+ ## License
33
+
34
+ The gem is available as open source under the terms of the [BSD-3-Clause License](https://opensource.org/licenses/BSD-3-Clause).
data/lib/rumale/manifold/mds.rb ADDED
@@ -0,0 +1,152 @@
# frozen_string_literal: true

require 'rumale/base/estimator'
require 'rumale/base/transformer'
require 'rumale/utils'
require 'rumale/validation'
require 'rumale/pairwise_metric'
require 'rumale/decomposition/pca'

module Rumale
  module Manifold
    # MDS is a class that implements Metric Multidimensional Scaling (MDS)
    # with Scaling by MAjorizing a COmplicated Function (SMACOF) algorithm.
    #
    # @example
    #   require 'rumale/manifold/mds'
    #
    #   mds = Rumale::Manifold::MDS.new(init: 'pca', max_iter: 500, random_seed: 1)
    #   representations = mds.fit_transform(samples)
    #
    # *Reference*
    # - Groenen, P J. F. and van de Velden, M., "Multidimensional Scaling by Majorization: A Review," J. of Statistical Software, Vol. 73 (8), 2016.
    class MDS < ::Rumale::Base::Estimator
      include ::Rumale::Base::Transformer

      # Return the data in representation space.
      # @return [Numo::DFloat] (shape: [n_samples, n_components])
      attr_reader :embedding

      # Return the stress function value after optimization.
      # @return [Float]
      attr_reader :stress

      # Return the number of iterations run for optimization.
      # @return [Integer]
      attr_reader :n_iter

      # Return the random generator.
      # @return [Random]
      attr_reader :rng

      # Create a new transformer with MDS.
      #
      # @param n_components [Integer] The number of dimensions on representation space.
      # @param metric [String] The metric to calculate the distances in original space.
      #   If metric is 'euclidean', Euclidean distance is calculated for distance in original space.
      #   If metric is 'precomputed', the fit and fit_transform methods expect to be given a distance matrix.
      # @param init [String] The init is a method to initialize the representation space.
      #   If init is 'random', the representation space is initialized with uniform random variables.
      #   If init is 'pca', the result of principal component analysis is used as the initial value of the representation space.
      # @param max_iter [Integer] The maximum number of iterations.
      # @param tol [Float] The tolerance of stress value for terminating optimization.
      #   If tol is nil, it does not use stress value as a criterion for terminating the optimization.
      # @param verbose [Boolean] The flag indicating whether to output stress value during iteration.
      # @param random_seed [Integer] The seed value used to initialize the random generator.
      def initialize(n_components: 2, metric: 'euclidean', init: 'random',
                     max_iter: 300, tol: nil, verbose: false, random_seed: nil)
        super()
        @params = {
          n_components: n_components,
          max_iter: max_iter,
          tol: tol,
          metric: metric,
          init: init,
          verbose: verbose,
          random_seed: random_seed || srand
        }
        @rng = Random.new(@params[:random_seed])
      end

      # Fit the model with given training data.
      #
      # @overload fit(x) -> MDS
      #   @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
      #     If the metric is 'precomputed', x must be a square distance matrix (shape: [n_samples, n_samples]).
      # @return [MDS] The learned transformer itself.
      def fit(x, _not_used = nil)
        x = ::Rumale::Validation.check_convert_sample_array(x)
        if @params[:metric] == 'precomputed' && x.shape[0] != x.shape[1]
          raise ArgumentError, 'Expect the input distance matrix to be square.'
        end

        # initialize some variables.
        n_samples = x.shape[0]
        hi_distance_mat = @params[:metric] == 'precomputed' ? x : ::Rumale::PairwiseMetric.euclidean_distance(x)
        @embedding = init_embedding(x)
        lo_distance_mat = ::Rumale::PairwiseMetric.euclidean_distance(@embedding)
        @stress = calc_stress(hi_distance_mat, lo_distance_mat)
        @n_iter = 0
        # perform optimization.
        @params[:max_iter].times do |t|
          # Guttman transform.
          # Zero out the diagonal and any entries where the embedding distance is zero
          # to avoid division-by-zero artifacts in the ratio matrix.
          ratio = hi_distance_mat / lo_distance_mat
          ratio[ratio.diag_indices] = 0.0
          ratio[lo_distance_mat.eq(0)] = 0.0
          tmp_mat = -ratio
          tmp_mat[tmp_mat.diag_indices] += ratio.sum(axis: 1)
          @embedding = 1.fdiv(n_samples) * tmp_mat.dot(@embedding)
          lo_distance_mat = ::Rumale::PairwiseMetric.euclidean_distance(@embedding)
          # check convergence.
          new_stress = calc_stress(hi_distance_mat, lo_distance_mat)
          if terminate?(@stress, new_stress)
            @stress = new_stress
            break
          end
          # next step.
          @n_iter = t + 1
          @stress = new_stress
          puts "[MDS] stress function after #{@n_iter} iterations: #{@stress}" if @params[:verbose] && (@n_iter % 100).zero?
        end
        self
      end

      # Fit the model with training data, and then transform them with the learned model.
      #
      # @overload fit_transform(x) -> Numo::DFloat
      #   @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
      #     If the metric is 'precomputed', x must be a square distance matrix (shape: [n_samples, n_samples]).
      # @return [Numo::DFloat] (shape: [n_samples, n_components]) The transformed data.
      def fit_transform(x, _not_used = nil)
        x = ::Rumale::Validation.check_convert_sample_array(x)

        fit(x)
        @embedding.dup
      end

      private

      # Initialize the low-dimensional embedding either with PCA (only meaningful when
      # raw features are available, i.e. metric == 'euclidean') or uniform random values.
      def init_embedding(x)
        if @params[:init] == 'pca' && @params[:metric] == 'euclidean'
          pca = ::Rumale::Decomposition::PCA.new(n_components: @params[:n_components], random_seed: @params[:random_seed])
          pca.fit_transform(x)
        else
          n_samples = x.shape[0]
          # duplicate the generator so repeated fits reproduce the same initialization.
          sub_rng = @rng.dup
          ::Rumale::Utils.rand_uniform([n_samples, @params[:n_components]], sub_rng) - 0.5
        end
      end

      # Convergence test: stop when the absolute change in stress falls within tol.
      # Always returns false when tol is nil (run for max_iter iterations).
      def terminate?(old_stress, new_stress)
        return false if @params[:tol].nil?
        return false if old_stress.nil?

        (old_stress - new_stress).abs <= @params[:tol]
      end

      # Raw stress: sum of squared differences between the original-space and
      # embedding-space distances. Each pair appears twice in the symmetric
      # matrices, hence the division by two.
      def calc_stress(hi_distance_mat, lo_distance_mat)
        ((hi_distance_mat - lo_distance_mat)**2).sum.fdiv(2)
      end
    end
  end
end
data/lib/rumale/manifold/tsne.rb ADDED
@@ -0,0 +1,218 @@
# frozen_string_literal: true

require 'rumale/base/estimator'
require 'rumale/base/transformer'
require 'rumale/utils'
require 'rumale/validation'
require 'rumale/pairwise_metric'
require 'rumale/decomposition/pca'

module Rumale
  module Manifold
    # TSNE is a class that implements t-Distributed Stochastic Neighbor Embedding (t-SNE)
    # with fixed-point optimization algorithm.
    # Fixed-point algorithm usually converges faster than gradient descent method and
    # does not need the learning parameters such as the learning rate and momentum.
    #
    # @example
    #   require 'rumale/manifold/tsne'
    #
    #   tsne = Rumale::Manifold::TSNE.new(perplexity: 40.0, init: 'pca', max_iter: 500, random_seed: 1)
    #   representations = tsne.fit_transform(samples)
    #
    # *Reference*
    # - van der Maaten, L., and Hinton, G., "Visualizing data using t-SNE," J. of Machine Learning Research, vol. 9, pp. 2579--2605, 2008.
    # - Yang, Z., King, I., Xu, Z., and Oja, E., "Heavy-Tailed Symmetric Stochastic Neighbor Embedding," Proc. NIPS'09, pp. 2169--2177, 2009.
    class TSNE < ::Rumale::Base::Estimator
      include ::Rumale::Base::Transformer

      # Return the data in representation space.
      # @return [Numo::DFloat] (shape: [n_samples, n_components])
      attr_reader :embedding

      # Return the Kullback-Leibler divergence after optimization.
      # @return [Float]
      attr_reader :kl_divergence

      # Return the number of iterations run for optimization.
      # @return [Integer]
      attr_reader :n_iter

      # Return the random generator.
      # @return [Random]
      attr_reader :rng

      # Create a new transformer with t-SNE.
      #
      # @param n_components [Integer] The number of dimensions on representation space.
      # @param perplexity [Float] The effective number of neighbors for each point. Perplexity is typically set from 5 to 50.
      # @param metric [String] The metric to calculate the distances in original space.
      #   If metric is 'euclidean', Euclidean distance is calculated for distance in original space.
      #   If metric is 'precomputed', the fit and fit_transform methods expect to be given a distance matrix.
      # @param init [String] The init is a method to initialize the representation space.
      #   If init is 'random', the representation space is initialized with normal random variables.
      #   If init is 'pca', the result of principal component analysis is used as the initial value of the representation space.
      # @param max_iter [Integer] The maximum number of iterations.
      # @param tol [Float] The tolerance of KL-divergence for terminating optimization.
      #   If tol is nil, it does not use KL divergence as a criterion for terminating the optimization.
      # @param verbose [Boolean] The flag indicating whether to output KL divergence during iteration.
      # @param random_seed [Integer] The seed value used to initialize the random generator.
      def initialize(n_components: 2, perplexity: 30.0, metric: 'euclidean', init: 'random',
                     max_iter: 500, tol: nil, verbose: false, random_seed: nil)
        super()
        @params = {
          n_components: n_components,
          perplexity: perplexity,
          max_iter: max_iter,
          tol: tol,
          metric: metric,
          init: init,
          verbose: verbose,
          random_seed: random_seed || srand
        }
        @rng = Random.new(@params[:random_seed])
      end

      # Fit the model with given training data.
      #
      # @overload fit(x) -> TSNE
      #   @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
      #     If the metric is 'precomputed', x must be a square distance matrix (shape: [n_samples, n_samples]).
      # @return [TSNE] The learned transformer itself.
      def fit(x, _not_used = nil)
        x = ::Rumale::Validation.check_convert_sample_array(x)
        if @params[:metric] == 'precomputed' && x.shape[0] != x.shape[1]
          raise ArgumentError, 'Expect the input distance matrix to be square.'
        end

        # initialize some variables.
        @n_iter = 0
        # t-SNE works on squared distances; a precomputed distance matrix is squared here.
        distance_mat = @params[:metric] == 'precomputed' ? x**2 : ::Rumale::PairwiseMetric.squared_error(x)
        hi_prob_mat = gaussian_distributed_probability_matrix(distance_mat)
        y = init_embedding(x)
        lo_prob_mat = t_distributed_probability_matrix(y)
        # perform fixed-point optimization.
        one_vec = Numo::DFloat.ones(x.shape[0]).expand_dims(1)
        @params[:max_iter].times do |t|
          break if terminate?(hi_prob_mat, lo_prob_mat)

          a = hi_prob_mat * lo_prob_mat
          b = lo_prob_mat**2
          y = (b.dot(one_vec) * y + (a - b).dot(y)) / a.dot(one_vec)
          lo_prob_mat = t_distributed_probability_matrix(y)
          @n_iter = t + 1
          if @params[:verbose] && (@n_iter % 100).zero?
            puts "[t-SNE] KL divergence after #{@n_iter} iterations: #{cost(hi_prob_mat, lo_prob_mat)}"
          end
        end
        # store results.
        @embedding = y
        @kl_divergence = cost(hi_prob_mat, lo_prob_mat)
        self
      end

      # Fit the model with training data, and then transform them with the learned model.
      #
      # @overload fit_transform(x) -> Numo::DFloat
      #   @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
      #     If the metric is 'precomputed', x must be a square distance matrix (shape: [n_samples, n_samples]).
      # @return [Numo::DFloat] (shape: [n_samples, n_components]) The transformed data.
      def fit_transform(x, _not_used = nil)
        x = ::Rumale::Validation.check_convert_sample_array(x)

        fit(x)
        @embedding.dup
      end

      private

      # Initialize the low-dimensional embedding either with PCA (only meaningful when
      # raw features are available, i.e. metric == 'euclidean') or small normal random values.
      def init_embedding(x)
        if @params[:init] == 'pca' && @params[:metric] == 'euclidean'
          pca = ::Rumale::Decomposition::PCA.new(n_components: @params[:n_components], random_seed: @params[:random_seed])
          pca.fit_transform(x)
        else
          n_samples = x.shape[0]
          # duplicate the generator so repeated fits reproduce the same initialization.
          sub_rng = @rng.dup
          ::Rumale::Utils.rand_normal([n_samples, @params[:n_components]], sub_rng, 0, 0.0001)
        end
      end

      # Build the symmetrized, normalized high-dimensional affinity matrix P from
      # squared distances, tuning a per-sample Gaussian bandwidth to match perplexity.
      def gaussian_distributed_probability_matrix(distance_mat)
        # initialize some variables.
        n_samples = distance_mat.shape[0]
        prob_mat = Numo::DFloat.zeros(n_samples, n_samples)
        sum_beta = 0.0
        # calculate conditional probabilities.
        n_samples.times do |n|
          beta, probs = optimal_probabilities(n, distance_mat[n, true])
          prob_mat[n, true] = probs
          sum_beta += beta
          puts "[t-SNE] Computed conditional probabilities for sample #{n + 1} / #{n_samples}" if @params[:verbose] && ((n + 1) % 1000).zero?
        end
        puts "[t-SNE] Mean sigma: #{Math.sqrt(n_samples.fdiv(sum_beta))}" if @params[:verbose]
        # symmetrize and normalize probability matrix.
        prob_mat[prob_mat.diag_indices(0)] = 0.0
        prob_mat = 0.5 * (prob_mat + prob_mat.transpose)
        prob_mat / prob_mat.sum
      end

      # Binary-search the precision beta (= 1 / (2 * sigma^2)) for one sample so that
      # the entropy of its conditional distribution matches log(perplexity).
      # @return [Array] pair of the found beta and the conditional probability vector.
      def optimal_probabilities(sample_id, distance_vec, max_iter = 100)
        # initialize some variables.
        probs = nil
        beta = 1.0
        betamin = Float::MIN
        betamax = Float::MAX
        init_entropy = Math.log(@params[:perplexity])
        # calculate optimal beta and conditional probabilities with binary search.
        max_iter.times do
          entropy, probs = gaussian_distributed_probability_vector(sample_id, distance_vec, beta)
          diff_entropy = entropy - init_entropy
          break if diff_entropy.abs <= 1e-5

          if diff_entropy.positive?
            # entropy too high -> increase beta (narrow the Gaussian).
            betamin = beta
            if betamax == Float::MAX
              beta *= 2.0
            else
              beta = 0.5 * (beta + betamax)
            end
          else
            # entropy too low -> decrease beta (widen the Gaussian).
            betamax = beta
            if betamin == Float::MIN
              beta /= 2.0
            else
              beta = 0.5 * (beta + betamin)
            end
          end
        end
        [beta, probs]
      end

      # Conditional probabilities of sample n w.r.t. all other samples under a
      # Gaussian with precision beta; also returns the distribution's entropy.
      def gaussian_distributed_probability_vector(n, distance_vec, beta)
        probs = Numo::NMath.exp(-beta * distance_vec)
        # a point is never its own neighbor.
        probs[n] = 0.0
        sum_probs = probs.sum
        probs /= sum_probs
        entropy = Math.log(sum_probs) + beta * (distance_vec * probs).sum
        [entropy, probs]
      end

      # Low-dimensional affinity matrix Q using a Student-t kernel with one degree
      # of freedom, normalized over all off-diagonal pairs.
      def t_distributed_probability_matrix(y)
        distance_mat = ::Rumale::PairwiseMetric.squared_error(y)
        prob_mat = 1.0 / (1.0 + distance_mat)
        prob_mat[prob_mat.diag_indices(0)] = 0.0
        prob_mat / prob_mat.sum
      end

      # Kullback-Leibler divergence KL(P || Q); both sides are clipped at 1e-20
      # to avoid log(0) and division by zero.
      def cost(p, q)
        (p * Numo::NMath.log(Numo::DFloat.maximum(1e-20, p) / Numo::DFloat.maximum(1e-20, q))).sum
      end

      # Convergence test: stop once the KL divergence is within tol.
      # Always returns false when tol is nil (run for max_iter iterations).
      def terminate?(p, q)
        return false if @params[:tol].nil?

        cost(p, q) <= @params[:tol]
      end
    end
  end
end
data/lib/rumale/manifold/version.rb ADDED
@@ -0,0 +1,10 @@
# frozen_string_literal: true

# Rumale is a machine learning library in Ruby.
module Rumale
  # This module provides data embedding (manifold learning) algorithms.
  module Manifold
    # The version string of the rumale-manifold gem.
    # @!visibility private
    VERSION = '0.24.0'
  end
end
data/lib/rumale/manifold.rb ADDED
@@ -0,0 +1,7 @@
# frozen_string_literal: true

require 'numo/narray'

# Entry point of the rumale-manifold gem: loads the embedding algorithms
# (MDS, t-SNE) and the gem version constant.
require_relative 'manifold/mds'
require_relative 'manifold/tsne'
require_relative 'manifold/version'
metadata ADDED
@@ -0,0 +1,98 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: rumale-manifold
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.24.0
5
+ platform: ruby
6
+ authors:
7
+ - yoshoku
8
+ autorequire:
9
+ bindir: exe
10
+ cert_chain: []
11
+ date: 2022-12-31 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: numo-narray
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: 0.9.1
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: 0.9.1
27
+ - !ruby/object:Gem::Dependency
28
+ name: rumale-core
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: 0.24.0
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: 0.24.0
41
+ - !ruby/object:Gem::Dependency
42
+ name: rumale-decomposition
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: 0.24.0
48
+ type: :runtime
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: 0.24.0
55
+ description: |
56
+ Rumale::Manifold provides data embedding algorithms,
57
+ such as Multi-dimensional Scaling and t-distributed Stochastic Neighbor Embedding,
58
+ with Rumale interface.
59
+ email:
60
+ - yoshoku@outlook.com
61
+ executables: []
62
+ extensions: []
63
+ extra_rdoc_files: []
64
+ files:
65
+ - LICENSE.txt
66
+ - README.md
67
+ - lib/rumale/manifold.rb
68
+ - lib/rumale/manifold/mds.rb
69
+ - lib/rumale/manifold/tsne.rb
70
+ - lib/rumale/manifold/version.rb
71
+ homepage: https://gitlab.com/yoshoku/rumale
72
+ licenses:
73
+ - BSD-3-Clause
74
+ metadata:
75
+ homepage_uri: https://gitlab.com/yoshoku/rumale
76
+ source_code_uri: https://gitlab.com/yoshoku/rumale/-/tree/main/rumale-manifold
77
+ changelog_uri: https://gitlab.com/yoshoku/rumale/-/blob/main/CHANGELOG.md
78
+ rubygems_mfa_required: 'true'
79
+ post_install_message:
80
+ rdoc_options: []
81
+ require_paths:
82
+ - lib
83
+ required_ruby_version: !ruby/object:Gem::Requirement
84
+ requirements:
85
+ - - ">="
86
+ - !ruby/object:Gem::Version
87
+ version: '0'
88
+ required_rubygems_version: !ruby/object:Gem::Requirement
89
+ requirements:
90
+ - - ">="
91
+ - !ruby/object:Gem::Version
92
+ version: '0'
93
+ requirements: []
94
+ rubygems_version: 3.3.26
95
+ signing_key:
96
+ specification_version: 4
97
+ summary: Rumale::Manifold provides data embedding algorithms with Rumale interface.
98
+ test_files: []