rumale-ensemble 0.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,168 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rumale/validation'
4
+ require 'rumale/base/estimator'
5
+ require 'rumale/base/regressor'
6
+ require 'rumale/linear_model/ridge'
7
+ require 'rumale/model_selection/k_fold'
8
+
9
+ module Rumale
10
+ module Ensemble
11
+ # StackingRegressor is a class that implements a regressor with the stacking method.
12
+ #
13
+ # @example
14
+ # require 'rumale/ensemble/stacking_regressor'
15
+ #
16
+ # estimators = {
17
+ # las: Rumale::LinearModel::Lasso.new(reg_param: 1e-2, random_seed: 1),
18
+ # mlp: Rumale::NeuralNetwork::MLPRegressor.new(hidden_units: [256], random_seed: 1),
19
+ # rnd: Rumale::Ensemble::RandomForestRegressor.new(random_seed: 1)
20
+ # }
21
+ # meta_estimator = Rumale::LinearModel::Ridge.new(random_seed: 1)
22
+ # regressor = Rumale::Ensemble::StackingRegressor.new(
23
+ # estimators: estimators, meta_estimator: meta_estimator, random_seed: 1
24
+ # )
25
+ # regressor.fit(training_samples, training_values)
26
+ # results = regressor.predict(testing_samples)
27
+ #
28
+ # *Reference*
29
+ # - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
30
+ class StackingRegressor < ::Rumale::Base::Estimator
31
+ include ::Rumale::Base::Regressor
32
+
33
+ # Return the base regressors.
34
+ # @return [Hash<Symbol,Regressor>]
35
+ attr_reader :estimators
36
+
37
+ # Return the meta regressor.
38
+ # @return [Regressor]
39
+ attr_reader :meta_estimator
40
+
41
+ # Create a new regressor with the stacking method.
42
+ #
43
+ # @param estimators [Hash<Symbol,Regressor>] The base regressors for extracting meta features.
44
+ # @param meta_estimator [Regressor/Nil] The meta regressor that predicts values.
45
+ # If nil is given, Ridge is used.
46
+ # @param n_splits [Integer] The number of folds for cross validation with k-fold on meta feature extraction in training phase.
47
+ # @param shuffle [Boolean] The flag indicating whether to shuffle the dataset on cross validation.
48
+ # @param passthrough [Boolean] The flag indicating whether to concatenate the original features and meta features when training the meta regressor.
49
+ # @param random_seed [Integer/Nil] The seed value used to initialize the random generator on cross validation.
50
+ def initialize(estimators:, meta_estimator: nil, n_splits: 5, shuffle: true, passthrough: false, random_seed: nil)
51
+ super()
52
+ @estimators = estimators
53
+ @meta_estimator = meta_estimator || ::Rumale::LinearModel::Ridge.new
54
+ @params = {
55
+ n_splits: n_splits,
56
+ shuffle: shuffle,
57
+ passthrough: passthrough,
58
+ random_seed: random_seed || srand
59
+ }
60
+ end
61
+
62
+ # Fit the model with given training data.
63
+ #
64
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
65
+ # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target variables to be used for fitting the model.
66
+ # @return [StackingRegressor] The learned regressor itself.
67
+ def fit(x, y)
68
+ x = ::Rumale::Validation.check_convert_sample_array(x)
69
+ y = ::Rumale::Validation.check_convert_target_value_array(y)
70
+ ::Rumale::Validation.check_sample_size(x, y)
71
+
72
+ n_samples, n_features = x.shape
73
+ n_outputs = y.ndim == 1 ? 1 : y.shape[1]
74
+
75
+ # training base regressors with all training data.
76
+ @estimators.each_key { |name| @estimators[name].fit(x, y) }
77
+
78
+ # detecting size of output for each base regressor.
79
+ @output_size = detect_output_size(n_features)
80
+
81
+ # extracting meta features with base regressors.
82
+ n_components = @output_size.values.sum
83
+ z = Numo::DFloat.zeros(n_samples, n_components)
84
+
85
+ kf = ::Rumale::ModelSelection::KFold.new(
86
+ n_splits: @params[:n_splits], shuffle: @params[:shuffle], random_seed: @params[:random_seed]
87
+ )
88
+
89
+ kf.split(x, y).each do |train_ids, valid_ids|
90
+ x_train = x[train_ids, true]
91
+ y_train = n_outputs == 1 ? y[train_ids] : y[train_ids, true]
92
+ x_valid = x[valid_ids, true]
93
+ f_start = 0
94
+ @estimators.each_key do |name|
95
+ est_fold = Marshal.load(Marshal.dump(@estimators[name]))
96
+ f_last = f_start + @output_size[name]
97
+ f_position = @output_size[name] == 1 ? f_start : f_start...f_last
98
+ z[valid_ids, f_position] = est_fold.fit(x_train, y_train).predict(x_valid)
99
+ f_start = f_last
100
+ end
101
+ end
102
+
103
+ # concatenating original features.
104
+ z = Numo::NArray.hstack([z, x]) if @params[:passthrough]
105
+
106
+ # training meta regressor.
107
+ @meta_estimator.fit(z, y)
108
+
109
+ self
110
+ end
111
+
112
+ # Predict values for samples.
113
+ #
114
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
115
+ # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) The predicted values per sample.
116
+ def predict(x)
117
+ x = ::Rumale::Validation.check_convert_sample_array(x)
118
+
119
+ z = transform(x)
120
+ @meta_estimator.predict(z)
121
+ end
122
+
123
+ # Transform the given data with the learned model.
124
+ #
125
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to be transformed with the learned model.
126
+ # @return [Numo::DFloat] (shape: [n_samples, n_components]) The meta features for samples.
127
+ def transform(x)
128
+ x = ::Rumale::Validation.check_convert_sample_array(x)
129
+
130
+ n_samples = x.shape[0]
131
+ n_components = @output_size.values.sum
132
+ z = Numo::DFloat.zeros(n_samples, n_components)
133
+ f_start = 0
134
+ @estimators.each_key do |name|
135
+ f_last = f_start + @output_size[name]
136
+ f_position = @output_size[name] == 1 ? f_start : f_start...f_last
137
+ z[true, f_position] = @estimators[name].predict(x)
138
+ f_start = f_last
139
+ end
140
+ z = Numo::NArray.hstack([z, x]) if @params[:passthrough]
141
+ z
142
+ end
143
+
144
+ # Fit the model with training data, and then transform them with the learned model.
145
+ #
146
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
147
+ # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target variables to be used for fitting the model.
148
+ # @return [Numo::DFloat] (shape: [n_samples, n_components]) The meta features for training data.
149
+ def fit_transform(x, y)
150
+ x = ::Rumale::Validation.check_convert_sample_array(x)
151
+ y = ::Rumale::Validation.check_convert_target_value_array(y)
152
+ ::Rumale::Validation.check_sample_size(x, y)
153
+
154
+ fit(x, y).transform(x)
155
+ end
156
+
157
+ private
158
+
159
+ def detect_output_size(n_features)
160
+ x_dummy = Numo::DFloat.new(2, n_features).rand
161
+ @estimators.each_key.with_object({}) do |name, obj|
162
+ output_dummy = @estimators[name].predict(x_dummy)
163
+ obj[name] = output_dummy.ndim == 1 ? 1 : output_dummy.shape[1]
164
+ end
165
+ end
166
+ end
167
+ end
168
+ end
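
The example in the class comment above assumes optional Rumale gems (e.g. rumale-neural_network) beyond this gem's dependency list. The sketch below is illustrative only and not part of the released file: it uses only the required rumale-linear_model dependency, and the data, estimator keys (rdg_a, rdg_b), and parameter values are placeholders.

  require 'numo/narray'
  require 'rumale/linear_model/ridge'
  require 'rumale/ensemble/stacking_regressor'

  # Synthetic data: 100 samples, 4 features, a single continuous target.
  x = Numo::DFloat.new(100, 4).rand
  y = 2.0 * x[true, 0] + x[true, 1] - 1.0

  regressor = Rumale::Ensemble::StackingRegressor.new(
    estimators: {
      rdg_a: Rumale::LinearModel::Ridge.new(reg_param: 1e-2),
      rdg_b: Rumale::LinearModel::Ridge.new(reg_param: 1.0)
    },
    n_splits: 3, random_seed: 1
  )
  regressor.fit(x, y)

  # One meta-feature column per base-regressor output; Ridge (the default
  # meta_estimator) is trained on these columns.
  puts regressor.transform(x).shape.inspect # => [100, 2]
  puts regressor.predict(x).shape.inspect   # => [100]
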
@@ -0,0 +1,13 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rumale
4
+ module Ensemble
5
+ # @!visibility private
6
+ module Value
7
+ # @!visibility private
8
+ N_BITS = 1.size * 8
9
+ # @!visibility private
10
+ SEED_BASE = 2**(N_BITS - 1) - 1
11
+ end
12
+ end
13
+ end
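
For orientation only (this note is not part of the package): on a typical 64-bit Ruby build, 1.size is 8, so N_BITS evaluates to 64 and SEED_BASE to 2**63 - 1, the largest signed 64-bit integer. The constants are private and presumably act as an upper bound when drawing per-estimator seeds elsewhere in the gem, roughly like this:

  n_bits = 1.size * 8              # byte width of a machine Integer times 8 => 64
  seed_base = 2**(n_bits - 1) - 1  # => 9223372036854775807
  sub_seed = Random.new(1).rand(seed_base) # a seed in [0, seed_base)
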
@@ -0,0 +1,10 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Rumale is a machine learning library in Ruby.
4
+ module Rumale
5
+ # This module consists of the classes that implement ensemble-based methods.
6
+ module Ensemble
7
+ # @!visibility private
8
+ VERSION = '0.24.0'
9
+ end
10
+ end
@@ -0,0 +1,129 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rumale/validation'
4
+ require 'rumale/base/estimator'
5
+ require 'rumale/base/classifier'
6
+ require 'rumale/preprocessing/label_encoder'
7
+
8
+ module Rumale
9
+ module Ensemble
10
+ # VotingClassifier is a class that implements a classifier with the voting ensemble method.
11
+ #
12
+ # @example
13
+ # require 'rumale/ensemble/voting_classifier'
14
+ #
15
+ # estimators = {
16
+ # lgr: Rumale::LinearModel::LogisticRegression.new(reg_param: 1e-2, random_seed: 1),
17
+ # mlp: Rumale::NeuralNetwork::MLPClassifier.new(hidden_units: [256], random_seed: 1),
18
+ # rnd: Rumale::Ensemble::RandomForestClassifier.new(random_seed: 1)
19
+ # }
20
+ # weights = { lgr: 0.2, mlp: 0.3, rnd: 0.5 }
21
+ #
22
+ # classifier = Rumale::Ensemble::VotingClassifier.new(estimators: estimators, weights: weights, voting: 'soft')
23
+ # classifier.fit(x_train, y_train)
24
+ # results = classifier.predict(x_test)
25
+ #
26
+ # *Reference*
27
+ # - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
28
+ class VotingClassifier < ::Rumale::Base::Estimator
29
+ include ::Rumale::Base::Classifier
30
+
31
+ # Return the sub-classifiers that voted.
32
+ # @return [Hash<Symbol,Classifier>]
33
+ attr_reader :estimators
34
+
35
+ # Return the class labels.
36
+ # @return [Numo::Int32] (size: n_classes)
37
+ attr_reader :classes
38
+
39
+ # Create a new ensemble classifier with a voting rule.
40
+ #
41
+ # @param estimators [Hash<Symbol,Classifier>] The sub-classifiers to vote.
42
+ # @param weights [Hash<Symbol,Float>] The weight value for each classifier.
43
+ # @param voting [String] The voting rule for the predicted results of each classifier.
44
+ # If 'hard' is given, the ensembled classifier predicts the class label by majority vote.
45
+ # If 'soft' is given, the ensembled classifier uses the weighted average of predicted probabilities for the prediction.
46
+ def initialize(estimators:, weights: nil, voting: 'hard')
47
+ super()
48
+ @estimators = estimators
49
+ @params = {
50
+ weights: weights || estimators.each_key.with_object({}) { |name, w| w[name] = 1.0 },
51
+ voting: voting
52
+ }
53
+ end
54
+
55
+ # Fit the model with given training data.
56
+ #
57
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
58
+ # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
59
+ # @return [VotingClassifier] The learned classifier itself.
60
+ def fit(x, y)
61
+ x = ::Rumale::Validation.check_convert_sample_array(x)
62
+ y = ::Rumale::Validation.check_convert_label_array(y)
63
+ ::Rumale::Validation.check_sample_size(x, y)
64
+
65
+ @encoder = ::Rumale::Preprocessing::LabelEncoder.new
66
+ y_encoded = @encoder.fit_transform(y)
67
+ @classes = Numo::NArray[*@encoder.classes]
68
+ @estimators.each_key { |name| @estimators[name].fit(x, y_encoded) }
69
+
70
+ self
71
+ end
72
+
73
+ # Calculate confidence scores for samples.
74
+ #
75
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to compute the scores.
76
+ # @return [Numo::DFloat] (shape: [n_samples, n_classes]) The confidence score per sample.
77
+ def decision_function(x)
78
+ x = ::Rumale::Validation.check_convert_sample_array(x)
79
+
80
+ return predict_proba(x) if soft_voting?
81
+
82
+ n_samples = x.shape[0]
83
+ n_classes = @classes.size
84
+ z = Numo::DFloat.zeros(n_samples, n_classes)
85
+ @estimators.each do |name, estimator|
86
+ estimator.predict(x).to_a.each_with_index { |c, i| z[i, c] += @params[:weights][name] }
87
+ end
88
+ z
89
+ end
90
+
91
+ # Predict class labels for samples.
92
+ #
93
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
94
+ # @return [Numo::Int32] (shape: [n_samples]) The predicted class label per sample.
95
+ def predict(x)
96
+ x = ::Rumale::Validation.check_convert_sample_array(x)
97
+
98
+ n_samples = x.shape[0]
99
+ n_classes = @classes.size
100
+ z = decision_function(x)
101
+ predicted = z.max_index(axis: 1) - Numo::Int32.new(n_samples).seq * n_classes
102
+ Numo::Int32.cast(@encoder.inverse_transform(predicted))
103
+ end
104
+
105
+ # Predict probability for samples.
106
+ #
107
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
108
+ # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
109
+ def predict_proba(x)
110
+ x = ::Rumale::Validation.check_convert_sample_array(x)
111
+
112
+ n_samples = x.shape[0]
113
+ n_classes = @classes.size
114
+ z = Numo::DFloat.zeros(n_samples, n_classes)
115
+ sum_weight = @params[:weights].each_value.sum
116
+ @estimators.each do |name, estimator|
117
+ z += @params[:weights][name] * estimator.predict_proba(x)
118
+ end
119
+ z /= sum_weight
120
+ end
121
+
122
+ private
123
+
124
+ def soft_voting?
125
+ @params[:voting] == 'soft'
126
+ end
127
+ end
128
+ end
129
+ end
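
A minimal, self-contained sketch of the two voting rules (not part of the released files): it assumes the rumale-linear_model and rumale-tree dependencies are installed, and the estimator keys, weights, and data are placeholders.

  require 'numo/narray'
  require 'rumale/linear_model/logistic_regression'
  require 'rumale/tree/decision_tree_classifier'
  require 'rumale/ensemble/voting_classifier'

  x = Numo::DFloat.new(100, 2).rand
  y = Numo::Int32.new(100).rand(2) # random 0/1 labels, enough to exercise the API

  estimators = {
    lgr: Rumale::LinearModel::LogisticRegression.new(reg_param: 1e-2),
    dtc: Rumale::Tree::DecisionTreeClassifier.new(max_depth: 3, random_seed: 1)
  }

  # 'soft' averages predict_proba outputs with the given weights;
  # 'hard' (the default) instead takes a weighted majority vote on labels.
  clf = Rumale::Ensemble::VotingClassifier.new(
    estimators: estimators, weights: { lgr: 0.7, dtc: 0.3 }, voting: 'soft'
  )
  clf.fit(x, y)
  puts clf.predict(x).shape.inspect       # => [100]
  puts clf.predict_proba(x).shape.inspect # => [100, 2]
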
@@ -0,0 +1,84 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'rumale/validation'
4
+ require 'rumale/base/estimator'
5
+ require 'rumale/base/regressor'
6
+
7
+ module Rumale
8
+ module Ensemble
9
+ # VotingRegressor is a class that implements a regressor with the voting ensemble method.
10
+ #
11
+ # @example
12
+ # require 'rumale/ensemble/voting_regressor'
13
+ #
14
+ # estimators = {
15
+ # rdg: Rumale::LinearModel::Ridge.new(reg_param: 1e-2, random_seed: 1),
16
+ # mlp: Rumale::NeuralNetwork::MLPRegressor.new(hidden_units: [256], random_seed: 1),
17
+ # rnd: Rumale::Ensemble::RandomForestRegressor.new(random_seed: 1)
18
+ # }
19
+ # weights = { rdg: 0.2, mlp: 0.3, rnd: 0.5 }
20
+ #
21
+ # regressor = Rumale::Ensemble::VotingRegressor.new(estimators: estimators, weights: weights, voting: 'soft')
22
+ # regressor.fit(x_train, y_train)
23
+ # results = regressor.predict(x_test)
24
+ #
25
+ # *Reference*
26
+ # - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
27
+ class VotingRegressor < ::Rumale::Base::Estimator
28
+ include ::Rumale::Base::Regressor
29
+
30
+ # Return the sub-regressors that voted.
31
+ # @return [Hash<Symbol,Regressor>]
32
+ attr_reader :estimators
33
+
34
+ # Create a new ensemble regressor with a voting rule.
35
+ #
36
+ # @param estimators [Hash<Symbol,Regressor>] The sub-regressors to vote.
37
+ # @param weights [Hash<Symbol,Float>] The weight value for each regressor.
38
+ def initialize(estimators:, weights: nil)
39
+ super()
40
+ @estimators = estimators
41
+ @params = {
42
+ weights: weights || estimators.each_key.with_object({}) { |name, w| w[name] = 1.0 }
43
+ }
44
+ end
45
+
46
+ # Fit the model with given training data.
47
+ #
48
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
49
+ # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target variables to be used for fitting the model.
50
+ # @return [VotingRegressor] The learned regressor itself.
51
+ def fit(x, y)
52
+ x = ::Rumale::Validation.check_convert_sample_array(x)
53
+ y = ::Rumale::Validation.check_convert_target_value_array(y)
54
+ ::Rumale::Validation.check_sample_size(x, y)
55
+
56
+ @n_outputs = y.ndim > 1 ? y.shape[1] : 1
57
+ @estimators.each_key { |name| @estimators[name].fit(x, y) }
58
+
59
+ self
60
+ end
61
+
62
+ # Predict values for samples.
63
+ #
64
+ # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
65
+ # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) Predicted value per sample.
66
+ def predict(x)
67
+ x = ::Rumale::Validation.check_convert_sample_array(x)
68
+
69
+ z = single_target? ? Numo::DFloat.zeros(x.shape[0]) : Numo::DFloat.zeros(x.shape[0], @n_outputs)
70
+ sum_weight = @params[:weights].each_value.sum
71
+ @estimators.each do |name, estimator|
72
+ z += @params[:weights][name] * estimator.predict(x)
73
+ end
74
+ z / sum_weight
75
+ end
76
+
77
+ private
78
+
79
+ def single_target?
80
+ @n_outputs == 1
81
+ end
82
+ end
83
+ end
84
+ end
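
As a sketch of the averaging rule only (not part of the released files; rumale-linear_model assumed installed, names and weights are placeholders), the prediction equals the weighted mean of the sub-regressors' outputs:

  require 'numo/narray'
  require 'rumale/linear_model/ridge'
  require 'rumale/ensemble/voting_regressor'

  x = Numo::DFloat.new(80, 3).rand
  y = x[true, 0] - x[true, 2]

  estimators = {
    rdg_a: Rumale::LinearModel::Ridge.new(reg_param: 1e-2),
    rdg_b: Rumale::LinearModel::Ridge.new(reg_param: 1.0)
  }
  reg = Rumale::Ensemble::VotingRegressor.new(
    estimators: estimators, weights: { rdg_a: 0.8, rdg_b: 0.2 }
  )
  predicted = reg.fit(x, y).predict(x)

  # fit trains the estimators in place, so the weighted mean can be recomputed by hand:
  manual = (0.8 * estimators[:rdg_a].predict(x) + 0.2 * estimators[:rdg_b].predict(x)) / (0.8 + 0.2)
  puts (predicted - manual).abs.max # ~ 0.0
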
@@ -0,0 +1,20 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'numo/narray'
4
+
5
+ require_relative 'ensemble/version'
6
+
7
+ require_relative 'ensemble/value'
8
+
9
+ require_relative 'ensemble/ada_boost_classifier'
10
+ require_relative 'ensemble/ada_boost_regressor'
11
+ require_relative 'ensemble/extra_trees_classifier'
12
+ require_relative 'ensemble/extra_trees_regressor'
13
+ require_relative 'ensemble/gradient_boosting_classifier'
14
+ require_relative 'ensemble/gradient_boosting_regressor'
15
+ require_relative 'ensemble/random_forest_classifier'
16
+ require_relative 'ensemble/random_forest_regressor'
17
+ require_relative 'ensemble/stacking_classifier'
18
+ require_relative 'ensemble/stacking_regressor'
19
+ require_relative 'ensemble/voting_classifier'
20
+ require_relative 'ensemble/voting_regressor'
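
For completeness (not part of the diff): with the gem and its runtime dependencies installed, requiring this entry-point file loads every class listed above in one step, so callers do not need the per-class requires shown in the earlier examples.

  require 'rumale/ensemble'

  puts Rumale::Ensemble::VERSION # => "0.24.0"
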
metadata ADDED
@@ -0,0 +1,152 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: rumale-ensemble
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.24.0
5
+ platform: ruby
6
+ authors:
7
+ - yoshoku
8
+ autorequire:
9
+ bindir: exe
10
+ cert_chain: []
11
+ date: 2022-12-31 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: numo-narray
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: 0.9.1
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: 0.9.1
27
+ - !ruby/object:Gem::Dependency
28
+ name: rumale-core
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: 0.24.0
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: 0.24.0
41
+ - !ruby/object:Gem::Dependency
42
+ name: rumale-linear_model
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: 0.24.0
48
+ type: :runtime
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: 0.24.0
55
+ - !ruby/object:Gem::Dependency
56
+ name: rumale-model_selection
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - "~>"
60
+ - !ruby/object:Gem::Version
61
+ version: 0.24.0
62
+ type: :runtime
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - "~>"
67
+ - !ruby/object:Gem::Version
68
+ version: 0.24.0
69
+ - !ruby/object:Gem::Dependency
70
+ name: rumale-preprocessing
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - "~>"
74
+ - !ruby/object:Gem::Version
75
+ version: 0.24.0
76
+ type: :runtime
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - "~>"
81
+ - !ruby/object:Gem::Version
82
+ version: 0.24.0
83
+ - !ruby/object:Gem::Dependency
84
+ name: rumale-tree
85
+ requirement: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - "~>"
88
+ - !ruby/object:Gem::Version
89
+ version: 0.24.0
90
+ type: :runtime
91
+ prerelease: false
92
+ version_requirements: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - "~>"
95
+ - !ruby/object:Gem::Version
96
+ version: 0.24.0
97
+ description: |
98
+ Rumale::Ensemble provides ensemble learning algorithms,
99
+ such as AdaBoost, Gradient Tree Boosting, and Random Forest,
100
+ with Rumale interface.
101
+ email:
102
+ - yoshoku@outlook.com
103
+ executables: []
104
+ extensions: []
105
+ extra_rdoc_files: []
106
+ files:
107
+ - LICENSE.txt
108
+ - README.md
109
+ - lib/rumale/ensemble.rb
110
+ - lib/rumale/ensemble/ada_boost_classifier.rb
111
+ - lib/rumale/ensemble/ada_boost_regressor.rb
112
+ - lib/rumale/ensemble/extra_trees_classifier.rb
113
+ - lib/rumale/ensemble/extra_trees_regressor.rb
114
+ - lib/rumale/ensemble/gradient_boosting_classifier.rb
115
+ - lib/rumale/ensemble/gradient_boosting_regressor.rb
116
+ - lib/rumale/ensemble/random_forest_classifier.rb
117
+ - lib/rumale/ensemble/random_forest_regressor.rb
118
+ - lib/rumale/ensemble/stacking_classifier.rb
119
+ - lib/rumale/ensemble/stacking_regressor.rb
120
+ - lib/rumale/ensemble/value.rb
121
+ - lib/rumale/ensemble/version.rb
122
+ - lib/rumale/ensemble/voting_classifier.rb
123
+ - lib/rumale/ensemble/voting_regressor.rb
124
+ homepage: https://github.com/yoshoku/rumale
125
+ licenses:
126
+ - BSD-3-Clause
127
+ metadata:
128
+ homepage_uri: https://github.com/yoshoku/rumale
129
+ source_code_uri: https://github.com/yoshoku/rumale/tree/main/rumale-ensemble
130
+ changelog_uri: https://github.com/yoshoku/rumale/blob/main/CHANGELOG.md
131
+ documentation_uri: https://yoshoku.github.io/rumale/doc/
132
+ rubygems_mfa_required: 'true'
133
+ post_install_message:
134
+ rdoc_options: []
135
+ require_paths:
136
+ - lib
137
+ required_ruby_version: !ruby/object:Gem::Requirement
138
+ requirements:
139
+ - - ">="
140
+ - !ruby/object:Gem::Version
141
+ version: '0'
142
+ required_rubygems_version: !ruby/object:Gem::Requirement
143
+ requirements:
144
+ - - ">="
145
+ - !ruby/object:Gem::Version
146
+ version: '0'
147
+ requirements: []
148
+ rubygems_version: 3.3.26
149
+ signing_key:
150
+ specification_version: 4
151
+ summary: Rumale::Ensemble provides ensemble learning algorithms with Rumale interface.
152
+ test_files: []