svmkit 0.7.0 → 0.7.1

This diff shows the changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA256:
- metadata.gz: 1e3564c00ae91e7c4cc9db2d19b19df0158803571a18ba9b1446bea22c36b93e
- data.tar.gz: ebd9a2d55e3935533144bc25498bc9db24d95cfa74fa9b2e8d6299a8b7d54409
+ SHA1:
+ metadata.gz: 1f708500e8ba450849736cd02b152b30e302c1bb
+ data.tar.gz: 210256fcd91375e96b4d68015fae2bf2a5b0d4be
  SHA512:
- metadata.gz: 3fc3e77783fa89bb73b68bd39c21251eae553f8f70259dab31189569b1950f760b72c41b5f839e4d133d156f952995cd2201f2538076435828d19393ea8df007
- data.tar.gz: 953a07537d01e28b4c00714aebcea67789e654896af7ed5bb18900fcbf3d3fdd0be6d60bf8bfe43932e9e6b0e17fc1ade13aa7c9a9aeb564cbcfd79626979bcc
+ metadata.gz: 4a03201b32dc7a5c0db43bfdfe742cc5b369449c2ce0dc63f4905e3e0b9186276f5b382c1b6bff45acdfb91f61712befc58538c1dfa74874722127bec5e2bf03
+ data.tar.gz: deea7c688685935ebe2574448973903115e727cc479b12ea42f382ea697f7218b0de27cccf978c7ea0f4aa0f0fa0bfd5a97ccbbb88e2ea0b87cd1a5ca00cc5f3
data/.travis.yml CHANGED
@@ -7,5 +7,6 @@ rvm:
  - 2.3
  - 2.4
  - 2.5
+ - 2.6
  before_install:
- - gem install --no-document bundler -v '~> 1.16'
+ - gem install --no-document bundler -v '>= 1.17'
data/HISTORY.md CHANGED
@@ -1,3 +1,7 @@
+ # 0.7.1
+ - Fix to use CSV class in parsing libsvm format file.
+ - Refactor ensemble estimators.
+
  # 0.7.0
  - Add class for AdaBoost classifier.
  - Add class for AdaBoost regressor.
data/lib/svmkit.rb CHANGED
@@ -4,6 +4,8 @@ require 'numo/narray'

  require 'svmkit/version'
  require 'svmkit/validation'
+ require 'svmkit/values'
+ require 'svmkit/utils'
  require 'svmkit/pairwise_metric'
  require 'svmkit/dataset'
  require 'svmkit/probabilistic_output'
data/lib/svmkit/dataset.rb CHANGED
@@ -1,5 +1,7 @@
  # frozen_string_literal: true

+ require 'csv'
+
  module SVMKit
  # Module for loading and saving a dataset file.
  module Dataset
@@ -16,11 +18,11 @@ module SVMKit
  ftvecs = []
  labels = []
  n_features = 0
- File.read(filename).split("\n").each do |line|
+ CSV.foreach(filename, col_sep: "\s", headers: false) do |line|
  label, ftvec, max_idx = parse_libsvm_line(line, zero_based)
  labels.push(label)
  ftvecs.push(ftvec)
- n_features = [n_features, max_idx].max
+ n_features = max_idx if n_features < max_idx
  end
  [convert_to_matrix(ftvecs, n_features), Numo::NArray.asarray(labels)]
  end
@@ -48,16 +50,17 @@ module SVMKit
  private

  def parse_libsvm_line(line, zero_based)
- tokens = line.split
- label = parse_label(tokens.shift)
- ftvec = tokens.map do |el|
+ label = parse_label(line.shift)
+ adj_idx = zero_based == false ? 1 : 0
+ max_idx = -1
+ ftvec = []
+ while (el = line.shift)
  idx, val = el.split(':')
- idx = idx.to_i - (zero_based == false ? 1 : 0)
+ idx = idx.to_i - adj_idx
  val = val.to_i.to_s == val ? val.to_i : val.to_f
- [idx, val]
+ max_idx = idx if max_idx < idx
+ ftvec.push([idx, val])
  end
- max_idx = ftvec.map { |el| el[0] }.max
- max_idx ||= 0
  [label, ftvec, max_idx]
  end

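For context, the parsing change above swaps a manual File.read/split over each line for CSV.foreach, with the resulting row then consumed token by token. A minimal sketch of that flow, assuming a LIBSVM-style file named train.txt with lines such as `1 2:0.5 4:1.2` (the filename and values are illustrative only):

    require 'csv'

    # Sketch only: mirrors the CSV-based tokenization used by the loader above.
    # "\s" in a double-quoted Ruby string is a plain space, so fields split on ' '.
    CSV.foreach('train.txt', col_sep: "\s", headers: false) do |row|
      label = row.shift                # first field, e.g. "1"
      row.each do |el|
        next if el.nil? || el.empty?   # guard against stray separators
        idx, val = el.split(':')       # "2:0.5" -> ["2", "0.5"]
        puts "label #{label}: feature #{idx} = #{val}"
      end
    end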
data/lib/svmkit/ensemble/ada_boost_classifier.rb CHANGED
@@ -1,6 +1,8 @@
  # frozen_string_literal: true

  require 'svmkit/validation'
+ require 'svmkit/values'
+ require 'svmkit/utils'
  require 'svmkit/base/base_estimator'
  require 'svmkit/base/classifier'
  require 'svmkit/tree/decision_tree_classifier'
@@ -22,6 +24,7 @@ module SVMKit
  class AdaBoostClassifier
  include Base::BaseEstimator
  include Base::Classifier
+ include Validation

  # Return the set of estimators.
  # @return [Array<DecisionTreeClassifier>]
@@ -52,15 +55,16 @@ module SVMKit
  # If nil is given, split process considers all features.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  # It is used to randomly determine the order of features when deciding spliting point.
- def initialize(n_estimators: 50, criterion: 'gini', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
+ def initialize(n_estimators: 50,
+ criterion: 'gini', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
  max_features: nil, random_seed: nil)
- SVMKit::Validation.check_params_type_or_nil(Integer, max_depth: max_depth, max_leaf_nodes: max_leaf_nodes,
- max_features: max_features, random_seed: random_seed)
- SVMKit::Validation.check_params_integer(n_estimators: n_estimators, min_samples_leaf: min_samples_leaf)
- SVMKit::Validation.check_params_string(criterion: criterion)
- SVMKit::Validation.check_params_positive(n_estimators: n_estimators, max_depth: max_depth,
- max_leaf_nodes: max_leaf_nodes, min_samples_leaf: min_samples_leaf,
- max_features: max_features)
+ check_params_type_or_nil(Integer, max_depth: max_depth, max_leaf_nodes: max_leaf_nodes,
+ max_features: max_features, random_seed: random_seed)
+ check_params_integer(n_estimators: n_estimators, min_samples_leaf: min_samples_leaf)
+ check_params_string(criterion: criterion)
+ check_params_positive(n_estimators: n_estimators, max_depth: max_depth,
+ max_leaf_nodes: max_leaf_nodes, min_samples_leaf: min_samples_leaf,
+ max_features: max_features)
  @params = {}
  @params[:n_estimators] = n_estimators
  @params[:criterion] = criterion
@@ -82,9 +86,9 @@ module SVMKit
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [AdaBoostClassifier] The learned classifier itself.
  def fit(x, y) # rubocop:disable Metrics/AbcSize
- SVMKit::Validation.check_sample_array(x)
- SVMKit::Validation.check_label_array(y)
- SVMKit::Validation.check_sample_label_size(x, y)
+ check_sample_array(x)
+ check_label_array(y)
+ check_sample_label_size(x, y)
  ## Initialize some variables.
  n_samples, n_features = x.shape
  @estimators = []
@@ -100,12 +104,12 @@ module SVMKit
  observation_weights = Numo::DFloat.zeros(n_samples) + 1.fdiv(n_samples)
  @params[:n_estimators].times do |_t|
  # Fit classfier.
- ids = weighted_sampling(observation_weights)
+ ids = SVMKit::Utils.choice_ids(n_samples, observation_weights, @rng)
  break if y[ids].to_a.uniq.size != n_classes
  tree = Tree::DecisionTreeClassifier.new(
  criterion: @params[:criterion], max_depth: @params[:max_depth],
  max_leaf_nodes: @params[:max_leaf_nodes], min_samples_leaf: @params[:min_samples_leaf],
- max_features: @params[:max_features], random_seed: @rng.rand(int_max)
+ max_features: @params[:max_features], random_seed: @rng.rand(SVMKit::Values::int_max)
  )
  tree.fit(x[ids, true], y[ids])
  # Calculate estimator error.
@@ -134,7 +138,7 @@ module SVMKit
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to compute the scores.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Confidence score per sample.
  def decision_function(x)
- SVMKit::Validation.check_sample_array(x)
+ check_sample_array(x)
  n_samples, = x.shape
  n_classes = @classes.size
  sum_probs = Numo::DFloat.zeros(n_samples, n_classes)
@@ -150,7 +154,7 @@ module SVMKit
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  def predict(x)
- SVMKit::Validation.check_sample_array(x)
+ check_sample_array(x)
  n_samples, = x.shape
  probs = decision_function(x)
  Numo::Int32.asarray(Array.new(n_samples) { |n| @classes[probs[n, true].max_index] })
@@ -161,7 +165,7 @@ module SVMKit
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probailities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  def predict_proba(x)
- SVMKit::Validation.check_sample_array(x)
+ check_sample_array(x)
  n_classes = @classes.size
  probs = Numo::NMath.exp(1.fdiv(n_classes - 1) * decision_function(x))
  sum_probs = probs.sum(1)
@@ -172,8 +176,11 @@ module SVMKit
  # Dump marshal data.
  # @return [Hash] The marshal data about AdaBoostClassifier.
  def marshal_dump
- { params: @params, estimators: @estimators, classes: @classes,
- feature_importances: @feature_importances, rng: @rng }
+ { params: @params,
+ estimators: @estimators,
+ classes: @classes,
+ feature_importances: @feature_importances,
+ rng: @rng }
  end

  # Load marshal data.
@@ -186,27 +193,6 @@ module SVMKit
  @rng = obj[:rng]
  nil
  end
-
- private
-
- def weighted_sampling(weights)
- Array.new(weights.size) do
- target = @rng.rand
- chosen = 0
- weights.each_with_index do |w, idx|
- if target <= w
- chosen = idx
- break
- end
- target -= w
- end
- chosen
- end
- end
-
- def int_max
- @int_max ||= 2**([42].pack('i').size * 16 - 2) - 1
- end
  end
  end
  end
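Throughout the ensemble classes, the explicit SVMKit::Validation.check_* receivers are dropped in favour of include Validation, so the checks become private instance methods callable without a prefix. A minimal sketch of why that works; the module body and ExampleEstimator class here are simplified assumptions, not the gem's actual code:

    module SVMKit
      module Validation
        # module_function exposes each check both as
        # SVMKit::Validation.check_params_string(...) and, via include,
        # as a private instance method.
        module_function

        def check_params_string(**params)
          params.each do |name, value|
            raise TypeError, "expect #{name} to be a String" unless value.is_a?(String)
          end
          nil
        end
      end

      class ExampleEstimator
        include Validation

        def initialize(criterion: 'gini')
          check_params_string(criterion: criterion) # no SVMKit::Validation. receiver needed
          @criterion = criterion
        end
      end
    end

    SVMKit::ExampleEstimator.new(criterion: 'gini') # passes validation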
data/lib/svmkit/ensemble/ada_boost_regressor.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true

  require 'svmkit/validation'
+ require 'svmkit/values'
  require 'svmkit/base/base_estimator'
  require 'svmkit/base/regressor'
  require 'svmkit/tree/decision_tree_regressor'
@@ -106,11 +107,11 @@ module SVMKit
  # Construct forest.
  @params[:n_estimators].times do |_t|
  # Fit weak learner.
- ids = weighted_sampling(observation_weights)
+ ids = SVMKit::Utils.choice_ids(n_samples, observation_weights, @rng)
  tree = Tree::DecisionTreeRegressor.new(
  criterion: @params[:criterion], max_depth: @params[:max_depth],
  max_leaf_nodes: @params[:max_leaf_nodes], min_samples_leaf: @params[:min_samples_leaf],
- max_features: @params[:max_features], random_seed: @rng.rand(int_max)
+ max_features: @params[:max_features], random_seed: @rng.rand(SVMKit::Values::int_max)
  )
  tree.fit(x[ids, true], y[ids])
  p = tree.predict(x)
@@ -174,27 +175,6 @@ module SVMKit
  @rng = obj[:rng]
  nil
  end
-
- private
-
- def weighted_sampling(weights)
- Array.new(weights.size) do
- target = @rng.rand
- chosen = 0
- weights.each_with_index do |w, idx|
- if target <= w
- chosen = idx
- break
- end
- target -= w
- end
- chosen
- end
- end
-
- def int_max
- @int_max ||= 2**([42].pack('i').size * 16 - 2) - 1
- end
  end
  end
  end
data/lib/svmkit/ensemble/random_forest_classifier.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true

  require 'svmkit/validation'
+ require 'svmkit/values'
  require 'svmkit/base/base_estimator'
  require 'svmkit/base/classifier'
  require 'svmkit/tree/decision_tree_classifier'
@@ -20,6 +21,7 @@ module SVMKit
  class RandomForestClassifier
  include Base::BaseEstimator
  include Base::Classifier
+ include Validation

  # Return the set of estimators.
  # @return [Array<DecisionTreeClassifier>]
@@ -50,15 +52,16 @@ module SVMKit
  # If nil is given, split process considers all features.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  # It is used to randomly determine the order of features when deciding spliting point.
- def initialize(n_estimators: 10, criterion: 'gini', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
+ def initialize(n_estimators: 10,
+ criterion: 'gini', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
  max_features: nil, random_seed: nil)
- SVMKit::Validation.check_params_type_or_nil(Integer, max_depth: max_depth, max_leaf_nodes: max_leaf_nodes,
- max_features: max_features, random_seed: random_seed)
- SVMKit::Validation.check_params_integer(n_estimators: n_estimators, min_samples_leaf: min_samples_leaf)
- SVMKit::Validation.check_params_string(criterion: criterion)
- SVMKit::Validation.check_params_positive(n_estimators: n_estimators, max_depth: max_depth,
- max_leaf_nodes: max_leaf_nodes, min_samples_leaf: min_samples_leaf,
- max_features: max_features)
+ check_params_type_or_nil(Integer, max_depth: max_depth, max_leaf_nodes: max_leaf_nodes,
+ max_features: max_features, random_seed: random_seed)
+ check_params_integer(n_estimators: n_estimators, min_samples_leaf: min_samples_leaf)
+ check_params_string(criterion: criterion)
+ check_params_positive(n_estimators: n_estimators, max_depth: max_depth,
+ max_leaf_nodes: max_leaf_nodes, min_samples_leaf: min_samples_leaf,
+ max_features: max_features)
  @params = {}
  @params[:n_estimators] = n_estimators
  @params[:criterion] = criterion
@@ -80,9 +83,9 @@ module SVMKit
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [RandomForestClassifier] The learned classifier itself.
  def fit(x, y)
- SVMKit::Validation.check_sample_array(x)
- SVMKit::Validation.check_label_array(y)
- SVMKit::Validation.check_sample_label_size(x, y)
+ check_sample_array(x)
+ check_label_array(y)
+ check_sample_label_size(x, y)
  # Initialize some variables.
  n_samples, n_features = x.shape
  @params[:max_features] = Math.sqrt(n_features).to_i unless @params[:max_features].is_a?(Integer)
@@ -94,7 +97,7 @@ module SVMKit
  tree = Tree::DecisionTreeClassifier.new(
  criterion: @params[:criterion], max_depth: @params[:max_depth],
  max_leaf_nodes: @params[:max_leaf_nodes], min_samples_leaf: @params[:min_samples_leaf],
- max_features: @params[:max_features], random_seed: @rng.rand(int_max)
+ max_features: @params[:max_features], random_seed: @rng.rand(SVMKit::Values::int_max)
  )
  bootstrap_ids = Array.new(n_samples) { @rng.rand(0...n_samples) }
  tree.fit(x[bootstrap_ids, true], y[bootstrap_ids])
@@ -110,7 +113,7 @@ module SVMKit
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  def predict(x)
- SVMKit::Validation.check_sample_array(x)
+ check_sample_array(x)
  n_samples, = x.shape
  n_classes = @classes.size
  classes_arr = @classes.to_a
@@ -130,7 +133,7 @@ module SVMKit
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probailities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  def predict_proba(x)
- SVMKit::Validation.check_sample_array(x)
+ check_sample_array(x)
  n_samples, = x.shape
  n_classes = @classes.size
  classes_arr = @classes.to_a
@@ -150,7 +153,7 @@ module SVMKit
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  def apply(x)
- SVMKit::Validation.check_sample_array(x)
+ check_sample_array(x)
  Numo::Int32[*Array.new(@params[:n_estimators]) { |n| @estimators[n].apply(x) }].transpose
  end

@@ -174,12 +177,6 @@ module SVMKit
  @rng = obj[:rng]
  nil
  end
-
- private
-
- def int_max
- @int_max ||= 2**([42].pack('i').size * 16 - 2) - 1
- end
  end
  end
  end
data/lib/svmkit/ensemble/random_forest_regressor.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true

  require 'svmkit/validation'
+ require 'svmkit/values'
  require 'svmkit/base/base_estimator'
  require 'svmkit/base/regressor'
  require 'svmkit/tree/decision_tree_regressor'
@@ -46,7 +47,8 @@ module SVMKit
  # If nil is given, split process considers all features.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  # It is used to randomly determine the order of features when deciding spliting point.
- def initialize(n_estimators: 10, criterion: 'mse', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
+ def initialize(n_estimators: 10,
+ criterion: 'mse', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
  max_features: nil, random_seed: nil)
  check_params_type_or_nil(Integer, max_depth: max_depth, max_leaf_nodes: max_leaf_nodes,
  max_features: max_features, random_seed: random_seed)
@@ -89,7 +91,7 @@ module SVMKit
  tree = Tree::DecisionTreeRegressor.new(
  criterion: @params[:criterion], max_depth: @params[:max_depth],
  max_leaf_nodes: @params[:max_leaf_nodes], min_samples_leaf: @params[:min_samples_leaf],
- max_features: @params[:max_features], random_seed: @rng.rand(int_max)
+ max_features: @params[:max_features], random_seed: @rng.rand(SVMKit::Values::int_max)
  )
  bootstrap_ids = Array.new(n_samples) { @rng.rand(0...n_samples) }
  tree.fit(x[bootstrap_ids, true], single_target ? y[bootstrap_ids] : y[bootstrap_ids, true])
@@ -136,12 +138,6 @@ module SVMKit
  @rng = obj[:rng]
  nil
  end
-
- private
-
- def int_max
- @int_max ||= 2**([42].pack('i').size * 16 - 2) - 1
- end
  end
  end
  end
data/lib/svmkit/utils.rb ADDED
@@ -0,0 +1,22 @@
+ # frozen_string_literal: true
+
+ module SVMKit
+ # @!visibility private
+ module Utils
+ module_function
+
+ # @!visibility private
+ def choice_ids(size, probs, rng=nil)
+ rng ||= Random.new
+ Array.new(size) do
+ target = rng.rand
+ chosen = 0
+ probs.each_with_index do |p, idx|
+ break (chosen = idx) if target <= p
+ target -= p
+ end
+ chosen
+ end
+ end
+ end
+ end
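The new SVMKit::Utils.choice_ids consolidates the weighted_sampling helpers removed from the AdaBoost classes: it draws `size` indices, each index picked with probability proportional to its entry in `probs` (an inverse-CDF walk over the weights, assuming they sum to one, as the observation weights above do). A usage sketch with illustrative values:

    require 'svmkit'

    # Illustrative weights only; in the estimators these are the observation weights.
    weights = Numo::DFloat[0.1, 0.2, 0.7]
    rng     = Random.new(42)

    # Draw five indices; index 2 is picked most often because its weight is 0.7.
    ids = SVMKit::Utils.choice_ids(5, weights, rng)
    # => an Array of five integers in 0..2, weighted toward 2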
data/lib/svmkit/values.rb ADDED
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ module SVMKit
+ # @!visibility private
+ module Values
+ module_function
+
+ # @!visibility private
+ def int_max
+ @int_max ||= 2**([42].pack('i').size * 16 - 2) - 1
+ end
+ end
+ end
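SVMKit::Values.int_max likewise replaces the per-class int_max helpers: [42].pack('i').size is the byte width of a native int, so on a typical 64-bit build the expression evaluates to 2**62 - 1, the largest Fixnum-sized integer, which the estimators use as the upper bound for @rng.rand when seeding each tree. A quick worked check, assuming a 4-byte native int:

    require 'svmkit'

    [42].pack('i').size     # => 4 (bytes in a native int; platform-dependent)
    4 * 16 - 2              # => 62
    2**62 - 1               # => 4611686018427387903
    SVMKit::Values.int_max  # => 4611686018427387903 on such a platform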
data/lib/svmkit/version.rb CHANGED
@@ -3,5 +3,5 @@
  # SVMKit is a machine learning library in Ruby.
  module SVMKit
  # @!visibility private
- VERSION = '0.7.0'.freeze
+ VERSION = '0.7.1'.freeze
  end
data/svmkit.gemspec CHANGED
@@ -35,7 +35,7 @@ MSG

  spec.add_runtime_dependency 'numo-narray', '>= 0.9.1'

- spec.add_development_dependency 'bundler', '~> 1.16'
+ spec.add_development_dependency 'bundler', '>= 1.16'
  spec.add_development_dependency 'coveralls', '~> 0.8'
  spec.add_development_dependency 'rake', '~> 12.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: svmkit
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.7.0
4
+ version: 0.7.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - yoshoku
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2018-12-02 00:00:00.000000000 Z
11
+ date: 2018-12-17 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: numo-narray
@@ -28,14 +28,14 @@ dependencies:
28
28
  name: bundler
29
29
  requirement: !ruby/object:Gem::Requirement
30
30
  requirements:
31
- - - "~>"
31
+ - - ">="
32
32
  - !ruby/object:Gem::Version
33
33
  version: '1.16'
34
34
  type: :development
35
35
  prerelease: false
36
36
  version_requirements: !ruby/object:Gem::Requirement
37
37
  requirements:
38
- - - "~>"
38
+ - - ">="
39
39
  - !ruby/object:Gem::Version
40
40
  version: '1.16'
41
41
  - !ruby/object:Gem::Dependency
@@ -168,7 +168,9 @@ files:
168
168
  - lib/svmkit/tree/decision_tree_classifier.rb
169
169
  - lib/svmkit/tree/decision_tree_regressor.rb
170
170
  - lib/svmkit/tree/node.rb
171
+ - lib/svmkit/utils.rb
171
172
  - lib/svmkit/validation.rb
173
+ - lib/svmkit/values.rb
172
174
  - lib/svmkit/version.rb
173
175
  - svmkit.gemspec
174
176
  homepage: https://github.com/yoshoku/svmkit
@@ -191,7 +193,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
191
193
  version: '0'
192
194
  requirements: []
193
195
  rubyforge_project:
194
- rubygems_version: 2.7.6
196
+ rubygems_version: 2.5.2.3
195
197
  signing_key:
196
198
  specification_version: 4
197
199
  summary: SVMKit is a machine learninig library in Ruby. SVMKit provides machine learning