feldtruby 0.3.18 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -23,7 +23,7 @@ class Optimizer
23
23
 
24
24
  # Must setup logger before setting options since verbosity of logger is
25
25
  # an option!
26
- setup_logger_and_distribute_to_instance_variables()
26
+ setup_logger_and_distribute_to_instance_variables(options)
27
27
 
28
28
  initialize_options(@options)
29
29
  end
@@ -35,11 +35,11 @@ class Optimizer
35
35
 
36
36
  # Optimize the objective in the given search space.
37
37
  def optimize()
38
+ logger.log "Optimization with optimizer #{self.class.inspect} started"
38
39
  @num_optimization_steps = 0
39
40
  # Set up a random best since other methods require it
40
41
  update_best([search_space.gen_candidate()])
41
42
  begin
42
- logger.log "Optimization with optimizer #{self.class.inspect} started"
43
43
  while !termination_criterion.terminate?(self)
44
44
  new_candidates = optimization_step()
45
45
  @num_optimization_steps += 1
@@ -83,11 +83,10 @@ class Optimizer
83
83
  if @best.nil? || @objective.is_better_than?(best_new, @best)
84
84
  qb = @best.nil? ? nil : @objective.quality_of(@best)
85
85
  logger.log_data :new_best, {
86
- :new_best => best_new,
87
- :new_quality_value => @objective.quality_of(best_new),
88
- :old_best => @best,
89
- :old_quality_value => qb
90
- }, "New best candidate found"
86
+ "New best" => best_new,
87
+ "New quality" => @objective.quality_of(best_new),
88
+ "Old best" => @best,
89
+ "Old quality" => qb}, "Optimizer (step #{@num_optimization_steps}): New best solution found", true
91
90
  @best = best_new
92
91
  true
93
92
  else
@@ -212,7 +211,7 @@ DefaultOptimizationOptions = {
212
211
  :verbose => true,
213
212
  :populationSize => 200,
214
213
  :samplerClass => FeldtRuby::Optimize::RadiusLimitedPopulationSampler,
215
- :samplerRadius => 10 # Max distance between individuals selected in same tournament
214
+ :samplerRadius => 8 # Max distance between individuals selected in same tournament.
216
215
  }
217
216
 
218
217
  def self.override_default_options_with(options)
@@ -1,3 +1,3 @@
1
1
  module FeldtRuby
2
- VERSION = "0.3.18"
2
+ VERSION = "0.4.0"
3
3
  end
@@ -32,62 +32,6 @@ SamplerRadiuses = SamplerRadiuses1
32
32
 
33
33
  NumRepetitionsPerSampler = 5
34
34
 
35
- # This is Lévi function number 13 as stated on the page:
36
- # http://en.wikipedia.org/wiki/Test_functions_for_optimization
37
- # It has a global minima at f(1,1) = 0. -10 <= x,y <= 10
38
- class MinLeviFunctionNum13 < FeldtRuby::Optimize::Objective
39
- TwoPi = 2*Math::PI
40
- ThreePi = 3*Math::PI
41
-
42
- def objective_min_levi13(candidate)
43
- x, y = candidate[0], candidate[1]
44
- sin_3pi_x = Math.sin(ThreePi * x)
45
- sin_3pi_y = Math.sin(ThreePi * y)
46
- sin_2pi_y = Math.sin(TwoPi * y)
47
- x_min1 = x - 1.0
48
- y_min1 = y - 1.0
49
-
50
- (sin_3pi_x * sin_3pi_x) +
51
- (x_min1 * x_min1) * (1 + (sin_3pi_y * sin_3pi_y)) +
52
- (y_min1 * y_min1) * (1 + (sin_3pi_y * sin_2pi_y))
53
- end
54
- end
55
-
56
- # This is Beale's function as stated on the page:
57
- # http://en.wikipedia.org/wiki/Test_functions_for_optimization
58
- # It has a global minima at f(3,0.5) = 0. -4.5 <= x,y <= 4.5
59
- class MinBealeFunction < FeldtRuby::Optimize::Objective
60
- def objective_min_beales_func(candidate)
61
- x, y = candidate[0], candidate[1]
62
-
63
- t1 = 1.5 - x + (x*y)
64
- t2 = 2.25 - x + (x*y*y)
65
- t3 = 2.625 - x + (x*y*y*y)
66
-
67
- (t1*t1) + (t2*t2) + (t3*t3)
68
- end
69
- end
70
-
71
- # This is Easom's function as stated on the page:
72
- # http://en.wikipedia.org/wiki/Test_functions_for_optimization
73
- # It has a global minima at f(3,0.5) = 0. -4.5 <= x,y <= 4.5
74
- class MinEasomFunction < FeldtRuby::Optimize::Objective
75
- def objective_min_easom_func(candidate)
76
- x, y = candidate[0], candidate[1]
77
-
78
- f1 = Math.cos(x)
79
-
80
- f2 = Math.cos(y)
81
-
82
- x_min_pi = x - Math::PI
83
- y_min_pi = y - Math::PI
84
-
85
- f3 = Math.exp(-(x_min_pi*x_min_pi + y_min_pi*y_min_pi))
86
-
87
- (-f1) * f2 * f3
88
- end
89
- end
90
-
91
35
  # EggHolder function as stated on the page:
92
36
  # http://en.wikipedia.org/wiki/Test_functions_for_optimization
93
37
  class MinEggHolderFunction < FeldtRuby::Optimize::Objective
@@ -105,26 +49,6 @@ class MinEggHolderFunction < FeldtRuby::Optimize::Objective
105
49
  end
106
50
  end
107
51
 
108
- class MinFunctionOfDimension < FeldtRuby::Optimize::Objective
109
- attr_accessor :dimension
110
- def minimum
111
- 0.0
112
- end
113
- def min_solutions
114
- @min_solutions ||= ([[0.0] * dimension])
115
- end
116
- end
117
-
118
- # Sphere function as stated in the JADE paper:
119
- # http://150.214.190.154/EAMHCO/pdf/JADE.pdf
120
- class MinSphere < MinFunctionOfDimension
121
- def objective_min_func(x)
122
- x.inject(0.0) do |sum, xi|
123
- sum + (xi*xi)
124
- end
125
- end
126
- end
127
-
128
52
  # Schwefel 2.22 function as stated in the JADE paper:
129
53
  # http://150.214.190.154/EAMHCO/pdf/JADE.pdf
130
54
  class MinSchwefel2_22 < MinFunctionOfDimension
data/test/helper.rb CHANGED
@@ -2,8 +2,13 @@ require 'rubygems'
2
2
  require 'minitest/autorun'
3
3
  require 'minitest/spec'
4
4
 
5
- $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
6
- $LOAD_PATH.unshift(File.dirname(__FILE__))
5
+ FeldtRubyTestDir = File.dirname(__FILE__)
6
+ FeldtRubyLibDir = File.join(FeldtRubyTestDir, '..', 'lib')
7
+ FeldtRubyLongTestDir = File.join(FeldtRubyTestDir, 'long_running')
8
+
9
+ $LOAD_PATH.unshift FeldtRubyLibDir
10
+ $LOAD_PATH.unshift FeldtRubyTestDir
11
+ $LOAD_PATH.unshift FeldtRubyLongTestDir
7
12
 
8
13
  require 'feldtruby'
9
14
  require 'feldtruby/minitest_extensions'
@@ -0,0 +1,58 @@
1
+ require File.join(File.dirname(__FILE__), "single_objective_problems")
2
+
3
+ class MinMulti2ObjectiveFuncOfDimensions < MinFuncOfDimensionObj
4
+ def minimum1
5
+ 0.0
6
+ end
7
+
8
+ def minimum2
9
+ 0.0
10
+ end
11
+
12
+ # Known optima that gives the minimum value.
13
+ def min_solutions
14
+ @min_solutions ||= ([[0.0] * dimension])
15
+ end
16
+
17
+ def objective_min_func1(x)
18
+ calc_func1(x) - minimum1
19
+ end
20
+
21
+ def objective_min_func2(x)
22
+ calc_func2(x) - minimum2
23
+ end
24
+
25
+ def calc_func1(x)
26
+ raise NotImplementedError
27
+ end
28
+
29
+ def calc_func2(x)
30
+ raise NotImplementedError
31
+ end
32
+ end
33
+
34
+
35
+ # This is the OmniTest bi-criteria test function as described in the paper:
36
+ # Shir et al, "Enhancing Decision Space Diversity in Evolutionary Multiobjective Algorithms", 2009.
37
+ # They used dimensions == 5.
38
+ class MinOmniTest < MinFuncOfDimensionObj
39
+ def domain_per_dimension
40
+ [0.0, 6.0]
41
+ end
42
+
43
+ def minimum1
44
+ @minimum1 ||= (-1 * dimensions)
45
+ end
46
+
47
+ def minimum2
48
+ @minimum2 ||= (-1 * dimensions)
49
+ end
50
+
51
+ def calc_func1(x)
52
+ x.map {|xi| Math.sin(Math::PI * xi)}.sum
53
+ end
54
+
55
+ def calc_func2(x)
56
+ x.map {|xi| Math.cos(Math::PI * xi)}.sum
57
+ end
58
+ end
@@ -0,0 +1,163 @@
1
+ require 'feldtruby/optimize/search_space'
2
+
3
+ class MinContinousFunction < FeldtRuby::Optimize::Objective
4
+ # Subclasses should fix this value.
5
+ def dimensions
6
+ @dimensions
7
+ end
8
+
9
+ # Domain (min, max) per dimension. Override this (or just override
10
+ # search_space or domain_as_mins_maxs directly) so that a typical/valid
11
+ # search space can be created.
12
+ def domain_per_dimension
13
+ raise NotImplementedError
14
+ end
15
+
16
+ # For more complex domains sub-classes can override this method.
17
+ def domain_as_mins_maxs
18
+ [domain_per_dimension] * self.dimensions
19
+ end
20
+
21
+ # Create a valid search space. Default is to use the domain_per_dimension
22
+ # a dimension number of times.
23
+ def search_space
24
+ FeldtRuby::Optimize::SearchSpace.new_from_min_max_per_variable domain_as_mins_maxs
25
+ end
26
+ end
27
+
28
+ class MinSingleObjectiveFunc < MinContinousFunction
29
+ # Known minimum (or nil if not known). Default is at 0.0.
30
+ def minimum
31
+ 0.0
32
+ end
33
+
34
+ # Known optima that gives the minimum value. Default is that it is at 0,...,0.
35
+ def min_solutions
36
+ @min_solutions ||= ([[0.0] * dimensions])
37
+ end
38
+
39
+ def objective_min_func(x)
40
+ calc_func(x) - minimum
41
+ end
42
+
43
+ # Subclasses must implement this which is the function to be minimized.
44
+ def calc_func(x)
45
+ raise NotImplementedError
46
+ end
47
+ end
48
+
49
+ # Objectives that minimize a function which is parameterized on a dimension
50
+ # parameter should include this module.
51
+ module MinFuncOfDimensionObj
52
+ attr_writer :dimensions
53
+ end
54
+
55
+ class MinSingleObjectiveFuncOfDimensions < MinSingleObjectiveFunc
56
+ include MinFuncOfDimensionObj
57
+ end
58
+
59
+ # Sphere function as stated in the JADE paper:
60
+ # http://150.214.190.154/EAMHCO/pdf/JADE.pdf
61
+ class MinSphere < MinSingleObjectiveFuncOfDimensions
62
+ def calc_func(x)
63
+ x.inject(0.0) do |sum, xi|
64
+ sum + (xi*xi)
65
+ end
66
+ end
67
+
68
+ def domain_per_dimension
69
+ [-100, 100]
70
+ end
71
+ end
72
+
73
+ class Min2DSingleObjectiveFunc < MinSingleObjectiveFunc
74
+ def dimensions
75
+ 2
76
+ end
77
+ end
78
+
79
+ # This is Lévi function number 13 as stated on the page:
80
+ # http://en.wikipedia.org/wiki/Test_functions_for_optimization
81
+ # It has a global minimum at f(1,1) = 0. -10 <= x,y <= 10
82
+ class MinLevi13 < Min2DSingleObjectiveFunc
83
+ def min_solutions
84
+ [[1.0, 1.0]]
85
+ end
86
+
87
+ def domain_per_dimension
88
+ [-10.0, 10.0]
89
+ end
90
+
91
+ TwoPi = 2*Math::PI
92
+ ThreePi = 3*Math::PI
93
+
94
+ def calc_func(candidate)
95
+ x, y = candidate[0], candidate[1]
96
+
97
+ sin_3pi_x = Math.sin(ThreePi * x)
98
+ sin_3pi_y = Math.sin(ThreePi * y)
99
+ sin_2pi_y = Math.sin(TwoPi * y)
100
+ x_min1 = x - 1.0
101
+ y_min1 = y - 1.0
102
+
103
+ (sin_3pi_x * sin_3pi_x) +
104
+ (x_min1 * x_min1) * (1 + (sin_3pi_y * sin_3pi_y)) +
105
+ (y_min1 * y_min1) * (1 + (sin_3pi_y * sin_2pi_y))
106
+ end
107
+ end
108
+
109
+ # This is Beale's function as stated on the page:
110
+ # http://en.wikipedia.org/wiki/Test_functions_for_optimization
111
+ class MinBeale < Min2DSingleObjectiveFunc
112
+ def min_solutions
113
+ [[3.0, 0.5]]
114
+ end
115
+
116
+ def domain_per_dimension
117
+ [-4.5, 4.5]
118
+ end
119
+
120
+ def calc_func(candidate)
121
+ x, y = candidate[0], candidate[1]
122
+
123
+ t1 = 1.5 - x + (x*y)
124
+ t2 = 2.25 - x + (x*y*y)
125
+ t3 = 2.625 - x + (x*y*y*y)
126
+
127
+ (t1*t1) + (t2*t2) + (t3*t3)
128
+ end
129
+ end
130
+
131
+ # This is Easom's function as stated on the page:
132
+ # http://en.wikipedia.org/wiki/Test_functions_for_optimization
133
+ class MinEasom < Min2DSingleObjectiveFunc
134
+ def minimum
135
+ -1.0
136
+ end
137
+
138
+ PI = Math::PI
139
+
140
+ def min_solutions
141
+ [[PI, PI]]
142
+ end
143
+
144
+ def domain_per_dimension
145
+ [-100.0, 100.0]
146
+ end
147
+
148
+ def calc_func(candidate)
149
+ x, y = candidate[0], candidate[1]
150
+
151
+ f1 = Math.cos(x)
152
+
153
+ f2 = Math.cos(y)
154
+
155
+ x_min_pi = x - PI
156
+ y_min_pi = y - PI
157
+
158
+ f3 = Math.exp(-(x_min_pi*x_min_pi + y_min_pi*y_min_pi))
159
+
160
+ (-f1) * f2 * f3
161
+ end
162
+ end
163
+
@@ -0,0 +1,112 @@
1
+ require File.join(FeldtRubyLongTestDir, "single_objective_problems")
2
+ require 'feldtruby/optimize/differential_evolution'
3
+ include FeldtRuby::Optimize
4
+
5
+ module MiniTest::Assertions
6
+ # Assert that _bestSolution_ is close to at least one of the solutions that minimize
7
+ # the _objective_. We use the minimum RMS distance to the solutions, which should
8
+ # be close to zero for at least one solution.
9
+ def assert_close_to_one_solution(objective, bestSolution, precision = 0.01, msg = nil)
10
+ rmss = objective.min_solutions.map do |min_solution|
11
+ bestSolution.to_a.rms_from(min_solution)
12
+ end
13
+ # The minimum RMS to a solution must be close to zero.
14
+ rmss.min.must_be_close_to 0.0, precision
15
+ end
16
+ end
17
+
18
+ module MiniTest::Expectations
19
+ infect_an_assertion :assert_close_to_one_solution, :must_be_close_to_one_solution_of
20
+ end
21
+
22
+ describe "Sphere function" do
23
+ def best_from_de_on_sphere(dimensions, numSteps = 25_000, verbose = false)
24
+ sphere = MinSphere.new
25
+ sphere.dimensions = dimensions
26
+ ss = sphere.search_space
27
+ de = DEOptimizer.new(sphere, ss, {:verbose => verbose,
28
+ :maxNumSteps => numSteps})
29
+ best = de.optimize().to_a
30
+ return best, sphere
31
+ end
32
+
33
+ it 'can optimize the Sphere function in 3 dimensions' do
34
+ best, sphere3 = best_from_de_on_sphere 3, 15_000
35
+
36
+ val = sphere3.calc_func(best)
37
+ val.must_be_close_to 0.0
38
+ val.must_be :>, 0.0
39
+
40
+ best.must_be_close_to_one_solution_of sphere3
41
+ end
42
+
43
+ it 'can optimize the Sphere function in 10 dimensions' do
44
+ best, sphere10 = best_from_de_on_sphere 10, 60_000
45
+
46
+ val = sphere10.calc_func(best)
47
+ val.must_be_close_to 0.0
48
+ val.must_be :>=, 0.0
49
+
50
+ best.must_be_close_to_one_solution_of sphere10
51
+ end
52
+
53
+ it 'can optimize the Sphere function in 30 dimensions' do
54
+ best, obj = best_from_de_on_sphere 30, 210_000
55
+
56
+ val = obj.calc_func(best)
57
+ val.must_be_close_to 0.0
58
+ val.must_be :>=, 0.0
59
+
60
+ # We don't test closeness since it might take very long for 30D to get close on all dimensions.
61
+ end
62
+ end
63
+
64
+ describe "Levi13 function" do
65
+ it 'can optimize the Levi13 function' do
66
+ objective = MinLevi13.new
67
+ ss = objective.search_space
68
+ de = DEOptimizer.new(objective, ss, {:verbose => false,
69
+ :maxNumSteps => 7_500})
70
+ best = de.optimize().to_a
71
+
72
+ val = objective.calc_func(best)
73
+ val.must_be_close_to objective.minimum
74
+ val.must_be :>=, objective.minimum
75
+
76
+ best.must_be_close_to_one_solution_of objective, 0.01
77
+ end
78
+ end
79
+
80
+ describe "Beale function" do
81
+ it 'can optimize the Beale function' do
82
+ objective = MinBeale.new
83
+ ss = objective.search_space
84
+ de = DEOptimizer.new(objective, ss, {:verbose => false,
85
+ :maxNumSteps => 7_500})
86
+ best = de.optimize().to_a
87
+
88
+ val = objective.calc_func(best)
89
+ val.must_be_close_to objective.minimum
90
+ val.must_be :>=, objective.minimum
91
+
92
+ best.must_be_close_to_one_solution_of objective, 0.01
93
+ end
94
+ end
95
+
96
+ describe "Easom function" do
97
+ it 'can optimize the Easom function' do
98
+ objective = MinEasom.new
99
+ ss = objective.search_space
100
+ # Why can't we do this in 25_000 evals anymore? We did it before. Repeatedly. Very strange.
101
+ de = DEOptimizer.new(objective, ss, {:verbose => true,
102
+ :maxNumSteps => 25_000, :printFrequency => 0.0,
103
+ :samplerRadius => 5})
104
+ best = de.optimize().to_a
105
+
106
+ val = objective.calc_func(best)
107
+ val.must_be_close_to objective.minimum
108
+ val.must_be :>=, objective.minimum
109
+
110
+ best.must_be_close_to_one_solution_of objective, 0.01
111
+ end
112
+ end
@@ -32,11 +32,6 @@ class TestArrayBasicStats < MiniTest::Unit::TestCase
32
32
  assert_equal 2.0, [2, 4, 4, 4, 5, 5, 7, 9].stdev
33
33
  end
34
34
 
35
- def test_root_mean_square
36
- assert_equal Math.sqrt((1*1 + 2*2)/2.0), [1, 2].root_mean_square
37
- assert_equal Math.sqrt((10*10 + 243*243)/2.0), [10, 243].rms
38
- end
39
-
40
35
  def test_weighted_sum_one_element
41
36
  assert_equal 1, [1].weighted_sum([1])
42
37
  assert_equal 2, [1].weighted_sum([2])
@@ -88,15 +83,28 @@ describe "Basic statistics" do
88
83
  end
89
84
  end
90
85
 
91
- describe "rms_from_scalar" do
92
- it "is the same as rms if scalar is 0.0" do
93
- a = [1,2,3,4,5]
94
- a.rms_from_scalar(0.0).must_be_within_delta a.rms
86
+ describe "Root mean square" do
87
+ it 'can calculate the RMS on an array of numbers' do
88
+ [1, 2].root_mean_square.must_equal Math.sqrt((1*1 + 2*2)/2.0)
89
+ [10, 243].rms.must_equal Math.sqrt((10*10 + 243*243)/2.0)
90
+ end
91
+
92
+ describe "rms_from_scalar" do
93
+ it "is the same as rms if scalar is 0.0" do
94
+ a = [1,2,3,4,5]
95
+ a.rms_from_scalar(0.0).must_be_within_delta a.rms
96
+ end
97
+
98
+ it "is correct for concrete example" do
99
+ a = [1,2]
100
+ a.rms_from_scalar(1.5).must_equal Math.sqrt( (0.5**2 + 0.5**2)/2 )
101
+ end
95
102
  end
96
103
 
97
- it "is correct for concrete example" do
98
- a = [1,2]
99
- a.rms_from_scalar(1.5).must_equal Math.sqrt( (0.5**2 + 0.5**2)/2 )
104
+ it 'can calculate the rms between two arrays' do
105
+ a = [1, 2, 3]
106
+ b = [2, 4, 7]
107
+ a.rms_from(b).must_equal Math.sqrt((1-2)**2 + (2-4)**2 + (3-7)**2)
100
108
  end
101
109
  end
102
110
 
data/test/test_logger.rb CHANGED
@@ -6,7 +6,7 @@ describe 'Logger' do
6
6
  before do
7
7
  @sio = StringIO.new
8
8
  @l = FeldtRuby::Logger.new @sio, {:verbose => true,
9
- :print_frequency => 0.0 # So everything is logged
9
+ :printFrequency => 0.0 # So everything is logged
10
10
  }
11
11
  end
12
12
 
@@ -55,7 +55,7 @@ describe 'Logger with a non-zero print frequency' do
55
55
  before do
56
56
  @sio = StringIO.new
57
57
  @l = FeldtRuby::Logger.new @sio, {:verbose => true,
58
- :print_frequency => 0.1
58
+ :printFrequency => 0.1
59
59
  }
60
60
  end
61
61
 
@@ -96,7 +96,7 @@ describe 'Logger with two IO output streams' do
96
96
  it 'logs to all streams of more than one' do
97
97
  sio1 = StringIO.new
98
98
  l = FeldtRuby::Logger.new sio1, {:verbose => true,
99
- :print_frequency => 0.0
99
+ :printFrequency => 0.0
100
100
  }
101
101
  sio2 = StringIO.new
102
102
  l.add_io sio2
@@ -118,7 +118,7 @@ describe 'Logger with two IO output streams' do
118
118
  it 'can add filenames to which log output should be written' do
119
119
  sio1 = StringIO.new
120
120
  l = FeldtRuby::Logger.new sio1, {:verbose => true,
121
- :print_frequency => 0.0
121
+ :printFrequency => 0.0
122
122
  }
123
123
 
124
124
  filename = "temp390580943850834.log"