ai4ruby 1.11

Files changed (79)
  1. data/README.rdoc +47 -0
  2. data/examples/classifiers/id3_data.csv +121 -0
  3. data/examples/classifiers/id3_example.rb +29 -0
  4. data/examples/classifiers/naive_bayes_data.csv +11 -0
  5. data/examples/classifiers/naive_bayes_example.rb +16 -0
  6. data/examples/classifiers/results.txt +31 -0
  7. data/examples/genetic_algorithm/genetic_algorithm_example.rb +37 -0
  8. data/examples/genetic_algorithm/travel_cost.csv +16 -0
  9. data/examples/neural_network/backpropagation_example.rb +67 -0
  10. data/examples/neural_network/patterns_with_base_noise.rb +68 -0
  11. data/examples/neural_network/patterns_with_noise.rb +66 -0
  12. data/examples/neural_network/training_patterns.rb +68 -0
  13. data/examples/neural_network/xor_example.rb +35 -0
  14. data/examples/som/som_data.rb +156 -0
  15. data/examples/som/som_multi_node_example.rb +22 -0
  16. data/examples/som/som_single_example.rb +24 -0
  17. data/lib/ai4r.rb +33 -0
  18. data/lib/ai4r/classifiers/classifier.rb +62 -0
  19. data/lib/ai4r/classifiers/hyperpipes.rb +118 -0
  20. data/lib/ai4r/classifiers/ib1.rb +121 -0
  21. data/lib/ai4r/classifiers/id3.rb +326 -0
  22. data/lib/ai4r/classifiers/multilayer_perceptron.rb +135 -0
  23. data/lib/ai4r/classifiers/naive_bayes.rb +259 -0
  24. data/lib/ai4r/classifiers/one_r.rb +110 -0
  25. data/lib/ai4r/classifiers/prism.rb +197 -0
  26. data/lib/ai4r/classifiers/zero_r.rb +73 -0
  27. data/lib/ai4r/clusterers/average_linkage.rb +59 -0
  28. data/lib/ai4r/clusterers/bisecting_k_means.rb +93 -0
  29. data/lib/ai4r/clusterers/centroid_linkage.rb +66 -0
  30. data/lib/ai4r/clusterers/clusterer.rb +61 -0
  31. data/lib/ai4r/clusterers/complete_linkage.rb +67 -0
  32. data/lib/ai4r/clusterers/diana.rb +139 -0
  33. data/lib/ai4r/clusterers/k_means.rb +126 -0
  34. data/lib/ai4r/clusterers/median_linkage.rb +61 -0
  35. data/lib/ai4r/clusterers/single_linkage.rb +194 -0
  36. data/lib/ai4r/clusterers/ward_linkage.rb +64 -0
  37. data/lib/ai4r/clusterers/ward_linkage_hierarchical.rb +31 -0
  38. data/lib/ai4r/clusterers/weighted_average_linkage.rb +61 -0
  39. data/lib/ai4r/data/data_set.rb +266 -0
  40. data/lib/ai4r/data/parameterizable.rb +64 -0
  41. data/lib/ai4r/data/proximity.rb +100 -0
  42. data/lib/ai4r/data/statistics.rb +77 -0
  43. data/lib/ai4r/experiment/classifier_evaluator.rb +95 -0
  44. data/lib/ai4r/genetic_algorithm/genetic_algorithm.rb +270 -0
  45. data/lib/ai4r/neural_network/backpropagation.rb +326 -0
  46. data/lib/ai4r/neural_network/hopfield.rb +149 -0
  47. data/lib/ai4r/som/layer.rb +68 -0
  48. data/lib/ai4r/som/node.rb +96 -0
  49. data/lib/ai4r/som/som.rb +155 -0
  50. data/lib/ai4r/som/two_phase_layer.rb +90 -0
  51. data/test/classifiers/hyperpipes_test.rb +84 -0
  52. data/test/classifiers/ib1_test.rb +78 -0
  53. data/test/classifiers/id3_test.rb +208 -0
  54. data/test/classifiers/multilayer_perceptron_test.rb +79 -0
  55. data/test/classifiers/naive_bayes_test.rb +43 -0
  56. data/test/classifiers/one_r_test.rb +62 -0
  57. data/test/classifiers/prism_test.rb +85 -0
  58. data/test/classifiers/zero_r_test.rb +49 -0
  59. data/test/clusterers/average_linkage_test.rb +51 -0
  60. data/test/clusterers/bisecting_k_means_test.rb +66 -0
  61. data/test/clusterers/centroid_linkage_test.rb +53 -0
  62. data/test/clusterers/complete_linkage_test.rb +57 -0
  63. data/test/clusterers/diana_test.rb +69 -0
  64. data/test/clusterers/k_means_test.rb +100 -0
  65. data/test/clusterers/median_linkage_test.rb +53 -0
  66. data/test/clusterers/single_linkage_test.rb +122 -0
  67. data/test/clusterers/ward_linkage_hierarchical_test.rb +61 -0
  68. data/test/clusterers/ward_linkage_test.rb +53 -0
  69. data/test/clusterers/weighted_average_linkage_test.rb +53 -0
  70. data/test/data/data_set_test.rb +96 -0
  71. data/test/data/proximity_test.rb +81 -0
  72. data/test/data/statistics_test.rb +65 -0
  73. data/test/experiment/classifier_evaluator_test.rb +76 -0
  74. data/test/genetic_algorithm/chromosome_test.rb +58 -0
  75. data/test/genetic_algorithm/genetic_algorithm_test.rb +81 -0
  76. data/test/neural_network/backpropagation_test.rb +82 -0
  77. data/test/neural_network/hopfield_test.rb +72 -0
  78. data/test/som/som_test.rb +97 -0
  79. metadata +168 -0
data/lib/ai4r/som/layer.rb
@@ -0,0 +1,68 @@
+ # Author:: Thomas Kern
+ # License:: MPL 1.1
+ # Project:: ai4r
+ # Url:: http://ai4r.rubyforge.org/
+ #
+ # You can redistribute it and/or modify it under the terms of
+ # the Mozilla Public License version 1.1 as published by the
+ # Mozilla Foundation at http://www.mozilla.org/MPL/MPL-1.1.txt
+
+ require File.dirname(__FILE__) + '/../data/parameterizable'
+
+ module Ai4r
+
+   module Som
+
+     # Responsible for the implementation of the algorithm's decays.
+     # Currently has methods for the decay of the radius, influence and learning rate.
+     # Has only one phase, which ends after the number of epochs passed in by the Som class.
+     #
+     # = Parameters
+     # * nodes => number of nodes in the SOM (nodes x nodes). Has to be the same number
+     #   you pass to the SOM. Has to be an integer
+     # * radius => the initial radius for the neighborhood
+     # * epochs => number of epochs the algorithm runs, has to be an integer. By default it is set to 100
+     # * learning_rate => sets the initial learning rate
+     class Layer
+
+       include Ai4r::Data::Parameterizable
+
+       parameters_info :nodes => "number of nodes, has to be equal to the som",
+                       :epochs => "number of epochs the algorithm has to run",
+                       :radius => "sets the initial neighborhood radius"
+
+       def initialize(nodes, radius, epochs = 100, learning_rate = 0.7)
+         raise("Too few nodes") if nodes < 3
+
+         @nodes = nodes
+         @epochs = epochs
+         @radius = radius
+         @time_for_epoch = @epochs / Math.log(nodes / 4.0)
+         @time_for_epoch = @epochs + 1.0 if @time_for_epoch < @epochs
+
+         @initial_learning_rate = learning_rate
+       end
+
+       # calculates the influence decay for a certain distance and the current radius
+       # of the epoch
+       def influence_decay(distance, radius)
+         Math.exp(-(distance.to_f ** 2 / 2.0 / radius.to_f ** 2))
+       end
+
+       # calculates the radius decay for the current epoch. Uses @time_for_epoch,
+       # which has to be higher than the number of epochs, otherwise the decay turns negative
+       def radius_decay(epoch)
+         (@radius * (1 - epoch / @time_for_epoch)).round
+       end
+
+       # calculates the learning rate decay. Uses @time_for_epoch again and the same rule applies:
+       # @time_for_epoch has to be higher than the number of epochs, otherwise the decay turns negative
+       def learning_rate_decay(epoch)
+         @initial_learning_rate * (1 - epoch / @time_for_epoch)
+       end
+
+     end
+
+   end
+
+ end
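
To see how these decays behave, here is a small usage sketch (not part of the gem); it assumes the gem's files are on the load path, e.g. via require 'ai4r', and the numbers are purely illustrative:

  require 'ai4r'

  # Single-phase layer for an 8x8 map: initial radius 3, the default 100 epochs
  # and default learning rate 0.7, as in the constructor above.
  layer = Ai4r::Som::Layer.new(8, 3)

  # Radius and learning rate shrink as the epoch counter grows.
  [0, 25, 50, 75].each do |epoch|
    printf("epoch %3d: radius=%d lr=%.3f\n",
           epoch, layer.radius_decay(epoch), layer.learning_rate_decay(epoch))
  end

  # Influence of the best matching unit on a node 2 grid units away,
  # given a current neighborhood radius of 3 (closer nodes get values nearer to 1).
  puts layer.influence_decay(2, 3)   # => roughly 0.8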
data/lib/ai4r/som/node.rb
@@ -0,0 +1,96 @@
+ # Author:: Thomas Kern
+ # License:: MPL 1.1
+ # Project:: ai4r
+ # Url:: http://ai4r.rubyforge.org/
+ #
+ # You can redistribute it and/or modify it under the terms of
+ # the Mozilla Public License version 1.1 as published by the
+ # Mozilla Foundation at http://www.mozilla.org/MPL/MPL-1.1.txt
+
+ require File.dirname(__FILE__) + '/../data/parameterizable'
+ require File.dirname(__FILE__) + '/layer'
+
+ module Ai4r
+
+   module Som
+
+     # This class represents an individual node and is instantiated (nodes * nodes) times
+     #
+     # = Attributes
+     #
+     # * direct access to the x and y values is granted; they show the position of the node in
+     #   the square map
+     # * id => the unique and sequential ID of the node
+     # * weights => values of the current weights are stored in an array of dimension 'dimensions'.
+     #   Weights are of type float
+     # * instantiated_weight => the values of the first instantiation of weights. These values are
+     #   never changed
+
+     class Node
+
+       include Ai4r::Data::Parameterizable
+
+       parameters_info :weights => "holds the current weight",
+                       :instantiated_weight => "holds the very first weight",
+                       :x => "holds the row ID of the unit in the map",
+                       :y => "holds the column ID of the unit in the map",
+                       :id => "id of the node"
+
+       # creates an instance of Node and instantiates the weights.
+       # The parameters are a unique and sequential ID as well as the number of nodes per row/column;
+       # dimensions signals the dimension of the input vector
+       def self.create(id, total, dimensions)
+         n = Node.new
+         n.id = id
+         n.instantiate_weight dimensions
+         n.x = id % total
+         n.y = (id / total.to_f).to_i
+         n
+       end
+
+       # instantiates the weights to the dimension (of the input vector).
+       # For backup reasons, the instantiated weight is stored into @instantiated_weight as well
+       def instantiate_weight(dimensions)
+         @weights = Array.new dimensions
+         @instantiated_weight = Array.new dimensions
+         @weights.each_with_index do |weight, index|
+           @weights[index] = rand
+           @instantiated_weight[index] = @weights[index]
+         end
+       end
+
+       # returns the Euclidean distance between the current weights and the input.
+       # The input is a vector/array of the same size as weights;
+       # the square root is taken of the sum of squared differences
+       def distance_to_input(input)
+         dist = 0
+         input.each_with_index do |i, index|
+           dist += (i - @weights[index]) ** 2
+         end
+
+         Math.sqrt(dist)
+       end
+
+       # returns the distance in square-form from the instance node to the passed node
+       # example:
+       # 2 2 2 2 2
+       # 2 1 1 1 2
+       # 2 1 0 1 2
+       # 2 1 1 1 2
+       # 2 2 2 2 2
+       # 0 being the current node
+       def distance_to_node(node)
+         max((self.x - node.x).abs, (self.y - node.y).abs)
+       end
+
+       private
+
+       def max(a, b)
+         a > b ? a : b
+       end
+
+     end
+
+   end
+
+ end
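
A quick sketch of how the Node API above behaves, under the same require 'ai4r' assumption as the earlier sketch (the coordinates follow directly from Node.create; the input distance depends on the random initial weights):

  # Nodes for a 4x4 map with 3-dimensional weights.
  node_a = Ai4r::Som::Node.create(0, 4, 3)   # id 0 => x = 0, y = 0
  node_b = Ai4r::Som::Node.create(9, 4, 3)   # id 9 => x = 9 % 4 = 1, y = (9 / 4.0).to_i = 2

  # Grid ("square-form") neighborhood distance: max(|dx|, |dy|).
  node_a.distance_to_node(node_b)            # => 2

  # Euclidean distance from the node's random weights to an input vector.
  node_a.distance_to_input([0.5, 0.5, 0.5])  # => a Float that depends on the random weights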
data/lib/ai4r/som/som.rb
@@ -0,0 +1,155 @@
+ # Author:: Thomas Kern
+ # License:: MPL 1.1
+ # Project:: ai4r
+ # Url:: http://ai4r.rubyforge.org/
+ #
+ # You can redistribute it and/or modify it under the terms of
+ # the Mozilla Public License version 1.1 as published by the
+ # Mozilla Foundation at http://www.mozilla.org/MPL/MPL-1.1.txt
+
+ require File.dirname(__FILE__) + '/../data/parameterizable'
+ require File.dirname(__FILE__) + '/layer'
+ require File.dirname(__FILE__) + '/two_phase_layer'
+ require File.dirname(__FILE__) + '/node'
+
+ module Ai4r
+
+   # A self-organizing map (SOM) or self-organizing feature map (SOFM) is a type
+   # of artificial neural network that is trained using unsupervised learning to
+   # produce a low-dimensional (typically two-dimensional), discretized
+   # representation of the input space of the training samples, called a map.
+   #
+   # For more, have a look at http://en.wikipedia.org/wiki/Self-organizing_map
+   # An in-depth explanation is provided by Sandhya Samarasinghe in
+   # 'Neural Networks for Applied Sciences and Engineering'
+   module Som
+
+     # = Introduction
+     #
+     # This is an implementation of a Kohonen Self-Organizing Map
+     #
+     # = Features
+     #
+     # * Square map of configurable size (nodes x nodes)
+     # * Input vectors of any dimension
+     # * Configurable training layer (single-phase or two-phase decay of radius and learning rate)
+     # * Configurable learning rate
+     # * 100% ruby code, no external dependency
+     #
+     # = Parameters
+     # * dim => dimension of the input vector
+     # * number_of_nodes => the number of nodes per row/column (square som).
+     # * layer => instance of a layer-algorithm class
+     #
+     # = About the project
+     # Author:: Thomas Kern
+     # License:: MPL 1.1
+     # Url:: http://ai4r.rubyforge.org
+     class Som
+
+       include Ai4r::Data::Parameterizable
+
+       parameters_info :nodes => "sets the architecture of the map (nodes x nodes)",
+                       :dimension => "sets the dimension of the input",
+                       :layer => "instance of a layer, defines how the training algorithm works",
+                       :epoch => "number of finished epochs"
+
+       def initialize(dim, number_of_nodes, layer)
+         @layer = layer
+         @dimension = dim
+         @number_of_nodes = number_of_nodes
+         @nodes = Array.new(number_of_nodes * number_of_nodes)
+         @epoch = 0
+         @cache = {}
+       end
+
+       # finds the best matching unit (bmu) of a certain input among all the @nodes.
+       # Returns an array of length 2 => [node, distance] (the distance is Euclidean, not
+       # a neighborhood distance)
+       def find_bmu(input)
+         bmu = @nodes.first
+         dist = bmu.distance_to_input input
+         @nodes[1..-1].each do |node|
+           tmp_dist = node.distance_to_input(input)
+           if tmp_dist <= dist
+             dist = tmp_dist
+             bmu = node
+           end
+         end
+         [bmu, dist]
+       end
+
+       # adjusts all nodes within a certain radius to the bmu
+       def adjust_nodes(input, bmu, radius, learning_rate)
+         @nodes.each do |node|
+           dist = node.distance_to_node(bmu[0])
+           next unless dist < radius
+
+           influence = @layer.influence_decay dist, radius
+           node.weights.each_with_index do |weight, index|
+             node.weights[index] += influence * learning_rate * (input[index] - weight)
+           end
+         end
+       end
+
+       # main method for the som. Trains the map with the passed data vectors;
+       # calls train_step as long as train_step returns false
+       def train(data)
+         while !train_step(data)
+         end
+       end
+
+       # calculates the global distance error for all data entries
+       def global_error(data)
+         data.inject(0) { |sum, entry| sum + find_bmu(entry)[1] ** 2 }
+       end
+
+       # trains the map with the data as long as @epoch is smaller than the epoch-value of
+       # @layer.
+       # Returns true once @epoch has reached the fixed epoch-value in @layer, otherwise false.
+       # 1 is added to @epoch at each method call;
+       # the radius and learning rate are decreased at each method call/epoch as well
+       def train_step(data)
+         return true if @epoch >= @layer.epochs
+
+         radius = @layer.radius_decay @epoch
+         learning_rate = @layer.learning_rate_decay @epoch
+
+         data.each do |entry|
+           adjust_nodes entry, find_bmu(entry), radius, learning_rate
+         end
+
+         @epoch += 1
+         false
+       end
+
+       # returns the node at position (x,y) in the square map
+       def get_node(x, y)
+         raise(Exception.new) if check_param_for_som(x, y)
+         @nodes[y + x * @number_of_nodes]
+       end
+
+       # initializes the map by creating (@number_of_nodes * @number_of_nodes) nodes
+       def initiate_map
+         @nodes.each_with_index do |node, i|
+           @nodes[i] = Node.create i, @number_of_nodes, @dimension
+         end
+       end
+
+       private
+
+       # checks whether or not there is a node in the map at the coordinates (x,y).
+       # x is the row, y the column indicator
+       def check_param_for_som(x, y)
+         y > @number_of_nodes - 1 || x > @number_of_nodes - 1 || x < 0 || y < 0
+       end
+
+     end
+
+   end
+
+ end
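
Putting the pieces together, a minimal training sketch could look like the following (illustrative data and sizes; it assumes the SOM classes are loaded, e.g. via require 'ai4r'):

  require 'ai4r'

  # Four 3-dimensional sample vectors (values in 0..1, matching the random initial weights).
  data = [[0.1, 0.9, 0.2], [0.8, 0.1, 0.1], [0.2, 0.2, 0.9], [0.9, 0.8, 0.1]]

  layer = Ai4r::Som::Layer.new(8, 3)        # single-phase decay, default 100 epochs
  som   = Ai4r::Som::Som.new(3, 8, layer)   # input dimension, nodes per side, layer
  som.initiate_map                          # creates the 8 x 8 = 64 nodes

  puts "error before training: #{som.global_error(data)}"
  som.train(data)                           # calls train_step until layer.epochs is reached
  puts "error after training:  #{som.global_error(data)}"

  bmu, dist = som.find_bmu([0.15, 0.85, 0.2])
  puts "best matching unit at (#{bmu.x}, #{bmu.y}), distance #{dist}"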
data/lib/ai4r/som/two_phase_layer.rb
@@ -0,0 +1,90 @@
+ # Author:: Thomas Kern
+ # License:: MPL 1.1
+ # Project:: ai4r
+ # Url:: http://ai4r.rubyforge.org/
+ #
+ # You can redistribute it and/or modify it under the terms of
+ # the Mozilla Public License version 1.1 as published by the
+ # Mozilla Foundation at http://www.mozilla.org/MPL/MPL-1.1.txt
+
+ require File.dirname(__FILE__) + '/../data/parameterizable'
+ require File.dirname(__FILE__) + '/layer'
+
+ module Ai4r
+
+   module Som
+
+     # Responsible for the implementation of the algorithm's decays; extends the class Layer and
+     # currently overrides the radius and learning rate decay methods of Layer.
+     # Has two phases: phase one decays both the learning rate and the radius. The number
+     # of epochs for both phases can be passed, and the total number of epochs is the sum of the epochs
+     # of phase one and phase two.
+     # In the second phase, the learning rate and radius decay is steady, normally set to a small number (e.g. 0.01)
+     #
+     # = Parameters
+     # * nodes => number of nodes in the SOM (nodes x nodes). Has to be the same number
+     #   you pass to the SOM. Has to be an integer
+     # * radius => the initial radius for the neighborhood
+     # * phase_one => number of epochs for phase one, has to be an integer. By default it is set to 150
+     # * phase_two => number of epochs for phase two, has to be an integer. By default it is set to 100
+     # * learning_rate => sets the initial learning rate
+     # * phase_one_learning_rate => sets the learning rate for phase one
+     # * phase_two_learning_rate => sets the learning rate for phase two
+
+     class TwoPhaseLayer < Layer
+
+       def initialize(nodes, learning_rate = 0.9, phase_one = 150, phase_two = 100,
+                      phase_one_learning_rate = 0.1, phase_two_learning_rate = 0)
+         super nodes, nodes, phase_one + phase_two, learning_rate
+         @phase_one = phase_one
+         @phase_two = phase_two
+         @lr = @initial_learning_rate
+
+         @phase_one_learning_rate = phase_one_learning_rate
+         @phase_two_learning_rate = phase_two_learning_rate
+
+         @radius_reduction = @phase_one / (nodes / 2.0 - 1) + 1
+         @delta_lr = (@lr - @phase_one_learning_rate) / @phase_one
+         @radius = (nodes / 2.0).to_i
+       end
+
+       # two different values will be returned, depending on the phase:
+       # in phase one, the radius is incrementally reduced by 1 every @radius_reduction epochs;
+       # in phase two, the radius is fixed to 1
+       def radius_decay(epoch)
+         if epoch > @phase_one
+           return 1
+         else
+           if (epoch % @radius_reduction) == 0
+             @radius -= 1
+           end
+           @radius
+         end
+
+       end
+
+       # two different values will be returned, depending on the phase:
+       # in phase one, the rate is incrementally reduced every time this method is called.
+       # On the switch of phases, the learning rate is reset and the delta_lr (which signals
+       # the decay value of the learning rate) is reset as well.
+       # In phase two, the newly reset delta_lr is used to incrementally reduce the
+       # learning rate
+       def learning_rate_decay(epoch)
+         if epoch < @phase_one
+           @lr -= @delta_lr
+           return @lr
+         elsif epoch == @phase_one
+           @lr = @phase_one_learning_rate
+           @delta_lr = (@phase_one_learning_rate - @phase_two_learning_rate) / @phase_two
+           return @lr
+         else
+           @lr -= @delta_lr
+         end
+       end
+
+     end
+
+   end
+
+ end
+
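
A short sketch of how the two phases play out when the decay methods are called once per epoch, the way Som#train_step does (both methods are stateful, so they are meant to be invoked sequentially; the printed values are only indicative):

  layer = Ai4r::Som::TwoPhaseLayer.new(10)   # 10x10 map: 150 epochs in phase one, 100 in phase two

  layer.epochs.times do |epoch|
    radius = layer.radius_decay(epoch)          # shrinks stepwise from nodes/2 down to 1
    lr     = layer.learning_rate_decay(epoch)   # 0.9 -> 0.1 in phase one, then 0.1 -> ~0
    printf("epoch %3d: radius=%d lr=%.3f\n", epoch, radius, lr) if epoch % 50 == 0
  end

  # The layer is then passed to the map exactly like the single-phase Layer, e.g.:
  # som = Ai4r::Som::Som.new(4, 10, Ai4r::Som::TwoPhaseLayer.new(10))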
data/test/classifiers/hyperpipes_test.rb
@@ -0,0 +1,84 @@
+ # Author:: Sergio Fierens
+ # License:: MPL 1.1
+ # Project:: ai4r
+ # Url:: http://ai4r.rubyforge.org/
+ #
+ # You can redistribute it and/or modify it under the terms of
+ # the Mozilla Public License version 1.1 as published by the
+ # Mozilla Foundation at http://www.mozilla.org/MPL/MPL-1.1.txt
+
+ require File.dirname(__FILE__) + '/../../lib/ai4r/classifiers/hyperpipes'
+ require 'test/unit'
+
+ class Ai4r::Classifiers::Hyperpipes
+   attr_accessor :data_set, :pipes
+ end
+
+ include Ai4r::Classifiers
+ include Ai4r::Data
+
+ class HyperpipesTest < Test::Unit::TestCase
+
+   @@data_labels = [ 'city', 'age', 'gender', 'marketing_target' ]
+
+   @@data_items = [['New York', 25, 'M', 'Y'],
+                   ['New York', 23, 'M', 'Y'],
+                   ['New York', 18, 'M', 'Y'],
+                   ['Chicago',  43, 'M', 'Y'],
+                   ['New York', 34, 'F', 'N'],
+                   ['Chicago',  33, 'F', 'Y'],
+                   ['New York', 31, 'F', 'N'],
+                   ['Chicago',  55, 'M', 'N'],
+                   ['New York', 58, 'F', 'N'],
+                   ['New York', 59, 'M', 'N'],
+                   ['Chicago',  71, 'M', 'N'],
+                   ['New York', 60, 'F', 'N'],
+                   ['Chicago',  85, 'F', 'Y']]
+
+   def setup
+     Hyperpipes.send(:public, *Hyperpipes.protected_instance_methods)
+     @data_set = DataSet.new(:data_items => @@data_items, :data_labels => @@data_labels)
+   end
+
+   def test_build_pipe
+     classifier = Hyperpipes.new
+     assert_equal [{}, {:max => -1.0/0, :min => 1.0/0}, {}], classifier.build_pipe(@data_set)
+   end
+
+   def test_build
+     assert_raise(ArgumentError) { Hyperpipes.new.build(DataSet.new) }
+     classifier = Hyperpipes.new.build(@data_set)
+     assert classifier.pipes.include?("Y")
+     assert classifier.pipes.include?("N")
+   end
+
+   def test_eval
+     classifier = Hyperpipes.new.build(@data_set)
+     assert classifier
+     assert_equal('N', classifier.eval(['Chicago', 55, 'M']))
+     assert_equal('N', classifier.eval(['New York', 35, 'F']))
+     assert_equal('Y', classifier.eval(['New York', 25, 'M']))
+     assert_equal('Y', classifier.eval(['Chicago', 85, 'F']))
+   end
+
+   def test_get_rules
+     classifier = Hyperpipes.new.build(@data_set)
+     age = 28
+     gender = "M"
+     marketing_target = nil
+     eval classifier.get_rules
+     assert_equal 'Y', marketing_target
+     age = 44
+     city = 'New York'
+     eval classifier.get_rules
+     assert_equal 'N', marketing_target
+   end
+
+ end
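
Outside the test harness, the classifier under test can be driven directly. A hedged sketch that reuses only the API exercised above (the DataSet arguments mirror the fixture; as in the test, the last column is the attribute being predicted):

  require 'ai4r'

  data_set = Ai4r::Data::DataSet.new(
    :data_labels => ['city', 'age', 'gender', 'marketing_target'],
    :data_items  => [['New York', 25, 'M', 'Y'],
                     ['Chicago',  55, 'M', 'N'],
                     ['New York', 34, 'F', 'N'],
                     ['Chicago',  85, 'F', 'Y']])

  classifier = Ai4r::Classifiers::Hyperpipes.new.build(data_set)

  classifier.eval(['Chicago', 60, 'M'])   # => the predicted marketing_target ('Y' or 'N')
  puts classifier.get_rules               # Ruby source for the induced rules, as eval'd in test_get_rules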