fselector 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. data/LICENSE +21 -0
  2. data/README.md +195 -0
  3. data/lib/fselector.rb +41 -0
  4. data/lib/fselector/algo_continuous/PMetric.rb +51 -0
  5. data/lib/fselector/algo_continuous/ReliefF_c.rb +190 -0
  6. data/lib/fselector/algo_continuous/Relief_c.rb +150 -0
  7. data/lib/fselector/algo_continuous/TScore.rb +52 -0
  8. data/lib/fselector/algo_continuous/discretizer.rb +219 -0
  9. data/lib/fselector/algo_continuous/normalizer.rb +59 -0
  10. data/lib/fselector/algo_discrete/Accuracy.rb +35 -0
  11. data/lib/fselector/algo_discrete/AccuracyBalanced.rb +37 -0
  12. data/lib/fselector/algo_discrete/BiNormalSeparation.rb +45 -0
  13. data/lib/fselector/algo_discrete/ChiSquaredTest.rb +69 -0
  14. data/lib/fselector/algo_discrete/CorrelationCoefficient.rb +42 -0
  15. data/lib/fselector/algo_discrete/DocumentFrequency.rb +36 -0
  16. data/lib/fselector/algo_discrete/F1Measure.rb +41 -0
  17. data/lib/fselector/algo_discrete/FishersExactTest.rb +47 -0
  18. data/lib/fselector/algo_discrete/GMean.rb +37 -0
  19. data/lib/fselector/algo_discrete/GSSCoefficient.rb +43 -0
  20. data/lib/fselector/algo_discrete/GiniIndex.rb +44 -0
  21. data/lib/fselector/algo_discrete/InformationGain.rb +96 -0
  22. data/lib/fselector/algo_discrete/MatthewsCorrelationCoefficient.rb +45 -0
  23. data/lib/fselector/algo_discrete/McNemarsTest.rb +57 -0
  24. data/lib/fselector/algo_discrete/MutualInformation.rb +42 -0
  25. data/lib/fselector/algo_discrete/OddsRatio.rb +46 -0
  26. data/lib/fselector/algo_discrete/OddsRatioNumerator.rb +41 -0
  27. data/lib/fselector/algo_discrete/Power.rb +46 -0
  28. data/lib/fselector/algo_discrete/Precision.rb +31 -0
  29. data/lib/fselector/algo_discrete/ProbabilityRatio.rb +41 -0
  30. data/lib/fselector/algo_discrete/Random.rb +40 -0
  31. data/lib/fselector/algo_discrete/ReliefF_d.rb +173 -0
  32. data/lib/fselector/algo_discrete/Relief_d.rb +135 -0
  33. data/lib/fselector/algo_discrete/Sensitivity.rb +38 -0
  34. data/lib/fselector/algo_discrete/Specificity.rb +35 -0
  35. data/lib/fselector/base.rb +322 -0
  36. data/lib/fselector/base_continuous.rb +25 -0
  37. data/lib/fselector/base_discrete.rb +355 -0
  38. data/lib/fselector/ensemble.rb +181 -0
  39. data/lib/fselector/fileio.rb +455 -0
  40. data/lib/fselector/util.rb +707 -0
  41. metadata +86 -0
data/lib/fselector/algo_discrete/Random.rb
@@ -0,0 +1,40 @@
+ #
+ # FSelector: a Ruby gem for feature selection and ranking
+ #
+ module FSelector
+   #
+   # Random (Rand), of no practical use but can serve as a baseline
+   #
+   # Rand = random numbers within [0, 1)
+   #
+   # ref: [An extensive empirical study of feature selection metrics
+   # for text classification][url]
+   # [url]: http://dl.acm.org/citation.cfm?id=944974
+   #
+   class Random < BaseDiscrete
+     #
+     # initialize from an existing data structure
+     #
+     # @param [Integer] seed seed for the random number
+     #   generator, provided for reproducible results;
+     #   otherwise the current time is used as seed
+     #
+     def initialize(seed=nil, data=nil)
+       super(data)
+       srand(seed) if seed
+     end
+
+     private
+
+     # calculate contribution of each feature (f) for each class (k)
+     def calc_contribution(f)
+       each_class do |k|
+         set_feature_score(f, k, rand)
+       end
+     end # calc_contribution
+
+   end # class
+
+ end # module
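
FSelector's rankers all operate on the same in-memory structure: a Hash mapping each class label to an array of samples, where each sample is itself a Hash of feature => value pairs (see base.rb below). A minimal usage sketch for the Random baseline above, using hypothetical toy data:

    require 'fselector'

    # toy data: { class_label => [ { feature => value, ... }, ... ] }
    data = {
      :yes => [ {:f1 => 1, :f2 => 0}, {:f1 => 1, :f2 => 1} ],
      :no  => [ {:f1 => 0, :f2 => 0}, {:f1 => 0, :f2 => 1} ]
    }

    r = FSelector::Random.new(42, data) # fixed seed => reproducible scores
    r.print_feature_ranks               # ranks are random by design

The seed only matters for reproducibility; with no seed, the current time seeds the generator, per the class docs.
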
data/lib/fselector/algo_discrete/ReliefF_d.rb
@@ -0,0 +1,173 @@
+ #
+ # FSelector: a Ruby gem for feature selection and ranking
+ #
+ module FSelector
+   #
+   # extended Relief algorithm for discrete features (ReliefF_d)
+   #
+   # @note applicable to multi-class problems with missing data
+   #
+   # ref: [Estimating Attributes: Analysis and Extensions of RELIEF][url]
+   # [url]: http://www.springerlink.com/content/fp23jh2h0426ww45/
+   #
+   class ReliefF_d < BaseDiscrete
+     #
+     # new()
+     #
+     # @param [Integer] m number of samples to be used
+     #   for estimating feature contribution; at most
+     #   the number of training samples
+     # @param [Integer] k number of nearest neighbors per class
+     # @param [Hash] data existing data structure
+     #
+     def initialize(m=nil, k=10, data=nil)
+       super(data)
+       @m = m # use all samples by default
+       @k = (k || 10) # default 10
+     end
+
+     private
+
+     # calculate contribution of each feature (f) across all classes
+     def calc_contribution(f)
+       score = 0.0
+
+       # use all samples if @m not provided
+       @m = get_sample_size if not @m
+
+       @m.times do
+         # pick a sample at random
+         rs, rk = pick_a_sample_at_random
+
+         # find the k nearest neighbors within each class
+         nbrs = find_k_nearest_nb(rs, rk)
+
+         # calc contribution from neighbors
+         score += calc_score(f, rs, rk, nbrs)
+       end
+
+       s = score / @m
+
+       set_feature_score(f, :BEST, s)
+     end # calc_contribution
+
+     # pick a sample at random
+     def pick_a_sample_at_random
+       rk = get_classes[rand(get_classes.size)]
+       rks = get_data[rk]
+
+       [ rks[rand(rks.size)], rk ]
+     end # pick_a_sample_at_random
+
+     # find the k nearest neighbors of sample (rs) within each class
+     def find_k_nearest_nb(rs, rk)
+       nbrs = {}
+
+       each_class do |k|
+         res = []
+
+         get_data[k].each do |s|
+           next if s == rs # exclude self
+
+           d = diff_sample(rs, s, rk, k)
+           res << [d, s]
+         end
+
+         # keep the @k samples with the smallest distances
+         nbrs[k] = (res.sort { |x, y| x[0] <=> y[0] }[0...@k]).collect { |z| z[1] }
+       end
+
+       nbrs
+     end # find_k_nearest_nb
+
+     # difference between two samples
+     def diff_sample(s1, s2, k1, k2)
+       d = 0.0
+
+       each_feature do |f|
+         d += diff_feature(f, s1, s2, k1, k2)**2
+       end
+
+       d
+     end # diff_sample
+
+     # difference between the feature (f) of two samples
+     def diff_feature(f, s1, s2, k1, k2)
+       d = 0.0
+
+       if s1.has_key?(f) and s2.has_key?(f) # no missing value
+         d = (s1[f] == s2[f]) ? 0.0 : 1.0
+       elsif not s1.has_key?(f) and not s2.has_key?(f) # two missing values
+         fvs = get_feature_values(f).uniq
+         fvs.each do |mv|
+           d -= calc_p(f, mv, k1)*calc_p(f, mv, k2)
+         end
+         d += 1
+       elsif not s1.has_key?(f) # s1: one missing value
+         # diff(f, s1, s2) = 1 - P(value(f, s2)|class(s1))
+         d = 1 - calc_p(f, s2[f], k1)
+       else # s2: one missing value
+         # diff(f, s1, s2) = 1 - P(value(f, s1)|class(s2))
+         d = 1 - calc_p(f, s1[f], k2)
+       end
+
+       d
+     end # diff_feature
+
+     # calc probability of feature (f) taking value (mv) within class (k)
+     def calc_p(f, mv, k)
+       # build the cache on first call; ff/kk avoid shadowing the f/k arguments
+       if not @f2mvp
+         @f2mvp = {}
+
+         each_feature do |ff|
+           @f2mvp[ff] = {}
+
+           each_class do |kk|
+             @f2mvp[ff][kk] = {}
+
+             fvs = get_feature_values(ff).uniq
+             fvs.each do |v|
+               n = 0.0
+
+               get_data[kk].each do |s|
+                 n += 1 if s.has_key?(ff) and s[ff] == v
+               end
+
+               @f2mvp[ff][kk][v] = n/get_data[kk].size
+             end
+           end
+         end
+       end
+
+       @f2mvp[f][k][mv]
+     end
+
+     # calc feature (f) contribution from neighbors
+     def calc_score(f, rs, rk, nbrs)
+       score = 0.0
+
+       nbrs.each do |k, nbs|
+         if k == rk # near hit
+           nbs.each do |s|
+             score -= (diff_feature(f, rs, s, rk, k)**2/nbs.size)
+           end
+         else # near miss
+           nbs.each do |s|
+             score += (get_data[k].size/get_sample_size.to_f *
+               diff_feature(f, rs, s, rk, k)**2/nbs.size)
+           end
+         end
+       end
+
+       score
+     end
+
+   end # class
+
+ end # module
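
The missing-value handling in diff_feature above follows Kononenko's ReliefF: when both samples lack feature f, the difference is 1 - sum over observed values v of P(v|class(s1)) * P(v|class(s2)); when only one sample lacks it, the difference is 1 minus the conditional probability of the other sample's value. As a worked example with hypothetical probabilities for a binary feature: if P(0|k1)=0.8, P(1|k1)=0.2 and P(0|k2)=0.5, P(1|k2)=0.5, then diff = 1 - (0.8*0.5 + 0.2*0.5) = 0.5.

A minimal usage sketch on toy multi-class data where one sample lacks :f2 entirely (values assumed for illustration, not from the gem's docs):

    require 'fselector'

    data = {
      :a => [ {:f1 => 1, :f2 => 0}, {:f1 => 1} ],  # second sample misses :f2
      :b => [ {:f1 => 0, :f2 => 1}, {:f1 => 0, :f2 => 0} ],
      :c => [ {:f1 => 1, :f2 => 1} ]
    }

    # m = nil => one sampling round per training sample; k = 2 neighbors per class
    rf = FSelector::ReliefF_d.new(nil, 2, data)
    rf.print_feature_scores
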
data/lib/fselector/algo_discrete/Relief_d.rb
@@ -0,0 +1,135 @@
+ #
+ # FSelector: a Ruby gem for feature selection and ranking
+ #
+ module FSelector
+   #
+   # Relief algorithm for discrete features (Relief_d)
+   #
+   # @note Relief is applicable only to two-class problems without missing data
+   #
+   # ref: [The Feature Selection Problem: Traditional Methods
+   # and a New Algorithm][url]
+   # [url]: http://www.aaai.org/Papers/AAAI/1992/AAAI92-020.pdf
+   #
+   class Relief_d < BaseDiscrete
+     #
+     # new()
+     #
+     # @param [Integer] m number of samples to be used
+     #   for estimating feature contribution; at most
+     #   the number of training samples
+     # @param [Hash] data existing data structure
+     #
+     def initialize(m=nil, data=nil)
+       super(data)
+       @m = m # use all samples by default
+     end
+
+     private
+
+     # calculate contribution of each feature (f) across all classes
+     def calc_contribution(f)
+       if get_classes.size != 2
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "Relief is applicable only to two-class problems without missing data"
+       end
+
+       # use all samples if @m not provided
+       @m = get_sample_size if not @m
+
+       k1, k2 = get_classes
+       score = 0.0
+
+       @m.times do
+         # pick a sample at random
+         rs, rk = pick_a_sample_at_random
+
+         # find the nearest neighbor within each class
+         nbrs = find_nearest_nb(rs, rk)
+
+         # calc contribution from neighbors
+         score += calc_score(f, rs, rk, nbrs)
+       end
+
+       s = score / @m
+
+       set_feature_score(f, :BEST, s)
+     end # calc_contribution
+
+     # pick a sample at random
+     def pick_a_sample_at_random
+       rk = get_classes[rand(get_classes.size)]
+       rks = get_data[rk]
+
+       [ rks[rand(rks.size)], rk ]
+     end # pick_a_sample_at_random
+
+     # find the nearest neighbor of sample (rs) within each class
+     def find_nearest_nb(rs, rk)
+       nbrs = {}
+
+       each_class do |k|
+         nb, dmin = nil, Float::INFINITY
+         get_data[k].each do |s|
+           next if s == rs # exclude self
+           d = diff_sample(rs, s)
+           if d < dmin
+             dmin = d
+             nb = s
+           end
+         end
+
+         nbrs[k] = nb
+       end
+
+       nbrs
+     end # find_nearest_nb
+
+     # difference between two samples
+     def diff_sample(s1, s2)
+       d = 0.0
+
+       each_feature do |f|
+         d += diff_feature(f, s1, s2)**2
+       end
+
+       d
+     end # diff_sample
+
+     # difference between the feature (f) of two samples
+     def diff_feature(f, s1, s2)
+       if not s1.has_key?(f) or not s2.has_key?(f)
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "Relief does not allow missing values"
+       end
+
+       (s1[f] == s2[f]) ? 0.0 : 1.0
+     end # diff_feature
+
+     # calc feature (f) contribution from neighbors
+     def calc_score(f, rs, rk, nbrs)
+       score = 0.0
+
+       nbrs.each do |k, s|
+         if k == rk # near hit
+           score -= diff_feature(f, rs, s)**2
+         else # near miss
+           score += diff_feature(f, rs, s)**2
+         end
+       end
+
+       score
+     end # calc_score
+
+   end # class
+
+ end # module
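
A minimal sketch for the two-class case Relief_d requires (toy data, no missing values):

    require 'fselector'

    data = {
      :pos => [ {:f1 => 1, :f2 => 0}, {:f1 => 1, :f2 => 1} ],
      :neg => [ {:f1 => 0, :f2 => 1}, {:f1 => 0, :f2 => 0} ]
    }

    r = FSelector::Relief_d.new(nil, data) # nil => use all samples
    r.print_feature_scores

Here :f1 separates the two classes perfectly (near hits always agree on it, near misses always differ), so it should come out with a higher weight than :f2.
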
data/lib/fselector/algo_discrete/Sensitivity.rb
@@ -0,0 +1,38 @@
+ #
+ # FSelector: a Ruby gem for feature selection and ranking
+ #
+ module FSelector
+   #
+   # Sensitivity (SN)
+   #
+   #           TP       A
+   #   SN = ------- = -----
+   #         TP+FN     A+C
+   #
+   class Sensitivity < BaseDiscrete
+
+     private
+
+     # calculate contribution of each feature (f) for each class (k)
+     def calc_contribution(f)
+       each_class do |k|
+         a, c = get_A(f, k), get_C(f, k)
+
+         s = a/(a+c)
+
+         set_feature_score(f, k, s)
+       end
+     end # calc_contribution
+
+   end # class
+
+   # shortcut so that you can use FSelector::SN instead of FSelector::Sensitivity
+   SN = Sensitivity
+   # Sensitivity is also known as Recall
+   Recall = Sensitivity
+
+ end # module
data/lib/fselector/algo_discrete/Specificity.rb
@@ -0,0 +1,35 @@
+ #
+ # FSelector: a Ruby gem for feature selection and ranking
+ #
+ module FSelector
+   #
+   # Specificity (SP)
+   #
+   #           TN       D
+   #   SP = ------- = -----
+   #         TN+FP     B+D
+   #
+   class Specificity < BaseDiscrete
+
+     private
+
+     # calculate contribution of each feature (f) for each class (k)
+     def calc_contribution(f)
+       each_class do |k|
+         b, d = get_B(f, k), get_D(f, k)
+
+         s = d/(b+d)
+
+         set_feature_score(f, k, s)
+       end
+     end # calc_contribution
+
+   end # class
+
+   # shortcut so that you can use FSelector::SP instead of FSelector::Specificity
+   SP = Specificity
+
+ end # module
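
Both Sensitivity and Specificity read off the per-class contingency table that BaseDiscrete maintains for each feature-class pair: A = true positives, B = false positives, C = false negatives, D = true negatives, as the formulas above state. With hypothetical counts A=8, B=5, C=2, D=15, SN = 8/(8+2) = 0.8 and SP = 15/(5+15) = 0.75.
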
data/lib/fselector/base.rb
@@ -0,0 +1,322 @@
+ #
+ # FSelector: a Ruby gem for feature selection and ranking
+ #
+ module FSelector
+   #
+   # base ranking algorithm
+   #
+   class Base
+     # include module for file reading/writing
+     include FileIO
+
+     # initialize from an existing data structure
+     def initialize(data=nil)
+       @data = data
+       @opts = {} # store non-data information
+     end
+
+     #
+     # iterator for each class
+     #
+     # e.g.
+     #   self.each_class do |k|
+     #     puts k
+     #   end
+     #
+     def each_class
+       if not block_given?
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "block must be given!"
+       else
+         get_classes.each { |k| yield k }
+       end
+     end
+
+     #
+     # iterator for each feature
+     #
+     # e.g.
+     #   self.each_feature do |f|
+     #     puts f
+     #   end
+     #
+     def each_feature
+       if not block_given?
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "block must be given!"
+       else
+         get_features.each { |f| yield f }
+       end
+     end
+
+     #
+     # iterator for each sample with class label
+     #
+     # e.g.
+     #   self.each_sample do |k, s|
+     #     print k
+     #     s.each { |f, v| print " #{v}" }
+     #     puts
+     #   end
+     #
+     def each_sample
+       if not block_given?
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "block must be given!"
+       else
+         get_data.each do |k, samples|
+           samples.each { |s| yield k, s }
+         end
+       end
+     end
+
+     # get classes
+     def get_classes
+       @classes ||= @data.keys
+     end
+
+     # set classes
+     def set_classes(classes)
+       if classes and classes.class == Array
+         @classes = classes
+       else
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "classes must be an Array object!"
+       end
+     end
+
+     # get unique features
+     def get_features
+       @features ||= @data.map { |x| x[1].map { |y| y.keys } }.flatten.uniq
+     end
+
+     #
+     # get feature values
+     #
+     # @param [Symbol] f feature of interest
+     #
+     def get_feature_values(f)
+       @fvs ||= {}
+
+       if not @fvs.has_key? f
+         @fvs[f] = []
+         each_sample do |k, s|
+           @fvs[f] << s[f] if s.has_key? f
+         end
+       end
+
+       @fvs[f]
+     end
+
+     # set features
+     def set_features(features)
+       if features and features.class == Array
+         @features = features
+       else
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "features must be an Array object!"
+       end
+     end
+
+     # get data
+     def get_data
+       @data
+     end
+
+     # set data
+     def set_data(data)
+       if data and data.class == Hash
+         @data = data
+         # clear cached results
+         @classes, @features, @fvs = nil, nil, nil
+         @scores, @ranks, @sz = nil, nil, nil
+       else
+         abort "[#{__FILE__}@#{__LINE__}]: "+
+           "data must be a Hash object!"
+       end
+     end
+
+     # get non-data information
+     def get_opt(key)
+       @opts.has_key?(key) ? @opts[key] : nil
+     end
+
+     # set non-data information as a key-value pair
+     def set_opt(key, value)
+       @opts[key] = value
+     end
+
+     # number of samples
+     def get_sample_size
+       @sz ||= get_data.values.flatten.size
+     end
+
+     #
+     # print feature scores
+     #
+     # @param feat feature of interest (nil for all features)
+     # @param [String] kclass class of interest (nil for all classes)
+     #
+     def print_feature_scores(feat=nil, kclass=nil)
+       scores = get_feature_scores
+
+       scores.each do |f, ks|
+         next if feat and feat != f
+
+         print "#{f} =>"
+         ks.each do |k, s|
+           if kclass
+             print " #{k}->#{s}" if k == kclass
+           else
+             print " #{k}->#{s}"
+           end
+         end
+         puts
+       end
+     end
+
+     # print feature ranks
+     def print_feature_ranks
+       ranks = get_feature_ranks
+
+       ranks.each do |f, r|
+         puts "#{f} => #{r}"
+       end
+     end
+
+     #
+     # get scores of all features for all classes
+     #
+     # @return [Hash] \{ feature =>
+     #   \{ class_1 => score_1, class_2 => score_2, :BEST => score_best } }
+     #
+     def get_feature_scores
+       return @scores if @scores # already done
+
+       each_feature do |f|
+         calc_contribution(f)
+       end
+
+       # record the best score of each feature
+       @scores.each do |f, ks|
+         # the larger, the better
+         @scores[f][:BEST] = ks.values.max
+       end
+
+       @scores
+     end
+
+     # set feature (f) score (s) for class (k)
+     def set_feature_score(f, k, s)
+       @scores ||= {}
+       @scores[f] ||= {}
+       @scores[f][k] = s
+     end
+
+     #
+     # get the ranked features based on their best scores
+     #
+     # @return [Hash] feature ranks
+     #
+     def get_feature_ranks
+       return @ranks if @ranks # already done
+
+       scores = get_feature_scores
+
+       # get the ranked features
+       @ranks = {} # feature => rank
+
+       # the larger the score, the better the rank
+       sorted_features = scores.keys.sort do |x, y|
+         scores[y][:BEST] <=> scores[x][:BEST]
+       end
+
+       sorted_features.each_with_index do |sf, si|
+         @ranks[sf] = si+1
+       end
+
+       @ranks
+     end
+
+     #
+     # reconstruct data with feature scores satisfying cutoff
+     #
+     # @param [String] criterion
+     #   valid criterion can be '>0.5', '>= 0.4', '==2', '<=1' or '<0.2'
+     # @param [Hash] my_scores
+     #   user customized feature scores
+     # @return [Hash] data after feature selection
+     # @note data structure will be altered
+     #
+     def select_data_by_score!(criterion, my_scores=nil)
+       # user scores or internal scores
+       scores = my_scores || get_feature_scores
+
+       my_data = {}
+
+       each_sample do |k, s|
+         my_data[k] ||= []
+         my_s = {}
+
+         s.each do |f, v|
+           my_s[f] = v if eval("#{scores[f][:BEST]} #{criterion}")
+         end
+
+         my_data[k] << my_s if not my_s.empty?
+       end
+
+       set_data(my_data)
+     end
+
+     #
+     # reconstruct data by rank
+     #
+     # @param [String] criterion
+     #   valid criterion can be '>11', '>= 10', '==1', '<=10' or '<20'
+     # @param [Hash] my_ranks
+     #   user customized feature ranks
+     # @return [Hash] data after feature selection
+     # @note data structure will be altered
+     #
+     def select_data_by_rank!(criterion, my_ranks=nil)
+       # user ranks or internal ranks
+       ranks = my_ranks || get_feature_ranks
+
+       my_data = {}
+
+       each_sample do |k, s|
+         my_data[k] ||= []
+         my_s = {}
+
+         s.each do |f, v|
+           my_s[f] = v if eval("#{ranks[f]} #{criterion}")
+         end
+
+         my_data[k] << my_s if not my_s.empty?
+       end
+
+       set_data(my_data)
+     end
+
+   end # class
+
+ end # module
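
Two things the Base API above enables, sketched with hypothetical code. First, a custom ranker only needs a private calc_contribution(f) that records per-class scores via set_feature_score; get_feature_scores then derives the :BEST entry automatically. CoverageCount below is a made-up illustration, not part of the gem:

    require 'fselector'

    module FSelector
      # toy ranker: score a feature by how many samples in each class define it
      class CoverageCount < Base
        private

        # calculate contribution of each feature (f) for each class (k)
        def calc_contribution(f)
          each_class do |k|
            n = get_data[k].count { |s| s.has_key?(f) }
            set_feature_score(f, k, n)
          end
        end
      end
    end

Second, select_data_by_rank! rewrites the data in place: the criterion string is eval'ed against each feature's rank, so '<=1' keeps only the top-ranked feature:

    data = {
      :pos => [ {:f1 => 1, :f2 => 0}, {:f1 => 1, :f2 => 1} ],
      :neg => [ {:f1 => 0, :f2 => 1}, {:f1 => 0, :f2 => 0} ]
    }

    r = FSelector::Relief_d.new(nil, data)
    r.select_data_by_rank!('<=1')
    p r.get_data # each sample now holds only the single best-ranked feature
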