kvg_character_recognition 0.1.3 → 0.2.0
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/kvg_character_recognition.gemspec +1 -1
- data/lib/kvg_character_recognition.rb +16 -29
- data/lib/kvg_character_recognition/datastore.rb +0 -2
- data/lib/kvg_character_recognition/heatmap_feature.rb +50 -0
- data/lib/kvg_character_recognition/kvg_parser.rb +272 -0
- data/lib/kvg_character_recognition/non_structural_feature.rb +72 -0
- data/lib/kvg_character_recognition/normalization.rb +195 -0
- data/lib/kvg_character_recognition/preprocessor.rb +28 -212
- data/lib/kvg_character_recognition/recognizer.rb +16 -18
- data/lib/kvg_character_recognition/template.rb +43 -0
- data/lib/kvg_character_recognition/trainer.rb +50 -73
- data/lib/kvg_character_recognition/utils.rb +0 -311
- data/lib/kvg_character_recognition/version.rb +1 -1
- metadata +7 -2
data/lib/kvg_character_recognition/normalization.rb
@@ -0,0 +1,195 @@
+module KvgCharacterRecognition
+  # This module contains various normalization methods
+  module Normalization
+    #This methods normalizes the strokes using bi moment
+    #Params:
+    #+strokes+:: [[[x1, y1], [x2, y2], ...], [[x1, y1], ...]]
+    #+slant_correction+:: boolean whether a slant correction should be performed
+    #returns normed_strokes, normed_strokes_with_slant_correction
+    def bi_moment_normalize strokes
+      means, diffs = means_and_diffs strokes
+
+      #calculating delta values
+      delta = Proc.new do |diff, operator|
+        #d_x or d_y
+        #operator: >= or <
+        accum = 0
+        counter = 0
+
+        diff.each do |d|
+          if d.send operator, 0
+            accum += d ** 2
+            counter += 1
+          end
+        end
+        accum / counter
+      end
+
+      new_strokes = []
+
+      strokes.each do |stroke|
+        new_stroke = []
+        stroke.each do |point|
+          x = point[0]
+          y = point[1]
+
+          if x - means[0] >= 0
+            new_x = ( @size * (x - means[0]) / (4 * Math.sqrt(delta.call(diffs[0], :>=))).round(2) ) + @size/2
+          else
+            new_x = ( @size * (x - means[0]) / (4 * Math.sqrt(delta.call(diffs[0], :<))).round(2) ) + @size/2
+          end
+
+          if y - means[1] >= 0
+            new_y = ( @size * (y - means[1]) / (4 * Math.sqrt(delta.call(diffs[1], :>=))).round(2) ) + @size/2
+          else
+            new_y = ( @size * (y - means[1]) / (4 * Math.sqrt(delta.call(diffs[1], :<))).round(2) ) + @size/2
+          end
+
+          if new_x >= 0 && new_x <= @size && new_y >= 0 && new_y <= @size
+            new_stroke << [new_x.round(3), new_y.round(3)]
+          end
+        end
+        new_strokes << new_stroke unless new_stroke.empty?
+      end
+      new_strokes
+    end
+
+    # line density equalization
+    # strokes must be scaled to 109x109
+    def line_density_normalize strokes
+      hist_x, hist_y = line_density_histogram strokes
+      strokes.map do |stroke|
+        stroke.map do |point|
+          if point[0] < 109 && point[1] < 109
+            [@size * hist_x[point[0].floor] / hist_x.last, @size * hist_y[point[1].floor] / hist_y.last]
+          else
+            point
+          end
+        end
+      end
+    end
+
+    # point density normalization
+    def point_density_normalize strokes
+      points = strokes.flatten(1)
+      h_x, h_y = accumulated_histogram strokes
+      strokes.map do |stroke|
+        stroke.map do |point|
+          [(@size * h_x[point[0].round] / points.length.to_f).round(2), (@size * h_y[point[1].round] / points.length.to_f).round(2)]
+        end
+      end
+    end
+
+    private
+    # bitmap for calculating background runlength in line density normalization
+    # bitmap_x[i] is a row of position y = i and contains x-values of existing points
+    # bitmap_y[i] is a column of position x = i and contains y-values of existing points
+    def runlength_bitmap strokes
+      bitmap_x = Array.new(@size, [])
+      bitmap_y = Array.new(@size, [])
+
+      strokes.each do |stroke|
+        stroke.each do |point|
+          x = point[0].floor
+          y = point[1].floor
+          if x < @size && y < @size
+            bitmap_x[y] = bitmap_x[y] + [x]
+            bitmap_y[x] = bitmap_y[x] + [y]
+          end
+        end
+      end
+      [bitmap_x, bitmap_y]
+    end
+
+    def runlength row, i
+      left = 0
+      right = 109
+
+      row.each do |j|
+        left = j if j < i && j > left
+        right = j if j > i && j < right
+      end
+      (right - left).to_f
+    end
+
+    def line_density_histogram strokes
+      bitmap_x, bitmap_y = runlength_bitmap strokes
+      acc_x = 0
+      acc_y = 0
+      hist_x = []
+      hist_y = []
+      (0..(@size - 1)).each do |i|
+        sum_x = 0
+        sum_y = 0
+        (0..(@size - 1)).each do |j|
+
+          if bitmap_x[j].include? i
+            # x = i is in pattern area
+            sum_x += 0
+          else
+            sum_x += 1 / runlength(bitmap_x[j], i)
+          end
+
+          if bitmap_y[j].include? i
+            # y = i is in pattern area
+            sum_y += 0
+          else
+            sum_y += 1 / runlength(bitmap_y[j], i)
+          end
+        end
+
+        acc_x += sum_x
+        acc_y += sum_y
+        hist_x << acc_x
+        hist_y << acc_y
+      end
+      [hist_x, hist_y]
+    end
+
+
+    # accumulated histogram needed by point density normalization
+    def accumulated_histogram strokes
+      points = strokes.flatten(1)
+      grids = @size + 1
+      h_x = []
+      h_y = []
+      (0..grids).each do |i|
+        h_x[i] = points.count{ |p| p[0].round == i }
+        h_y[i] = points.count{ |p| p[1].round == i }
+        h_x[i] = h_x[i] + h_x[i - 1] if i > 0
+        h_y[i] = h_y[i] + h_y[i - 1] if i > 0
+      end
+
+      [h_x, h_y]
+    end
+
+
+    #This method calculates means and diffs of x and y coordinates in the strokes
+    #The return values are used in the normalization step
+    #means, diffs = means_and_diffs strokes
+    #Return values:
+    #+means+:: [mean_of_x, mean_of_y]
+    #+diffs+:: differences of the x and y coordinates to their means i.e. [[d_x1, d_x2 ...], [d_y1, d_y2 ...]]
+    def means_and_diffs strokes
+      points = strokes.flatten(1)
+      sums = points.inject([0, 0]){ |acc, point| acc = [acc[0] + point[0], acc[1] + point[1]] }
+      #means = [x_c, y_c]
+      means = sums.map{ |sum| (sum / points.length.to_f).round(2) }
+
+      #for slant correction
+      diff_x = []
+      diff_y = []
+      u11 = 0
+      u02 = 0
+      points.each do |point|
+        diff_x << point[0] - means[0]
+        diff_y << point[1] - means[1]
+
+        u11 += (point[0] - means[0]) * (point[1] - means[1])
+        u02 += (point[1] - means[1])**2
+      end
+      [means, [diff_x, diff_y], -1 * u11 / u02]
+    end
+
+  end
+end
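The new Normalization module is written as a mixin: every method reads the canvas size from an @size instance variable supplied by the including class (the reworked Preprocessor below does exactly that). A minimal sketch of using it directly; the Canvas host class and the toy strokes are made up for illustration and are not part of the gem:

require 'kvg_character_recognition'

# Hypothetical host class; it only has to provide @size, just as Preprocessor does.
class Canvas
  include KvgCharacterRecognition::Normalization

  def initialize(size)
    @size = size
  end

  # bi_moment_normalize is a public module method, so a thin wrapper is enough
  def normalize(strokes)
    bi_moment_normalize(strokes)
  end
end

# Two toy strokes in raw tablet coordinates
strokes = [[[10.0, 12.0], [20.0, 25.0], [30.0, 40.0]],
           [[15.0, 50.0], [35.0, 52.0]]]

# Points are re-centred on the character's centroid and re-scaled into the 0..109 square
p Canvas.new(109).normalize(strokes)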
data/lib/kvg_character_recognition/preprocessor.rb
@@ -1,6 +1,22 @@
 module KvgCharacterRecognition
-  #This class has a collection of methods for the preprocessing step of character recognition
   class Preprocessor
+    include KvgCharacterRecognition::Normalization
+    attr_accessor :smooth_weights, :interpolate_distance, :size, :smooth, :number_of_points
+    def initialize interpolate_distance, size, smooth=true, smooth_weights=[1,2,3,2,1]
+      @smooth = smooth
+      @smooth_weights = smooth_weights
+      #@downsample_rate = downsample_rate
+      @interpolate_distance = interpolate_distance
+      @size = size
+    end
+
+    # preprocess steps bi moment size normalization, smooth and interpolate
+    def preprocess strokes
+      bi_moment_normalize(strokes).map do |stroke|
+        stroke = smooth(stroke) if @smooth
+        smooth(interpolate(stroke))
+      end
+    end

     #A simple smooth method using the following formula
     #p'(i) = (w(-M)*p(i-M) + ... + w(0)*p(i) + ... + w(M)*p(i+M)) / S
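Preprocessing is now driven by a configurable instance rather than the removed class methods. A rough usage sketch based on the constructor above; 0.8 and 109 are example settings chosen here, not documented defaults, and the sample stroke is invented:

require 'kvg_character_recognition'

# interpolate_distance, size, smooth, smooth_weights
preprocessor = KvgCharacterRecognition::Preprocessor.new(0.8, 109, true, [1, 2, 3, 2, 1])

strokes    = [[[10.0, 12.0], [20.0, 25.0], [30.0, 40.0]]]   # raw user input
normalized = preprocessor.preprocess(strokes)               # normalized, smoothed, interpolated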
@@ -8,19 +24,18 @@ module KvgCharacterRecognition
     #Only the user input should be smoothed, it is not necessary for kvg data.
     #Params:
     #+stroke+:: array of points i.e [[x1, y1], [x2, y2] ...]
-    def
-
-
-      wsum = weights.inject{ |sum, x| sum + x}
+    def smooth stroke
+      offset = @smooth_weights.length / 2
+      wsum = @smooth_weights.inject{ |sum, x| sum + x}

-      return stroke if stroke.length <
+      return stroke if stroke.length < @smooth_weights.length

       copy = stroke.dup

       (offset..(stroke.length - offset - 1)).each do |i|
         accum = [0, 0]

-
+        @smooth_weights.each_with_index do |w, j|
           accum[0] += w * copy[i + j - offset][0]
           accum[1] += w * copy[i + j - offset][1]
         end
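With the default weights [1, 2, 3, 2, 1], the smoothing above replaces each interior point by a weighted mean of itself and its four neighbours (S = 9), so isolated jitter is damped while the stroke's endpoints are left untouched. A small hand-worked check in plain Ruby, independent of the gem:

weights = [1, 2, 3, 2, 1]
wsum    = weights.inject(:+)                      # S = 9
offset  = weights.length / 2                      # M = 2
stroke  = [[0.0, 0.0], [1.0, 0.0], [2.0, 2.0], [3.0, 0.0], [4.0, 0.0]]

i = 2                                             # the spiky middle point
smoothed = [0, 1].map do |axis|
  weights.each_with_index.sum { |w, j| w * stroke[i + j - offset][axis] } / wsum.to_f
end
p smoothed   # => [2.0, 0.666...] – the y spike is pulled back towards its neighbours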
@@ -30,140 +45,9 @@ module KvgCharacterRecognition
       stroke
     end

-    #This method executes different preprocessing steps
-    #strokes are normalized
-    #1.Smooth strokes if set to true
-    #2.Interpolate points by given distance, in order to equalize the sample rate of input and template
-    #3.Downsample by given interval
-    def self.preprocess strokes, interpolate_distance=0.8, downsample_interval=4, smooth=true
-      strokes.map do |stroke|
-        stroke = smooth(stroke) if smooth
-        interpolated = smooth(interpolate(stroke, interpolate_distance))
-        downsample(interpolated, downsample_interval)
-      end
-    end
-
-    # accumulated histogram needed by line density normalization
-    def self.accumulated_histogram points
-      grids = CONFIG[:size] + 1
-      h_x = []
-      h_y = []
-      (0..grids).each do |i|
-        h_x[i] = points.count{ |p| p[0].round == i }
-        h_y[i] = points.count{ |p| p[1].round == i }
-        h_x[i] = h_x[i] + h_x[i - 1] if i > 0
-        h_y[i] = h_y[i] + h_y[i - 1] if i > 0
-      end
-
-      [h_x, h_y]
-    end
-
-    # line density normalization
-    def self.line_density_normalize strokes
-      points = strokes.flatten(1)
-      h_x, h_y = accumulated_histogram points
-      strokes.map do |stroke|
-        stroke.map do |point|
-          [(CONFIG[:size] * h_x[point[0].round] / points.length.to_f).round(2), (CONFIG[:size] * h_y[point[1].round] / points.length.to_f).round(2)]
-        end
-      end
-    end
-
-    #This method calculates means and diffs of x and y coordinates in the strokes
-    #The return values are used in the normalization step
-    #means, diffs = means_and_diffs strokes
-    #Return values:
-    #+means+:: [mean_of_x, mean_of_y]
-    #+diffs+:: differences of the x and y coordinates to their means i.e. [[d_x1, d_x2 ...], [d_y1, d_y2 ...]]
-    def self.means_and_diffs strokes
-      points = strokes.flatten(1)
-      sums = points.inject([0, 0]){ |acc, point| acc = [acc[0] + point[0], acc[1] + point[1]] }
-      #means = [x_c, y_c]
-      means = sums.map{ |sum| (sum / points.length.to_f).round(2) }
-
-      #for slant correction
-      diff_x = []
-      diff_y = []
-      u11 = 0
-      u02 = 0
-      points.each do |point|
-        diff_x << point[0] - means[0]
-        diff_y << point[1] - means[1]
-
-        u11 += (point[0] - means[0]) * (point[1] - means[1])
-        u02 += (point[1] - means[1])**2
-      end
-      [means, [diff_x, diff_y], -1 * u11 / u02]
-    end
-
-    #This methods normalizes the strokes using bi moment
-    #Params:
-    #+strokes+:: [[[x1, y1], [x2, y2], ...], [[x1, y1], ...]]
-    #+slant_correction+:: boolean whether a slant correction should be performed
-    #returns normed_strokes, normed_strokes_with_slant_correction
-    def self.bi_moment_normalize strokes
-      means, diffs, slant_slope = means_and_diffs strokes
-
-      #calculating delta values
-      delta = Proc.new do |diff, operator|
-        #d_x or d_y
-        #operator: >= or <
-        accum = 0
-        counter = 0
-
-        diff.each do |d|
-          if d.send operator, 0
-            accum += d ** 2
-            counter += 1
-          end
-        end
-        accum / counter
-      end
-
-      new_strokes = []
-      new_strokes_with_slant = []
-
-      strokes.each do |stroke|
-        new_stroke = []
-        new_stroke_slant = []
-        stroke.each do |point|
-          x = point[0]
-          y = point[1]
-          x_slant = x + (y - means[1]) * slant_slope
-
-          if x - means[0] >= 0
-            new_x = ( CONFIG[:size] * (x - means[0]) / (4 * Math.sqrt(delta.call(diffs[0], :>=))).round(2) ) + CONFIG[:size]/2
-          else
-            new_x = ( CONFIG[:size] * (x - means[0]) / (4 * Math.sqrt(delta.call(diffs[0], :<))).round(2) ) + CONFIG[:size]/2
-          end
-          if x_slant - means[0] >= 0
-            new_x_slant = ( CONFIG[:size] * (x_slant - means[0]) / (4 * Math.sqrt(delta.call(diffs[0], :>=))).round(2) ) + CONFIG[:size]/2
-          else
-            new_x_slant = ( CONFIG[:size] * (x_slant - means[0]) / (4 * Math.sqrt(delta.call(diffs[0], :<))).round(2) ) + CONFIG[:size]/2
-          end
-
-          if y - means[1] >= 0
-            new_y = ( CONFIG[:size] * (y - means[1]) / (4 * Math.sqrt(delta.call(diffs[1], :>=))).round(2) ) + CONFIG[:size]/2
-          else
-            new_y = ( CONFIG[:size] * (y - means[1]) / (4 * Math.sqrt(delta.call(diffs[1], :<))).round(2) ) + CONFIG[:size]/2
-          end
-
-          if new_x >= 0 && new_x <= CONFIG[:size] && new_y >= 0 && new_y <= CONFIG[:size]
-            new_stroke << [new_x.round(3), new_y.round(3)]
-          end
-          if new_x_slant >= 0 && new_x_slant <= CONFIG[:size] && new_y >= 0 && new_y <= CONFIG[:size]
-            new_stroke_slant << [new_x_slant.round(3), new_y.round(3)]
-          end
-        end
-        new_strokes << new_stroke unless new_stroke.empty?
-        new_strokes_with_slant << new_stroke_slant unless new_stroke_slant.empty?
-      end
-      [new_strokes, new_strokes_with_slant]
-    end
-
     #This method interpolates points into a stroke with given distance
     #The algorithm is taken from the paper preprocessing techniques for online character recognition
-    def
+    def interpolate stroke
       current = stroke.first
       new_stroke = [current]

@@ -173,7 +57,7 @@ module KvgCharacterRecognition
         point = stroke[index]

         #only consider point with greater than d distance to current point
-        if Math.euclidean_distance(current, point) <
+        if Math.euclidean_distance(current, point) < @interpolate_distance
           index += 1
         else

@@ -181,16 +65,16 @@ module KvgCharacterRecognition
           new_point = []
           if point[0].round(2) == current[0].round(2) # x2 == x1
             if point[1] > current[1] # y2 > y1
-              new_point = [current[0], current[1] +
+              new_point = [current[0], current[1] + @interpolate_distance]
             else # y2 < y1
-              new_point = [current[0], current[1] -
+              new_point = [current[0], current[1] - @interpolate_distance]
             end
           else # x2 != x1
             slope = (point[1] - current[1]) / (point[0] - current[0]).to_f
             if point[0] > current[0] # x2 > x1
-              new_point[0] = current[0] + Math.sqrt(
+              new_point[0] = current[0] + Math.sqrt(@interpolate_distance**2 / (slope**2 + 1))
             else # x2 < x1
-              new_point[0] = current[0] - Math.sqrt(
+              new_point[0] = current[0] - Math.sqrt(@interpolate_distance**2 / (slope**2 + 1))
             end
             new_point[1] = slope * new_point[0] + point[1] - (slope * point[0])
           end
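The interpolation above inserts a point exactly @interpolate_distance away from the current point along the segment towards the next one: for a segment of slope m, the x step is sqrt(d^2 / (m^2 + 1)), so the resulting step length is d. A quick numeric check of that geometry in plain Ruby, not part of the gem:

d     = 0.8
slope = 2.0
dx    = Math.sqrt(d**2 / (slope**2 + 1))
dy    = slope * dx
p Math.sqrt(dx**2 + dy**2)   # => 0.8 – the inserted point is exactly d away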
@@ -208,73 +92,5 @@ module KvgCharacterRecognition

       new_stroke
     end
-
-    #This methods downsamples a stroke in given interval
-    #The number of points in the stroke will be reduced
-    def self.downsample stroke, interval=3
-      stroke.each_slice(interval).map(&:first)
-    end
-
-    #This methods generates a heatmap for the given character pattern
-    #A heatmap divides the input character pattern(image of the character) into nxn grids
-    #We count the points in each grid and store the number in a map
-    #The map array can be used as feature
-    #Params:
-    #+points+:: flattened strokes i.e. [[x1, y1], [x2, y2]...] because the seperation of points in strokes is irrelevant in this case
-    #+grid+:: number of grids
-    def self.heatmap points, grid, size
-
-      grid_size = size / grid.to_f
-
-      map = Map.new grid, grid, 0
-
-      #fill the heatmap
-      points.each do |point|
-        if point[0] < size && point[1] < size
-          x_i = (point[0] / grid_size).floor if point[0] < size
-          y_i = (point[1] / grid_size).floor if point[1] < size
-
-          map[y_i, x_i] += (1 / points.length.to_f).round(4)
-        end
-      end
-
-      map
-    end
-    #This method smooths a heatmap using spatial_weight_filter technique
-    #but instead of taking every 2nd grid, it processes every grid and stores the average of the weighted sum of adjacent grids
-    #Params:
-    #+map+:: a heatmap
-    def self.smooth_heatmap map
-      grid = map.size
-      #map is a heatmap
-      new_map = Map.new(grid, grid, 0)
-
-      (0..(grid - 1)).each do |i|
-        (0..(grid - 1)).each do |j|
-          #weights alternative
-          # = [1/16, 2/16, 1/16];
-          #   [2/16, 4/16, 2/16];
-          #   [1/16, 2/16, 1/16]
-          #
-          #weights = [1/9, 1/9, 1/9];
-          #          [1/9, 1/9, 1/9];
-          #          [1/9, 1/9, 1/9]
-          #
-          w11 = (0..(grid-1)).cover?(i+1) && (0..(grid-1)).cover?(j-1)? map[i+1,j-1] * 1 / 9.0 : 0
-          w12 = (0..(grid-1)).cover?(i+1) && (0..(grid-1)).cover?(j)? map[i+1,j] * 1 / 9.0 : 0
-          w13 = (0..(grid-1)).cover?(i+1) && (0..(grid-1)).cover?(j+1)? map[i+1,j+1] * 1 / 9.0 : 0
-          w21 = (0..(grid-1)).cover?(i) && (0..(grid-1)).cover?(j-1)? map[i,j-1] * 1 / 9.0 : 0
-          w22 = (0..(grid-1)).cover?(i) && (0..(grid-1)).cover?(j)? map[i,j] * 1 / 9.0 : 0
-          w23 = (0..(grid-1)).cover?(i) && (0..(grid-1)).cover?(j+1)? map[i,j+1] * 1 / 9.0 : 0
-          w31 = (0..(grid-1)).cover?(i-1) && (0..(grid-1)).cover?(j-1)? map[i-1,j-1] * 1 / 9.0 : 0
-          w32 = (0..(grid-1)).cover?(i-1) && (0..(grid-1)).cover?(j)? map[i-1,j] * 1 / 9.0 : 0
-          w33 = (0..(grid-1)).cover?(i-1) && (0..(grid-1)).cover?(j+1)? map[i-1,j+1] * 1 / 9.0 : 0
-
-          new_map[i,j] = (w11 + w12 + w13 + w21 + w22 + w23 + w31 + w32 + w33).round(4)
-        end
-      end
-
-      new_map
-    end
   end
 end