ruby-opencv 0.0.8.pre-mswin32 → 0.0.9.pre2-mswin32

Files changed (70)
  1. checksums.yaml +7 -0
  2. data/DEVELOPERS_NOTE.md +137 -0
  3. data/Gemfile +1 -1
  4. data/License.txt +30 -30
  5. data/Manifest.txt +7 -5
  6. data/README.md +98 -0
  7. data/Rakefile +63 -5
  8. data/config.yml +7 -0
  9. data/examples/alpha_blend.rb +21 -21
  10. data/examples/find_obj.rb +169 -169
  11. data/examples/match_kdtree.rb +88 -88
  12. data/ext/opencv/cvcapture.cpp +19 -12
  13. data/ext/opencv/cvutils.cpp +192 -194
  14. data/ext/opencv/cvutils.h +30 -29
  15. data/{extconf.rb → ext/opencv/extconf.rb} +12 -4
  16. data/lib/opencv.rb +12 -3
  17. data/lib/opencv/psyched_yaml.rb +22 -22
  18. data/lib/opencv/version.rb +1 -1
  19. data/ruby-opencv.gemspec +44 -43
  20. data/test/helper.rb +1 -1
  21. data/test/runner.rb +30 -30
  22. data/test/test_curve.rb +1 -1
  23. data/test/test_cvavgcomp.rb +24 -24
  24. data/test/test_cvbox2d.rb +76 -76
  25. data/test/test_cvcapture.rb +183 -183
  26. data/test/test_cvchain.rb +108 -108
  27. data/test/test_cvcircle32f.rb +41 -41
  28. data/test/test_cvconnectedcomp.rb +61 -61
  29. data/test/test_cvcontour.rb +150 -150
  30. data/test/test_cvcontourtree.rb +43 -43
  31. data/test/test_cverror.rb +1 -1
  32. data/test/test_cvfeaturetree.rb +65 -65
  33. data/test/test_cvfont.rb +58 -58
  34. data/test/test_cvhaarclassifiercascade.rb +63 -63
  35. data/test/test_cvhistogram.rb +1 -1
  36. data/test/test_cvhumoments.rb +83 -83
  37. data/test/test_cvline.rb +50 -50
  38. data/test/test_cvmat.rb +1 -1
  39. data/test/test_cvmat_drawing.rb +1 -1
  40. data/test/test_cvmat_dxt.rb +1 -1
  41. data/test/test_cvmat_imageprocessing.rb +1 -1
  42. data/test/test_cvmat_matching.rb +1 -1
  43. data/test/test_cvmoments.rb +180 -180
  44. data/test/test_cvpoint.rb +75 -75
  45. data/test/test_cvpoint2d32f.rb +75 -75
  46. data/test/test_cvpoint3d32f.rb +93 -93
  47. data/test/test_cvrect.rb +144 -144
  48. data/test/test_cvscalar.rb +113 -113
  49. data/test/test_cvseq.rb +295 -295
  50. data/test/test_cvsize.rb +75 -75
  51. data/test/test_cvsize2d32f.rb +75 -75
  52. data/test/test_cvslice.rb +31 -31
  53. data/test/test_cvsurfparams.rb +57 -57
  54. data/test/test_cvsurfpoint.rb +66 -66
  55. data/test/test_cvtermcriteria.rb +56 -56
  56. data/test/test_cvtwopoints.rb +40 -40
  57. data/test/test_cvvideowriter.rb +58 -58
  58. data/test/test_iplconvkernel.rb +54 -54
  59. data/test/test_iplimage.rb +1 -1
  60. data/test/test_mouseevent.rb +17 -17
  61. data/test/test_opencv.rb +1 -1
  62. data/test/test_pointset.rb +1 -1
  63. data/test/test_preliminary.rb +130 -130
  64. data/test/test_trackbar.rb +47 -47
  65. data/test/test_window.rb +115 -115
  66. metadata +28 -56
  67. data/README.rdoc +0 -149
  68. data/ext/opencv/lib/opencv.rb +0 -3
  69. data/ext/opencv/lib/opencv/psyched_yaml.rb +0 -22
  70. data/ext/opencv/lib/opencv/version.rb +0 -3
data/config.yml ADDED
@@ -0,0 +1,7 @@
+ platform: mingw32
+ rubies:
+ - C:/ruby-1.9.3-p392-mingw32/bin/ruby.exe
+ - C:/ruby-2.0.0-p0-mingw32/bin/ruby.exe
+ extopts:
+ - --with-opencv-include=C:/opencv/build/include
+ - --with-opencv-lib=C:/opencv/build/x86/mingw/lib
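The added config.yml appears to parameterize the Windows cross-build: which Rubies to compile the extension for, and where extconf.rb should find the OpenCV headers and libraries. Below is a hypothetical sketch of a build loop that consumes such a file; the gem's actual Rakefile task (see Rakefile +63 -5 above) may differ.

    # Hypothetical build driver (not the gem's actual Rakefile code):
    # compile ext/opencv once per Ruby listed in config.yml, passing the
    # configured --with-opencv-include/--with-opencv-lib options to extconf.rb.
    require 'yaml'

    config  = YAML.load_file('config.yml')
    extopts = config['extopts'].join(' ')

    Dir.chdir('ext/opencv') do
      config['rubies'].each do |ruby|
        system("#{ruby} extconf.rb #{extopts}") or abort("extconf failed for #{ruby}")
        system('make clean') # best effort; ignore failure on a fresh checkout
        system('make') or abort("make failed for #{ruby}")
      end
    end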
data/examples/alpha_blend.rb CHANGED
@@ -1,21 +1,21 @@
- #!/usr/bin/env ruby
- # -*- mode: ruby; coding: utf-8-unix -*-
-
- # Alpha blending sample with GUI
-
- require 'opencv'
- include OpenCV
-
- img1 = IplImage.load('lenna.jpg', CV_LOAD_IMAGE_ANYCOLOR | CV_LOAD_IMAGE_ANYDEPTH)
- img2 = IplImage.load('tiffany.jpg', CV_LOAD_IMAGE_ANYCOLOR | CV_LOAD_IMAGE_ANYDEPTH)
-
- window = GUI::Window.new('Alpha blend')
- max = 100.0
- val = max / 2.0
- window.set_trackbar("Alpha", max, val) { |v|
-   a = v.to_f / max
-   window.show CvMat.add_weighted(img1, a, img2, 1.0 - a, 0)
- }
- window.show CvMat.add_weighted(img1, val / max, img2, 1.0 - val / max, 0)
- GUI::wait_key
-
+ #!/usr/bin/env ruby
+ # -*- mode: ruby; coding: utf-8 -*-
+
+ # Alpha blending sample with GUI
+
+ require 'opencv'
+ include OpenCV
+
+ img1 = IplImage.load('lenna.jpg', CV_LOAD_IMAGE_ANYCOLOR | CV_LOAD_IMAGE_ANYDEPTH)
+ img2 = IplImage.load('tiffany.jpg', CV_LOAD_IMAGE_ANYCOLOR | CV_LOAD_IMAGE_ANYDEPTH)
+
+ window = GUI::Window.new('Alpha blend')
+ max = 100.0
+ val = max / 2.0
+ window.set_trackbar("Alpha", max, val) { |v|
+   a = v.to_f / max
+   window.show CvMat.add_weighted(img1, a, img2, 1.0 - a, 0)
+ }
+ window.show CvMat.add_weighted(img1, val / max, img2, 1.0 - val / max, 0)
+ GUI::wait_key
+
data/examples/find_obj.rb CHANGED
@@ -1,169 +1,169 @@
- #!/usr/bin/env ruby
- # -*- mode: ruby; coding: utf-8-unix -*-
-
- # A Demo Ruby/OpenCV Implementation of SURF
- # See https://code.ros.org/trac/opencv/browser/tags/2.3.1/opencv/samples/c/find_obj.cpp
- require 'opencv'
- require 'benchmark'
- include OpenCV
-
- def compare_surf_descriptors(d1, d2, best, length)
-   raise ArgumentError unless (length % 4) == 0
-   total_cost = 0
-   0.step(length - 1, 4) { |i|
-     t0 = d1[i] - d2[i]
-     t1 = d1[i + 1] - d2[i + 1]
-     t2 = d1[i + 2] - d2[i + 2]
-     t3 = d1[i + 3] - d2[i + 3]
-     total_cost += t0 * t0 + t1 * t1 + t2 * t2 + t3 * t3
-     break if total_cost > best
-   }
-   total_cost
- end
-
- def naive_nearest_neighbor(vec, laplacian, model_keypoints, model_descriptors)
-   length = model_descriptors[0].size
-   neighbor = nil
-   dist1 = 1e6
-   dist2 = 1e6
-
-   model_descriptors.size.times { |i|
-     kp = model_keypoints[i]
-     mvec = model_descriptors[i]
-     next if laplacian != kp.laplacian
-
-     d = compare_surf_descriptors(vec, mvec, dist2, length)
-     if d < dist1
-       dist2 = dist1
-       dist1 = d
-       neighbor = i
-     elsif d < dist2
-       dist2 = d
-     end
-   }
-
-   return (dist1 < 0.6 * dist2) ? neighbor : nil
- end
-
- def find_pairs(object_keypoints, object_descriptors,
-                image_keypoints, image_descriptors)
-   ptpairs = []
-   object_descriptors.size.times { |i|
-     kp = object_keypoints[i]
-     descriptor = object_descriptors[i]
-     nearest_neighbor = naive_nearest_neighbor(descriptor, kp.laplacian, image_keypoints, image_descriptors)
-     unless nearest_neighbor.nil?
-       ptpairs << i
-       ptpairs << nearest_neighbor
-     end
-   }
-   ptpairs
- end
-
- def locate_planar_object(object_keypoints, object_descriptors,
-                          image_keypoints, image_descriptors, src_corners)
-   ptpairs = find_pairs(object_keypoints, object_descriptors, image_keypoints, image_descriptors)
-   n = ptpairs.size / 2
-   return nil if n < 4
-
-   pt1 = []
-   pt2 = []
-   n.times { |i|
-     pt1 << object_keypoints[ptpairs[i * 2]].pt
-     pt2 << image_keypoints[ptpairs[i * 2 + 1]].pt
-   }
-
-   _pt1 = CvMat.new(1, n, CV_32F, 2)
-   _pt2 = CvMat.new(1, n, CV_32F, 2)
-   _pt1.set_data(pt1)
-   _pt2.set_data(pt2)
-   h = CvMat.find_homography(_pt1, _pt2, :ransac, 5)
-
-   dst_corners = []
-   4.times { |i|
-     x = src_corners[i].x
-     y = src_corners[i].y
-     z = 1.0 / (h[6][0] * x + h[7][0] * y + h[8][0])
-     x = (h[0][0] * x + h[1][0] * y + h[2][0]) * z
-     y = (h[3][0] * x + h[4][0] * y + h[5][0]) * z
-     dst_corners << CvPoint.new(x.to_i, y.to_i)
-   }
-
-   dst_corners
- end
-
-
- ##### Main #####
- puts 'This program demonstrated the use of the SURF Detector and Descriptor using'
- puts 'brute force matching on planar objects.'
- puts 'Usage:'
- puts "ruby #{__FILE__} <object_filename> <scene_filename>, default is box.png and box_in_scene.png"
- puts
-
- object_filename = (ARGV.size == 2) ? ARGV[0] : 'box.png'
- scene_filename = (ARGV.size == 2) ? ARGV[1] : 'box_in_scene.png'
-
- object, image = nil, nil
- begin
-   object = IplImage.load(object_filename, CV_LOAD_IMAGE_GRAYSCALE)
-   image = IplImage.load(scene_filename, CV_LOAD_IMAGE_GRAYSCALE)
- rescue
-   puts "Can not load #{object_filename} and/or #{scene_filename}"
-   puts "Usage: ruby #{__FILE__} [<object_filename> <scene_filename>]"
-   exit
- end
- object_color = object.GRAY2BGR
-
- param = CvSURFParams.new(1500)
-
- object_keypoints, object_descriptors = nil, nil
- image_keypoints, image_descriptors = nil, nil
- tms = Benchmark.measure {
-   object_keypoints, object_descriptors = object.extract_surf(param)
-   puts "Object Descriptors: #{object_descriptors.size}"
-
-   image_keypoints, image_descriptors = image.extract_surf(param)
-   puts "Image Descriptors: #{image_descriptors.size}"
- }
- puts "Extraction time = #{tms.real * 1000} ms"
-
- correspond = IplImage.new(image.width, object.height + image.height, CV_8U, 1);
- correspond.set_roi(CvRect.new(0, 0, object.width, object.height))
- object.copy(correspond)
- correspond.set_roi(CvRect.new(0, object.height, image.width, image.height))
- image.copy(correspond)
- correspond.reset_roi
-
- src_corners = [CvPoint.new(0, 0), CvPoint.new(object.width, 0),
-                CvPoint.new(object.width, object.height), CvPoint.new(0, object.height)]
- dst_corners = locate_planar_object(object_keypoints, object_descriptors,
-                                    image_keypoints, image_descriptors, src_corners)
-
- correspond = correspond.GRAY2BGR
- if dst_corners
-   4.times { |i|
-     r1 = dst_corners[i % 4]
-     r2 = dst_corners[(i + 1) % 4]
-     correspond.line!(CvPoint.new(r1.x, r1.y + object.height), CvPoint.new(r2.x, r2.y + object.height),
-                      :color => CvColor::Red, :thickness => 2, :line_type => :aa)
-   }
- end
-
- ptpairs = find_pairs(object_keypoints, object_descriptors, image_keypoints, image_descriptors)
-
- 0.step(ptpairs.size - 1, 2) { |i|
-   r1 = object_keypoints[ptpairs[i]]
-   r2 = image_keypoints[ptpairs[i + 1]]
-   correspond.line!(r1.pt, CvPoint.new(r2.pt.x, r2.pt.y + object.height),
-                    :color => CvColor::Red, :line_type => :aa)
- }
-
- object_keypoints.each { |r|
-   radius = (r.size * 1.2 / 9.0 * 2).to_i
-   object_color.circle!(r.pt, radius, :color => CvColor::Red, :line_type => :aa)
- }
-
- GUI::Window.new('Object Correspond').show correspond
- GUI::Window.new('Object').show object_color
- GUI::wait_key
-
+ #!/usr/bin/env ruby
+ # -*- mode: ruby; coding: utf-8 -*-
+
+ # A Demo Ruby/OpenCV Implementation of SURF
+ # See https://code.ros.org/trac/opencv/browser/tags/2.3.1/opencv/samples/c/find_obj.cpp
+ require 'opencv'
+ require 'benchmark'
+ include OpenCV
+
+ def compare_surf_descriptors(d1, d2, best, length)
+   raise ArgumentError unless (length % 4) == 0
+   total_cost = 0
+   0.step(length - 1, 4) { |i|
+     t0 = d1[i] - d2[i]
+     t1 = d1[i + 1] - d2[i + 1]
+     t2 = d1[i + 2] - d2[i + 2]
+     t3 = d1[i + 3] - d2[i + 3]
+     total_cost += t0 * t0 + t1 * t1 + t2 * t2 + t3 * t3
+     break if total_cost > best
+   }
+   total_cost
+ end
+
+ def naive_nearest_neighbor(vec, laplacian, model_keypoints, model_descriptors)
+   length = model_descriptors[0].size
+   neighbor = nil
+   dist1 = 1e6
+   dist2 = 1e6
+
+   model_descriptors.size.times { |i|
+     kp = model_keypoints[i]
+     mvec = model_descriptors[i]
+     next if laplacian != kp.laplacian
+
+     d = compare_surf_descriptors(vec, mvec, dist2, length)
+     if d < dist1
+       dist2 = dist1
+       dist1 = d
+       neighbor = i
+     elsif d < dist2
+       dist2 = d
+     end
+   }
+
+   return (dist1 < 0.6 * dist2) ? neighbor : nil
+ end
+
+ def find_pairs(object_keypoints, object_descriptors,
+                image_keypoints, image_descriptors)
+   ptpairs = []
+   object_descriptors.size.times { |i|
+     kp = object_keypoints[i]
+     descriptor = object_descriptors[i]
+     nearest_neighbor = naive_nearest_neighbor(descriptor, kp.laplacian, image_keypoints, image_descriptors)
+     unless nearest_neighbor.nil?
+       ptpairs << i
+       ptpairs << nearest_neighbor
+     end
+   }
+   ptpairs
+ end
+
+ def locate_planar_object(object_keypoints, object_descriptors,
+                          image_keypoints, image_descriptors, src_corners)
+   ptpairs = find_pairs(object_keypoints, object_descriptors, image_keypoints, image_descriptors)
+   n = ptpairs.size / 2
+   return nil if n < 4
+
+   pt1 = []
+   pt2 = []
+   n.times { |i|
+     pt1 << object_keypoints[ptpairs[i * 2]].pt
+     pt2 << image_keypoints[ptpairs[i * 2 + 1]].pt
+   }
+
+   _pt1 = CvMat.new(1, n, CV_32F, 2)
+   _pt2 = CvMat.new(1, n, CV_32F, 2)
+   _pt1.set_data(pt1)
+   _pt2.set_data(pt2)
+   h = CvMat.find_homography(_pt1, _pt2, :ransac, 5)
+
+   dst_corners = []
+   4.times { |i|
+     x = src_corners[i].x
+     y = src_corners[i].y
+     z = 1.0 / (h[6][0] * x + h[7][0] * y + h[8][0])
+     x = (h[0][0] * x + h[1][0] * y + h[2][0]) * z
+     y = (h[3][0] * x + h[4][0] * y + h[5][0]) * z
+     dst_corners << CvPoint.new(x.to_i, y.to_i)
+   }
+
+   dst_corners
+ end
+
+
+ ##### Main #####
+ puts 'This program demonstrated the use of the SURF Detector and Descriptor using'
+ puts 'brute force matching on planar objects.'
+ puts 'Usage:'
+ puts "ruby #{__FILE__} <object_filename> <scene_filename>, default is box.png and box_in_scene.png"
+ puts
+
+ object_filename = (ARGV.size == 2) ? ARGV[0] : 'box.png'
+ scene_filename = (ARGV.size == 2) ? ARGV[1] : 'box_in_scene.png'
+
+ object, image = nil, nil
+ begin
+   object = IplImage.load(object_filename, CV_LOAD_IMAGE_GRAYSCALE)
+   image = IplImage.load(scene_filename, CV_LOAD_IMAGE_GRAYSCALE)
+ rescue
+   puts "Can not load #{object_filename} and/or #{scene_filename}"
+   puts "Usage: ruby #{__FILE__} [<object_filename> <scene_filename>]"
+   exit
+ end
+ object_color = object.GRAY2BGR
+
+ param = CvSURFParams.new(1500)
+
+ object_keypoints, object_descriptors = nil, nil
+ image_keypoints, image_descriptors = nil, nil
+ tms = Benchmark.measure {
+   object_keypoints, object_descriptors = object.extract_surf(param)
+   puts "Object Descriptors: #{object_descriptors.size}"
+
+   image_keypoints, image_descriptors = image.extract_surf(param)
+   puts "Image Descriptors: #{image_descriptors.size}"
+ }
+ puts "Extraction time = #{tms.real * 1000} ms"
+
+ correspond = IplImage.new(image.width, object.height + image.height, CV_8U, 1);
+ correspond.set_roi(CvRect.new(0, 0, object.width, object.height))
+ object.copy(correspond)
+ correspond.set_roi(CvRect.new(0, object.height, image.width, image.height))
+ image.copy(correspond)
+ correspond.reset_roi
+
+ src_corners = [CvPoint.new(0, 0), CvPoint.new(object.width, 0),
+                CvPoint.new(object.width, object.height), CvPoint.new(0, object.height)]
+ dst_corners = locate_planar_object(object_keypoints, object_descriptors,
+                                    image_keypoints, image_descriptors, src_corners)
+
+ correspond = correspond.GRAY2BGR
+ if dst_corners
+   4.times { |i|
+     r1 = dst_corners[i % 4]
+     r2 = dst_corners[(i + 1) % 4]
+     correspond.line!(CvPoint.new(r1.x, r1.y + object.height), CvPoint.new(r2.x, r2.y + object.height),
+                      :color => CvColor::Red, :thickness => 2, :line_type => :aa)
+   }
+ end
+
+ ptpairs = find_pairs(object_keypoints, object_descriptors, image_keypoints, image_descriptors)
+
+ 0.step(ptpairs.size - 1, 2) { |i|
+   r1 = object_keypoints[ptpairs[i]]
+   r2 = image_keypoints[ptpairs[i + 1]]
+   correspond.line!(r1.pt, CvPoint.new(r2.pt.x, r2.pt.y + object.height),
+                    :color => CvColor::Red, :line_type => :aa)
+ }
+
+ object_keypoints.each { |r|
+   radius = (r.size * 1.2 / 9.0 * 2).to_i
+   object_color.circle!(r.pt, radius, :color => CvColor::Red, :line_type => :aa)
+ }
+
+ GUI::Window.new('Object Correspond').show correspond
+ GUI::Window.new('Object').show object_color
+ GUI::wait_key
+
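For reference, the dst_corners loop in locate_planar_object above is the usual planar-homography projection: a corner (x, y) maps to ((h00*x + h01*y + h02) / w, (h10*x + h11*y + h12) / w) with w = h20*x + h21*y + h22. A minimal plain-Ruby sketch of that mapping, with `h` as an ordinary 3x3 nested array rather than the gem's flattened CvMat (project_point is illustrative, not part of ruby-opencv):

    # Project a point through a 3x3 homography given as [[h00, h01, h02], ...].
    def project_point(h, x, y)
      w = h[2][0] * x + h[2][1] * y + h[2][2]
      [(h[0][0] * x + h[0][1] * y + h[0][2]) / w,
       (h[1][0] * x + h[1][1] * y + h[1][2]) / w]
    end

    # The identity homography maps a point onto itself.
    identity = [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]
    p project_point(identity, 10, 20)  # => [10.0, 20.0]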
data/examples/match_kdtree.rb CHANGED
@@ -1,88 +1,88 @@
- #!/usr/bin/env ruby
- # -*- mode: ruby; coding: utf-8-unix -*-
-
- # A sample of matching SURF feature points using kd-tree
- # See http://tech.groups.yahoo.com/group/OpenCV/message/62318
-
- require 'opencv'
- include OpenCV
-
- USE_EXTENDED_DESCRIPTOR = true
- THRESHOLD = 1500
- DESCRIPTOR_SIZE = USE_EXTENDED_DESCRIPTOR ? 128 : 64
-
- img1 = CvMat.load('lenna.jpg', CV_LOAD_IMAGE_GRAYSCALE)
- img2 = CvMat.load('lenna-rotated.jpg', CV_LOAD_IMAGE_GRAYSCALE)
-
- puts 'Extracting features from img1 using SURF...'
- param = CvSURFParams.new(THRESHOLD, USE_EXTENDED_DESCRIPTOR)
- kp1, desc1 = img1.extract_surf(param)
- puts "found #{kp1.size} keypoints from img1"
-
- puts 'Extracting features from img2 using SURF...'
- kp2, desc2 = img2.extract_surf(param)
- puts "found #{kp2.size} keypoints from img2"
-
- puts 'Matching keypoints...'
- desc1mat = CvMat.new(kp1.size, DESCRIPTOR_SIZE, :cv32f, 1)
- desc2mat = CvMat.new(kp2.size, DESCRIPTOR_SIZE, :cv32f, 1)
- desc1.each_with_index { |desc, i|
-   desc.each_with_index { |d, j|
-     desc1mat[i, j] = CvScalar.new(d)
-   }
- }
- desc2.each_with_index { |desc, i|
-   desc.each_with_index { |d, j|
-     desc2mat[i, j] = CvScalar.new(d)
-   }
- }
-
- feature_tree = CvFeatureTree.new(desc1mat)
- results, distances = feature_tree.find_features(desc2mat, 1, 250)
-
- reverse_lookup = []
- reverse_lookup_dist = []
- kp1.size.times { |i|
-   reverse_lookup << -1
-   reverse_lookup_dist << Float::MAX
- }
-
- match_count = 0
- kp2.size.times { |j|
-   i = results[j][0].to_i
-   d = distances[j][0]
-   if (d < reverse_lookup_dist[i])
-     match_count += 1 if reverse_lookup_dist[i] == Float::MAX
-     reverse_lookup[i] = j
-     reverse_lookup_dist[i] = d
-   end
- }
- puts "found #{match_count} putative correspondences"
-
- points1 = []
- points2 = []
- kp2.size.times { |j|
-   i = results[j][0].to_i
-   if (j == reverse_lookup[i])
-     points1 << kp1[i].pt
-     points2 << kp2[j].pt
-   end
- }
-
- width = img1.cols + img2.cols
- height = (img1.rows > img2.rows) ? img1.rows : img2.rows
- correspond = IplImage.new(width, height, :cv8u, 1);
- correspond.set_roi(CvRect.new(0, 0, img1.cols, img1.rows))
- img1.copy(correspond)
- correspond.set_roi(CvRect.new(img1.cols, 0, img1.cols + img2.cols, img2.rows))
- img2.copy(correspond)
- correspond.reset_roi
-
- points1.zip(points2) { |pt1, pt2|
-   pt2.x += img1.cols
-   correspond.line!(pt1, pt2, :color => CvColor::White)
- }
-
- GUI::Window.new('Object Correspond').show correspond
- GUI::wait_key
-
+ #!/usr/bin/env ruby
+ # -*- mode: ruby; coding: utf-8 -*-
+
+ # A sample of matching SURF feature points using kd-tree
+ # See http://tech.groups.yahoo.com/group/OpenCV/message/62318
+
+ require 'opencv'
+ include OpenCV
+
+ USE_EXTENDED_DESCRIPTOR = true
+ THRESHOLD = 1500
+ DESCRIPTOR_SIZE = USE_EXTENDED_DESCRIPTOR ? 128 : 64
+
+ img1 = CvMat.load('lenna.jpg', CV_LOAD_IMAGE_GRAYSCALE)
+ img2 = CvMat.load('lenna-rotated.jpg', CV_LOAD_IMAGE_GRAYSCALE)
+
+ puts 'Extracting features from img1 using SURF...'
+ param = CvSURFParams.new(THRESHOLD, USE_EXTENDED_DESCRIPTOR)
+ kp1, desc1 = img1.extract_surf(param)
+ puts "found #{kp1.size} keypoints from img1"
+
+ puts 'Extracting features from img2 using SURF...'
+ kp2, desc2 = img2.extract_surf(param)
+ puts "found #{kp2.size} keypoints from img2"
+
+ puts 'Matching keypoints...'
+ desc1mat = CvMat.new(kp1.size, DESCRIPTOR_SIZE, :cv32f, 1)
+ desc2mat = CvMat.new(kp2.size, DESCRIPTOR_SIZE, :cv32f, 1)
+ desc1.each_with_index { |desc, i|
+   desc.each_with_index { |d, j|
+     desc1mat[i, j] = CvScalar.new(d)
+   }
+ }
+ desc2.each_with_index { |desc, i|
+   desc.each_with_index { |d, j|
+     desc2mat[i, j] = CvScalar.new(d)
+   }
+ }
+
+ feature_tree = CvFeatureTree.new(desc1mat)
+ results, distances = feature_tree.find_features(desc2mat, 1, 250)
+
+ reverse_lookup = []
+ reverse_lookup_dist = []
+ kp1.size.times { |i|
+   reverse_lookup << -1
+   reverse_lookup_dist << Float::MAX
+ }
+
+ match_count = 0
+ kp2.size.times { |j|
+   i = results[j][0].to_i
+   d = distances[j][0]
+   if (d < reverse_lookup_dist[i])
+     match_count += 1 if reverse_lookup_dist[i] == Float::MAX
+     reverse_lookup[i] = j
+     reverse_lookup_dist[i] = d
+   end
+ }
+ puts "found #{match_count} putative correspondences"
+
+ points1 = []
+ points2 = []
+ kp2.size.times { |j|
+   i = results[j][0].to_i
+   if (j == reverse_lookup[i])
+     points1 << kp1[i].pt
+     points2 << kp2[j].pt
+   end
+ }
+
+ width = img1.cols + img2.cols
+ height = (img1.rows > img2.rows) ? img1.rows : img2.rows
+ correspond = IplImage.new(width, height, :cv8u, 1);
+ correspond.set_roi(CvRect.new(0, 0, img1.cols, img1.rows))
+ img1.copy(correspond)
+ correspond.set_roi(CvRect.new(img1.cols, 0, img1.cols + img2.cols, img2.rows))
+ img2.copy(correspond)
+ correspond.reset_roi
+
+ points1.zip(points2) { |pt1, pt2|
+   pt2.x += img1.cols
+   correspond.line!(pt1, pt2, :color => CvColor::White)
+ }
+
+ GUI::Window.new('Object Correspond').show correspond
+ GUI::wait_key
+
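The reverse_lookup block in this sample keeps, for each img1 keypoint, only the closest img2 keypoint that picked it as nearest neighbour, which removes duplicate matches before drawing. A small plain-Ruby sketch of that de-duplication on ordinary arrays (closest_per_model is illustrative, not part of ruby-opencv):

    # nearest: one [model_index, distance] pair per query keypoint.
    # Returns { model_index => [query_index, distance] }, keeping only the
    # closest query for each model index, like the reverse_lookup step above.
    def closest_per_model(nearest)
      best = {}
      nearest.each_with_index do |(i, d), j|
        best[i] = [j, d] if best[i].nil? || d < best[i][1]
      end
      best
    end

    p closest_per_model([[0, 2.5], [1, 0.8], [0, 1.1]])
    # => {0=>[2, 1.1], 1=>[1, 0.8]}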