rmotion 0.0.1

data/README ADDED
@@ -0,0 +1,77 @@
+ RMotion
+ * RMotion provides a simple interface to build motion detection software in ruby.
+
+ Author
+ * Riccardo Cecolin
+ * rikiji at playkanji dot com
+ * http://www.rikiji.it
+
+ Documentation
+ * You need libopencv-dev, libhighgui-dev and libfftw3-dev to compile this extension.
+ * For usage examples check examples/first.rb and examples/second.rb
+ * http://www.rikiji.it/post/19
+
+ Require the lib, instantiate the main object and print the default options:
+
+ irb(main):001:0> require 'rmotion'
+ => true
+ irb(main):002:0> m=Motion.new
+ => #<Motion:0xb74d14a0>
+ irb(main):003:0> m.show?
+ => true
+ irb(main):004:0> m.write?
+ => false
+ irb(main):005:0> m.fft?
+ => true
+
+ http://www.youtube.com/watch?v=HNL-2pNkuSo&feature=player_embedded
+ show, write and fft are the most important options. When show is true,
+ the analyzed video is also displayed in a window. You can set write
+ to a string (a filename) to save the analyzed video to a file. Both
+ the displayed and the saved video carry the motion detection markings,
+ like the video linked above.
+
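A minimal sketch of these options, assuming only the API shown above (the output filename is just an illustration):

    require 'rmotion'

    m = Motion.new
    m.show  = true             # on by default; keep the preview window
    m.write = "annotated.avi"  # illustrative filename; frames are recorded once capture starts
    m.cam                      # analyze the default camera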
+ fft controls how the video frames are processed (fast Fourier transform
+ or direct comparison): I recommend leaving it set to true, unless your
+ CPU isn't fast enough to follow the stream in real time. An in-depth
+ analysis of the difference between fft=true and fft=false will be
+ posted soon.
+
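On a slow machine, a sketch of falling back to the direct comparison instead of the FFT path:

    m = Motion.new
    m.fft = false   # compare cells by plain frame differencing
    m.cam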
+ irb(main):006:0> m.fill?
+ => true
+ irb(main):007:0> m.rect?
+ => false
+ irb(main):008:0> m.point?
+ => false
+
+ The next group of options controls how output frames are drawn: fill
+ (default true) behaves as in the previous video, rect draws rectangles
+ around moving objects and point instead draws a small circle centered
+ on each object. Let's see them! Note that they can be switched live,
+ as in the sketch after the next video.
+
+ http://www.youtube.com/watch?v=0LQIt0XiWqE&feature=player_embedded
+
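A sketch of switching the drawing style while the capture loop is running (the frame counter used as a trigger is made up for illustration):

    m = Motion.new
    frames = 0
    m.cam do |entities|
      frames += 1
      next unless frames == 100   # arbitrary point at which to change style
      m.fill = false
      m.rect = true               # draw bounding rectangles from now on
    end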
+ irb(main):012:0> m.threshold_fft?
+ => 1.0
+ irb(main):013:0> m.threshold_direct?
+ => 8.0
+ irb(main):014:0> m.threshold_distance?
+ => 9.0
+ irb(main):015:0> m.threshold_group?
+ => 20
+
+ threshold_fft and threshold_direct influence what is recognized as
+ noise and what as an object. Note that they are mutually exclusive:
+ when fft is true, threshold_direct doesn't affect the result, and when
+ fft is false, threshold_fft is ignored. Play with these values to fit
+ your environment (light, object speed, ...).
+
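For example, a sketch of desensitizing the detector for a noisy scene (the values are only illustrative starting points):

    m = Motion.new
    m.fft = true
    m.threshold_fft = 2.5           # higher value = less sensitive to noise
    # with m.fft = false, m.threshold_direct would be the knob to raise instead
    m.cam { |e| p e.first.center }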
+ threshold_distance and threshold_group also play a role in recognizing
+ objects: an object is a group of at least threshold_group cells, each
+ no farther than threshold_distance from another. So you can choose the
+ minimum size of objects and the spread of their cells. Cells are groups
+ of pixels determined dynamically from the camera resolution, so you
+ don't need to worry about them.
+
+
+
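And a sketch of accepting only larger, tighter blobs as objects (again, the numbers are only illustrative):

    m = Motion.new
    m.threshold_group = 40       # ignore blobs made of fewer than 40 cells
    m.threshold_distance = 4.0   # cells must be closer together to be grouped
    m.cam do |entities|
      entities.each { |e| p e.center unless e.center.nil? }
    end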
@@ -0,0 +1,13 @@
+ #!/usr/bin/ruby
+ require 'rubygems'
+ require 'rmotion'
+
+ m = Motion.new
+
+ # record the annotated stream while analyzing the default camera
+ m.write = "capture.avi"
+
+ # the block is called every frame with the array of tracked entities
+ m.cam { |e|
+   p e.first.center
+   p e.first.corners
+ }
@@ -0,0 +1,32 @@
+ #!/usr/bin/ruby
+ require 'rubygems'
+ require 'rmotion'
+
+ m = Motion.new
+ m.fill = false
+ m.rect = true
+
+ count = 0
+ # Entity.status and Entity.reorder are helpers provided by rmotion (not shown in this listing)
+ status = Entity.status
+
+ # analyze a recorded video instead of the live camera
+ m.vid("capture_1.avi") do |e|
+
+   Entity.reorder status, e
+
+   # count the entities currently visible (those with a known center)
+   t = status.select { |u| not u.center.nil? }.count
+   if t > count
+     puts "#{t - count} new " + (t - count > 1 ? "entities" : "entity") + " found"
+   elsif t < count
+     puts "#{count - t} " + (count - t > 1 ? "entities" : "entity") + " disappeared"
+     # switch drawing style on the fly
+     m.point = true
+     m.rect = false
+   end
+   count = t
+
+   status.each_with_index do |s, n|
+     puts "Entity #{n} is in position #{s.center.inspect}" unless s.center.nil?
+   end
+
+   sleep 0.10
+ end
@@ -0,0 +1,3 @@
+ require 'mkmf'
+ dir_config("rmotion")
+ create_makefile("rmotion")
@@ -0,0 +1,97 @@
+ /*
+ * Copyright (C) 2010 Riccardo Cecolin
+ * rkj@playkanji.com - 8 july 2010
+ */
+
+ #include <ruby.h>
+ #include <stdio.h>
+ #include <string.h>
+ #include <math.h>
+ #include <opencv/cv.h>
+ #include <opencv/highgui.h>
+ #include <fftw3.h>
+
+ #define CELL 8 /* 2FIX */
+ #define ENTITIES 8
+ #define LABEL "RMotion"
+
+ #define MODECAM 0
+ #define MODEVID 1
+
+ #define DBG(x) printf("%s: %d\n",#x,x)
+
+ struct motion_data_t {
+   /* FFTW */
+   fftw_complex *fft_img1;
+   fftw_complex *fft_img2;
+   fftw_complex *fft_res;
+   fftw_plan fft_cell_plan;
+
+   /* OPENCV */
+   CvCapture *capture;
+   CvVideoWriter *writer;
+
+   IplImage *last;
+   IplImage *cur;
+   IplImage *frame;
+
+   IplImage *cell1;
+   IplImage *cell2;
+   IplImage *tmp;
+
+   CvMemStorage* storage;
+   CvSeq* point_seq;
+
+   int quit;
+
+   /* GENERAL CONFIG */
+   int mode;
+   int camid;
+   char * vid;
+   int fft;
+   char * write;
+
+   /* DISPLAY */
+   int show;
+   int fill;
+   int rect;
+   int point;
+
+   /* ALGORITHM */
+   double threshold_distance;
+   int threshold_group;
+   double threshold_fft;
+   double threshold_direct;
+
+ };
+
+ struct entity {
+   int init;
+   int tot;
+   CvPoint tl;
+   CvPoint br;
+ };
+
+ #define GetMD(obj, p) Data_Get_Struct(obj, struct motion_data_t, p);
+
+
+ VALUE rb_motion_show_set (VALUE self, VALUE val);
+ VALUE rb_motion_show_p (VALUE self);
+ VALUE rb_motion_fill_set (VALUE self, VALUE val);
+ VALUE rb_motion_fill_p (VALUE self);
+ VALUE rb_motion_rect_set (VALUE self, VALUE val);
+ VALUE rb_motion_rect_p (VALUE self);
+ VALUE rb_motion_fft_set (VALUE self, VALUE val);
+ VALUE rb_motion_fft_p (VALUE self);
+ VALUE rb_motion_point_set (VALUE self, VALUE val);
+ VALUE rb_motion_point_p (VALUE self);
+ VALUE rb_motion_write_set (VALUE self, VALUE val);
+ VALUE rb_motion_write_p (VALUE self);
+ VALUE rb_motion_th_distance_set (VALUE self, VALUE val);
+ VALUE rb_motion_th_distance_p (VALUE self);
+ VALUE rb_motion_th_group_set (VALUE self, VALUE val);
+ VALUE rb_motion_th_group_p (VALUE self);
+ VALUE rb_motion_th_fft_set (VALUE self, VALUE val);
+ VALUE rb_motion_th_fft_p (VALUE self);
+ VALUE rb_motion_th_direct_set (VALUE self, VALUE val);
+ VALUE rb_motion_th_direct_p (VALUE self);
@@ -0,0 +1,196 @@
+ /*
+ * Copyright (C) 2010 Riccardo Cecolin
+ * rkj@playkanji.com - 8 july 2010
+ */
+
+ #include "rmotion.h"
+
+ extern VALUE motion_data;
+
+ VALUE rb_motion_show_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if(RTEST(val)) {
+     mdp->show = 1;
+     return Qtrue;
+   } else {
+     mdp->show = 0;
+     return Qfalse;
+   }
+ }
+ VALUE rb_motion_show_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if (mdp->show)
+     return Qtrue;
+   else
+     return Qfalse;
+ }
+
+ VALUE rb_motion_fill_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if(RTEST(val)) {
+     mdp->fill = 1;
+     return Qtrue;
+   } else {
+     mdp->fill = 0;
+     return Qfalse;
+   }
+ }
+ VALUE rb_motion_fill_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if (mdp->fill)
+     return Qtrue;
+   else
+     return Qfalse;
+ }
+
+ VALUE rb_motion_rect_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if(RTEST(val)) {
+     mdp->rect = 1;
+     return Qtrue;
+   } else {
+     mdp->rect = 0;
+     return Qfalse;
+   }
+ }
+ VALUE rb_motion_rect_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if (mdp->rect)
+     return Qtrue;
+   else
+     return Qfalse;
+ }
+
+ VALUE rb_motion_fft_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if(RTEST(val)) {
+     mdp->fft = 1;
+     return Qtrue;
+   } else {
+     mdp->fft = 0;
+     return Qfalse;
+   }
+ }
+ VALUE rb_motion_fft_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if (mdp->fft)
+     return Qtrue;
+   else
+     return Qfalse;
+ }
+
+ VALUE rb_motion_point_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if(RTEST(val)) {
+     mdp->point = 1;
+     return Qtrue;
+   } else {
+     mdp->point = 0;
+     return Qfalse;
+   }
+ }
+ VALUE rb_motion_point_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if (mdp->point)
+     return Qtrue;
+   else
+     return Qfalse;
+ }
+
+ VALUE rb_motion_write_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   Check_Type(val, T_STRING);
+   mdp->write = StringValuePtr(val);
+   return val; /* added: the function is declared to return a VALUE */
+ }
+
+ VALUE rb_motion_write_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   if (mdp->write != NULL)
+     return Qtrue;
+   else
+     return Qfalse;
+ }
+
+ VALUE rb_motion_th_distance_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   mdp->threshold_distance = NUM2DBL(val);
+   return val;
+ }
+
+ VALUE rb_motion_th_distance_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   return rb_float_new(mdp->threshold_distance);
+ }
+
+ VALUE rb_motion_th_group_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   mdp->threshold_group = NUM2INT(val);
+   return val;
+ }
+
+ VALUE rb_motion_th_group_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   return INT2FIX(mdp->threshold_group);
+ }
+
+ VALUE rb_motion_th_fft_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   mdp->threshold_fft = NUM2DBL(val);
+   return val;
+ }
+
+ VALUE rb_motion_th_fft_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   return rb_float_new(mdp->threshold_fft);
+ }
+
+ VALUE rb_motion_th_direct_set (VALUE self, VALUE val)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   mdp->threshold_direct = NUM2DBL(val);
+   return val;
+ }
+
+ VALUE rb_motion_th_direct_p (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   return rb_float_new(mdp->threshold_direct);
+ }
@@ -0,0 +1,392 @@
+ /*
+ * Copyright (C) 2010 Riccardo Cecolin
+ * rkj@playkanji.com - 8 july 2010
+ */
+
+ #include "rmotion.h"
+
+ VALUE rb_cMotion;
+ VALUE rb_cEntity;
+ VALUE motion_data;
+
+ void rmotion_free_data (struct motion_data_t * mdp)
+ {
+   free(mdp);
+ }
+ VALUE rmotion_allocate (VALUE klass)
+ {
+   struct motion_data_t * mdp;
+   motion_data = Data_Make_Struct(klass, struct motion_data_t, 0, rmotion_free_data, mdp);
+
+   mdp->storage = cvCreateMemStorage(0);
+   mdp->point_seq = cvCreateSeq(CV_32SC2,sizeof(CvSeq),sizeof(CvPoint),mdp->storage);
+
+   return motion_data;
+ }
+ VALUE rb_motion_initialize (VALUE self)
+ {
+   int i=0;
+   VALUE entities = rb_ary_new();
+   VALUE e;
+
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+
+   for (i=0;i<ENTITIES;i++) {
+     e = rb_class_new_instance (0, 0, rb_cEntity);
+     rb_ary_push(entities, e);
+   }
+   rb_iv_set(self,"entities",entities);
+
+   /* DEFAULTS */
+   mdp->mode = MODECAM;
+   mdp->fft = 1;
+   mdp->write = NULL;
+
+   mdp->show = 1;
+   mdp->fill = 1;
+   mdp->rect = 0;
+   mdp->point = 0;
+
+   mdp->threshold_distance = 9.0;
+   mdp->threshold_group = 20;
+   mdp->threshold_fft = 1;
+   mdp->threshold_direct = 8;
+
+   return self;
+ }
+
+ int rmotion_isequal( const void* _a, const void* _b, void* userdata ) {
+   CvPoint a = *(const CvPoint*)_a;
+   CvPoint b = *(const CvPoint*)_b;
+   double threshold = *(double*)userdata;
+   return (double)((a.x - b.x)*(a.x - b.x) + (a.y - b.y)*(a.y - b.y)) <= threshold;
+ }
+
+ /* highlight one CELLxCELL block: brighten its border and tint its interior
+  * proportionally to the measured deviation sdv */
+ void rmotion_cellfill(IplImage *img, int x, int y, int sdv) {
+   int z, w;
+   CvScalar pixel;
+
+   for(w=x;w<x+CELL;w++)
+     for(z=y;z<y+CELL;z++) {
+       pixel=cvGet2D(img,w,z);
+       if (z==y || z==(y+CELL-1)||w==x||w==(x+CELL-1)) {
+         pixel.val[0] += 100;
+         pixel.val[1] += 100;
+         pixel.val[2] += 100;
+       } else {
+         pixel.val[0] -= (int)(sdv * 10);
+         pixel.val[1] += (int)(sdv * 10);
+         pixel.val[2] -= (int)(sdv * 10);
+       }
+       if (pixel.val[0] < 0) pixel.val[0] = 0.0;
+       if (pixel.val[0] > 255) pixel.val[0] = 255.0; /* was a duplicated check on val[1] */
+       if (pixel.val[1] > 255) pixel.val[1] = 255.0;
+       if (pixel.val[2] < 0) pixel.val[2] = 0.0;
+       if (pixel.val[2] > 255) pixel.val[2] = 255.0;
+       cvSet2D(img,w,z,pixel);
+     }
+ }
+
+ void rmotion_spectrumdiff( struct motion_data_t * mdp, IplImage *ref, IplImage *tpl, IplImage *poc, int x, int y ) {
+   int i, j, k;
+   double tmp;
+   int step = ref->widthStep;
+   int fft_size = CELL * CELL;
+   uchar *ref_data = ( uchar* ) ref->imageData;
+   uchar *tpl_data = ( uchar* ) tpl->imageData;
+   double *poc_data = ( double* )poc->imageData;
+
+   for( i = 0, k = 0 ; i < CELL ; i++ ) {
+     for( j = 0 ; j < CELL ; j++, k++ ) {
+       mdp->fft_img1[k][0] = ( double )ref_data[(i+y) * step + j+x];
+       mdp->fft_img1[k][1] = 0.0;
+       mdp->fft_img2[k][0] = ( double )tpl_data[(i+y) * step + j+x];
+       mdp->fft_img2[k][1] = 0.0;
+     }
+   }
+
+   fftw_execute_dft( mdp->fft_cell_plan, mdp->fft_img1, mdp->fft_img1);
+   fftw_execute_dft( mdp->fft_cell_plan, mdp->fft_img2, mdp->fft_img2);
+
+   for( i = 0; i < fft_size ; i++ ) {
+     mdp->fft_res[i][0] = mdp->fft_img1[i][0] - mdp->fft_img2[i][0];
+     mdp->fft_res[i][1] = mdp->fft_img1[i][1] - mdp->fft_img2[i][1];
+   }
+
+   for( i = 0 ; i < fft_size ; i++ ) {
+     poc_data[i] = mdp->fft_res[i][0] / ( double )fft_size;
+   }
+ }
+
+ VALUE rmotion_process (VALUE self)
+ {
+
+   int i, j, k, w, z, key = 0;
+   CvScalar avg, sdv;
+   IplImage * swap;
+
+   int count;
+   CvPoint coord;
+   CvSeq* labels = 0;
+
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+
+   /* SETUP */
+   if (mdp->mode == MODEVID && (strlen(mdp->vid) > 0)) {
+     mdp->capture = cvCaptureFromAVI(mdp->vid);
+   } else {
+     mdp->capture = cvCaptureFromCAM(mdp->camid);
+   }
+
+   if(!mdp->capture) {
+     printf("error mdp capture\n");
+   }
+
+   mdp->frame = cvQueryFrame(mdp->capture);
+
+   /* ALLOCATE WORKING BUFFERS */
+   mdp->last = cvCreateImage(cvSize(mdp->frame->width,mdp->frame->height), mdp->frame->depth, 1);
+   mdp->cur = cvCreateImage(cvSize(mdp->frame->width,mdp->frame->height), mdp->frame->depth, 1);
+
+   mdp->cell1 = cvCreateImage(cvSize(CELL, CELL), mdp->frame->depth, 1);
+   mdp->cell2 = cvCreateImage(cvSize(CELL, CELL), mdp->frame->depth, 1);
+   mdp->tmp = cvCreateImage(cvSize(CELL, CELL), IPL_DEPTH_64F, 1);
+
+   mdp->fft_img1 = ( fftw_complex* )fftw_malloc( sizeof( fftw_complex ) * CELL * CELL );
+   mdp->fft_img2 = ( fftw_complex* )fftw_malloc( sizeof( fftw_complex ) * CELL * CELL );
+   mdp->fft_res = ( fftw_complex* )fftw_malloc( sizeof( fftw_complex ) * CELL * CELL );
+   mdp->fft_cell_plan = fftw_plan_dft_1d( CELL * CELL, mdp->fft_img1, mdp->fft_img1, FFTW_FORWARD, FFTW_ESTIMATE );
+
+   if(mdp->write != NULL) {
+     mdp->writer = cvCreateVideoWriter(mdp->write,CV_FOURCC('P','I','M','1'),25,cvSize(mdp->frame->width,mdp->frame->height),1);
+   }
+
+   if (mdp->show) {
+     cvNamedWindow(LABEL,CV_WINDOW_AUTOSIZE );
+     cvShowImage(LABEL,mdp->frame);
+   }
+
+   cvCvtColor(mdp->frame,mdp->last,CV_RGB2GRAY);
+
+   /* MAIN LOOP */
+   mdp->quit = 0;
+
+   int n = 0;
+   while (!mdp->quit && key != 'b') {
+
+     mdp->frame = cvQueryFrame(mdp->capture);
+     if(!mdp->frame) break;
+
+     cvCvtColor(mdp->frame,mdp->cur,CV_RGB2GRAY);
+
+     /* FOR EACH CELL */
+     for (i= 0; i < mdp->frame->height; i+= CELL) {
+       for (k= 0; k < mdp->frame->width; k+= CELL) {
+
+         if(mdp->fft) {
+
+           rmotion_spectrumdiff( mdp, mdp->last, mdp->cur, mdp->tmp, k, i);
+           cvAvgSdv(mdp->tmp, &avg, &sdv, 0);
+
+           if(sdv.val[0] > mdp->threshold_fft) {
+
+             if(mdp->show && mdp->fill)
+               rmotion_cellfill(mdp->frame, i, k, sdv.val[0]);
+             coord.x= k/CELL;
+             coord.y= i/CELL;
+             cvSeqPush( mdp->point_seq, &coord );
+
+           }
+         } else {
+           cvSetImageROI(mdp->last,cvRect(k, i, CELL, CELL));
+           cvSetImageROI(mdp->cur,cvRect(k, i, CELL, CELL));
+           cvCopy(mdp->last,mdp->cell1, 0);
+           cvCopy(mdp->cur,mdp->cell2, 0);
+           cvResetImageROI(mdp->cur);
+           cvResetImageROI(mdp->last);
+
+           cvAbsDiff(mdp->cell1,mdp->cell2,mdp->cell1);
+           cvAvgSdv(mdp->cell1, &avg, &sdv, 0);
+           if(sdv.val[0] > mdp->threshold_direct) {
+             if(mdp->show && mdp->fill)
+               rmotion_cellfill(mdp->frame, i, k, sdv.val[0]);
+             coord.x= k/CELL;
+             coord.y= i/CELL;
+             cvSeqPush( mdp->point_seq, &coord );
+           }
+         }
+       }
+     }
+     /* ENTITIES */
+     count = cvSeqPartition( mdp->point_seq,0,&labels,rmotion_isequal, &mdp->threshold_distance );
+     // DBG(count);
+     struct entity groups [ENTITIES] = { 0 };
+     memset(groups, 0, sizeof(struct entity) * ENTITIES);
+     for( i = 0; i < labels->total; i++ ){
+       CvPoint pt = *(CvPoint*)cvGetSeqElem( mdp->point_seq, i );
+       int seq = *(int*)cvGetSeqElem( labels, i );
+       if (seq < ENTITIES) {
+         if (pt.x <= groups[seq].tl.x || groups[seq].init == 0)
+           groups[seq].tl.x = pt.x;
+         if (pt.y <= groups[seq].tl.y || groups[seq].init == 0)
+           groups[seq].tl.y = pt.y;
+         if (pt.x >= groups[seq].br.x || groups[seq].init == 0)
+           groups[seq].br.x = pt.x;
+         if (pt.y >= groups[seq].br.y || groups[seq].init == 0)
+           groups[seq].br.y = pt.y;
+         groups[seq].tot +=1;
+         groups[seq].init = 1;
+       }
+     }
+
+     for (i=0;i<ENTITIES;i++) {
+       VALUE ary = rb_iv_get(self,"entities");
+       VALUE e = rb_ary_entry(ary,i);
+       if (groups[i].tot > mdp->threshold_group) {
+         groups[i].tl.x *= CELL;
+         groups[i].tl.y *= CELL;
+         groups[i].br.x *= CELL;
+         groups[i].br.y *= CELL;
+
+         rb_iv_set(e,"@tlx", INT2FIX((int)groups[i].tl.x));
+         rb_iv_set(e,"@tly", INT2FIX((int)groups[i].tl.y));
+         rb_iv_set(e,"@brx", INT2FIX((int)groups[i].br.x));
+         rb_iv_set(e,"@bry", INT2FIX((int)groups[i].br.y));
+
+         if (mdp->rect) cvRectangle(mdp->frame,groups[i].tl,groups[i].br,cvScalar(255,112,61,0),2,0,0);
+         if (mdp->point) {
+           cvCircle(mdp->frame, cvPoint((groups[i].tl.x + groups[i].br.x)/2,(groups[i].tl.y + groups[i].br.y)/2), 10, cvScalar(255,112,61,0), 2, 0,0);
+         }
+       } else {
+         rb_iv_set(e,"@tlx", Qnil);
+         rb_iv_set(e,"@tly", Qnil);
+         rb_iv_set(e,"@brx", Qnil);
+         rb_iv_set(e,"@bry", Qnil);
+       }
+     }
+     cvClearSeq(mdp->point_seq);
+     cvClearSeq(labels);
+
+     if (mdp->show) {
+       cvShowImage(LABEL,mdp->frame);
+       key = cvWaitKey( 10 );
+     }
+
+     if(mdp->write != NULL) {
+       cvWriteFrame(mdp->writer,mdp->frame);
+     }
+
+     if(rb_block_given_p())
+       rb_yield(rb_iv_get(self,"entities"));
+
+     swap = mdp->last;
+     mdp->last = mdp->cur;
+     mdp->cur = swap;
+
+   }
+
+   /* FREE */
+   if(mdp->write != NULL) cvReleaseVideoWriter(&mdp->writer);
+
+   fftw_free ( mdp->fft_img1 );
+   fftw_free ( mdp->fft_img2 );
+   fftw_free ( mdp->fft_res );
+   fftw_destroy_plan( mdp->fft_cell_plan );
+
+   cvReleaseCapture( &mdp->capture);
+   cvReleaseImage( &mdp->cur );
+   cvReleaseImage( &mdp->last );
+   /* mdp->frame is owned by the capture and was freed by cvReleaseCapture; do not release it here */
+   cvReleaseImage( &mdp->cell1 );
+   cvReleaseImage( &mdp->cell2 );
+   cvReleaseImage( &mdp->tmp );
+
+   return Qnil; /* added: the function is declared to return a VALUE */
+ }
+ VALUE rb_motion_cam (int argc, VALUE *argv, VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   VALUE camid;
+
+   rb_scan_args(argc, argv, "01", &camid);
+   if(NIL_P(camid)) {
+     camid = INT2FIX(-1);
+   }
+   mdp->mode = MODECAM;
+   mdp->camid = NUM2INT(camid);
+   rmotion_process(self);
+   return Qtrue;
+ }
+
+ VALUE rb_motion_vid (int argc, VALUE *argv, VALUE self)
+ {
+   VALUE vid;
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+
+   rb_scan_args(argc, argv, "01", &vid);
+   if(TYPE(vid) == T_STRING) {
+     mdp->mode = MODEVID;
+     mdp->vid = StringValuePtr(vid);
+   } else {
+     return Qfalse;
+   }
+   rmotion_process(self);
+   return Qtrue;
+ }
+
+ VALUE rb_motion_quit (VALUE self)
+ {
+   struct motion_data_t * mdp;
+   GetMD(motion_data,mdp);
+   mdp->quit = 1;
+   return Qtrue;
+ }
+
+ void
+ Init_rmotion ()
+ {
+   rb_cMotion = rb_define_class ("Motion", rb_cObject);
+   rb_cEntity = rb_define_class ("Entity", rb_cObject);
+
+   rb_define_alloc_func(rb_cMotion, rmotion_allocate);
+   rb_define_method (rb_cMotion, "initialize", rb_motion_initialize, 0);
+   rb_define_method (rb_cMotion, "cam", rb_motion_cam, -1);
+   rb_define_method (rb_cMotion, "vid", rb_motion_vid, -1);
+
+   rb_define_method (rb_cMotion, "show=", rb_motion_show_set, 1);
+   rb_define_method (rb_cMotion, "show?", rb_motion_show_p, 0);
+
+   rb_define_method (rb_cMotion, "fft=", rb_motion_fft_set, 1);
+   rb_define_method (rb_cMotion, "fft?", rb_motion_fft_p, 0);
+
+   rb_define_method (rb_cMotion, "fill=", rb_motion_fill_set, 1);
+   rb_define_method (rb_cMotion, "fill?", rb_motion_fill_p, 0);
+
+   rb_define_method (rb_cMotion, "rect=", rb_motion_rect_set, 1);
+   rb_define_method (rb_cMotion, "rect?", rb_motion_rect_p, 0);
+
+   rb_define_method (rb_cMotion, "point=", rb_motion_point_set, 1);
+   rb_define_method (rb_cMotion, "point?", rb_motion_point_p, 0);
+
+   rb_define_method (rb_cMotion, "write=", rb_motion_write_set, 1);
+   rb_define_method (rb_cMotion, "write?", rb_motion_write_p, 0);
+
+   rb_define_method (rb_cMotion, "threshold_distance=", rb_motion_th_distance_set, 1);
+   rb_define_method (rb_cMotion, "threshold_distance?", rb_motion_th_distance_p, 0);
+
+   rb_define_method (rb_cMotion, "threshold_group=", rb_motion_th_group_set, 1);
+   rb_define_method (rb_cMotion, "threshold_group?", rb_motion_th_group_p, 0);
+
+   rb_define_method (rb_cMotion, "threshold_fft=", rb_motion_th_fft_set, 1);
+   rb_define_method (rb_cMotion, "threshold_fft?", rb_motion_th_fft_p, 0);
+
+   rb_define_method (rb_cMotion, "threshold_direct=", rb_motion_th_direct_set, 1);
+   rb_define_method (rb_cMotion, "threshold_direct?", rb_motion_th_direct_p, 0);
+
+   rb_define_method (rb_cMotion, "quit", rb_motion_quit, 0);
+
+
+ }