puremotion 0.0.1 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.yardopts +1 -0
- data/README.md +69 -0
- data/Rakefile +23 -11
- data/examples/progress_reporting.rb +26 -0
- data/examples/simple.rb +22 -0
- data/ext/puremotion/audio.c +38 -0
- data/ext/puremotion/extconf.rb +34 -0
- data/ext/puremotion/frame.c +176 -0
- data/ext/puremotion/media.c +175 -0
- data/ext/puremotion/puremotion.c +26 -0
- data/ext/puremotion/puremotion.h +38 -0
- data/ext/puremotion/stream.c +128 -0
- data/ext/puremotion/stream_collection.c +44 -0
- data/ext/puremotion/utils.c +81 -0
- data/ext/puremotion/utils.h +6 -0
- data/ext/puremotion/video.c +141 -0
- data/lib/{puremotion/events → events}/event.rb +0 -0
- data/lib/{puremotion/events → events}/generator.rb +0 -0
- data/lib/media.rb +89 -0
- data/lib/preset/audio/audio.rb +42 -0
- data/lib/preset/file.rb +19 -0
- data/lib/preset/general.rb +28 -0
- data/lib/preset/metadata.rb +41 -0
- data/lib/preset/preset.rb +120 -0
- data/lib/preset/video/crop.rb +29 -0
- data/lib/preset/video/pad.rb +31 -0
- data/lib/preset/video/video.rb +130 -0
- data/lib/puremotion.rb +20 -12
- data/lib/puremotion_native.so +0 -0
- data/lib/threading.rb +54 -0
- data/lib/{puremotion/tools → tools}/ffmpeg.rb +12 -50
- data/lib/transcode/transcode.rb +142 -0
- data/spec/spec_helper.rb +7 -0
- data/spec/units/media_spec.rb +13 -0
- data/spec/units/preset_spec.rb +91 -0
- metadata +52 -44
- data/.document +0 -5
- data/.gitignore +0 -21
- data/README.rdoc +0 -18
- data/VERSION +0 -1
- data/lib/puremotion/codecs.rb +0 -59
- data/lib/puremotion/media.rb +0 -490
- data/lib/puremotion/media/stream.rb +0 -4
- data/lib/puremotion/media/stream/audio.rb +0 -0
- data/lib/puremotion/media/stream/base.rb +0 -7
- data/lib/puremotion/media/stream/collection.rb +0 -5
- data/lib/puremotion/media/stream/video.rb +0 -61
- data/lib/puremotion/recipes/ipod.yml +0 -12
- data/lib/puremotion/thread.rb +0 -153
- data/lib/puremotion/transcode/recipe.rb +0 -250
- data/lib/puremotion/transcode/transcode.rb +0 -153
- data/puremotion.gemspec +0 -68
- data/test/helper.rb +0 -10
- data/test/test_puremotion.rb +0 -7

data/ext/puremotion/puremotion.c
@@ -0,0 +1,26 @@
+#include "puremotion.h"
+
+VALUE rb_mPureMotion;
+VALUE rb_mStreams;
+
+void Init_puremotion_native() {
+
+    av_register_all();
+    avcodec_register_all();
+
+    // Don't sent all output into ruby interpreter
+    // This will eventually be changed by calling av_log_callback
+    // If you want to develop it's best you set this to AV_LOG_VERBOSE
+    av_log_set_level(AV_LOG_QUIET);
+
+    rb_mPureMotion = rb_define_module("PureMotion");
+    rb_mStreams = rb_define_module_under(rb_mPureMotion, "Streams");
+
+    Init_media();
+    Init_stream();
+    Init_video_stream();
+    Init_audio_stream();
+    Init_stream_collection();
+    Init_frame();
+
+}
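
A rough Ruby-side sketch of what this entry point sets up. It assumes lib/puremotion.rb (listed in this diff but not excerpted) requires the compiled puremotion_native extension, which runs Init_puremotion_native:

    # Assumed load sequence; the exact require lives in lib/puremotion.rb.
    require 'puremotion'

    PureMotion           # top-level module from rb_define_module("PureMotion")
    PureMotion::Streams  # nested module holding the stream classes defined below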

data/ext/puremotion/puremotion.h
@@ -0,0 +1,38 @@
+#include <libavformat/avformat.h>
+#include <libavutil/avutil.h>
+#include <libavcodec/avcodec.h>
+#include <libswscale/swscale.h>
+#include <gd.h>
+
+#ifdef RSHIFT
+#undef RSHIFT
+#endif
+
+#define DEBUG
+
+#include "ruby.h"
+
+RUBY_EXTERN VALUE rb_mPureMotion;
+RUBY_EXTERN VALUE rb_mStreams;
+RUBY_EXTERN VALUE rb_cMedia;
+RUBY_EXTERN VALUE rb_cStream;
+RUBY_EXTERN VALUE rb_cVideoStream;
+RUBY_EXTERN VALUE rb_cAudioStream;
+RUBY_EXTERN VALUE rb_cStreamCollection;
+RUBY_EXTERN VALUE rb_cFrame;
+
+RUBY_EXTERN VALUE rb_eUnsupportedFormat;
+
+RUBY_EXTERN void Init_puremotion_native();
+RUBY_EXTERN void Init_media();
+RUBY_EXTERN void Init_stream();
+RUBY_EXTERN void Init_video_stream();
+RUBY_EXTERN void Init_audio_stream();
+RUBY_EXTERN void Init_stream_collection();
+RUBY_EXTERN void Init_frame();
+
+VALUE build_stream_collection(VALUE media);
+VALUE build_stream(AVStream *stream, VALUE rb_media);
+VALUE build_video_stream(AVStream *stream, VALUE rb_media);
+VALUE build_audio_stream(AVStream *stream, VALUE rb_media);
+VALUE build_frame_object(AVFrame * frame, int width, int height, int pixel_format);

data/ext/puremotion/stream.c
@@ -0,0 +1,128 @@
+#include "puremotion.h"
+#include "utils.h"
+
+VALUE rb_cStream;
+VALUE rb_mStreams;
+
+static int next_packet(AVFormatContext * format_context, AVPacket * packet)
+{
+    if(packet->data != NULL)
+        av_free_packet(packet);
+
+    if(av_read_frame(format_context, packet) < 0) {
+        return -1;
+    }
+
+    return 0;
+}
+
+static int next_packet_for_stream(AVFormatContext * format_context, int stream_index, AVPacket * packet)
+{
+    int ret = 0;
+    do {
+        ret = next_packet(format_context, packet);
+    } while(packet->stream_index != stream_index && ret == 0);
+
+    return ret;
+}
+
+static VALUE stream_type( VALUE self ) {
+    AVStream * stream = get_stream(self);
+
+    VALUE type = rb_sym("unknown");
+
+    switch( stream->codec->codec_type ) {
+        case CODEC_TYPE_AUDIO:
+            type = rb_sym("audio");
+            break;
+        case CODEC_TYPE_VIDEO:
+            type = rb_sym("video");
+            break;
+    }
+
+    return type;
+
+}
+
+static VALUE stream_duration(VALUE self) {
+    AVStream *stream = get_stream(self);
+
+    if (stream->duration == AV_NOPTS_VALUE) return Qnil;
+
+    return (rb_float_new(stream->duration * av_q2d(stream->time_base)));
+}
+
+static VALUE stream_seek(VALUE self, VALUE position) {
+    AVFormatContext * format_context = get_format_context(rb_iv_get(self, "@media"));
+    AVStream * stream = get_stream(self);
+
+    int64_t timestamp = NUM2LONG(position) / av_q2d(stream->time_base);
+
+    int ret;
+    if (format_context->start_time != AV_NOPTS_VALUE)
+        timestamp += format_context->start_time;
+
+    //fprintf(stderr, "seeking to %d\n", NUM2INT(position));
+    ret = av_seek_frame(format_context, stream->index, timestamp, 0);
+    if (ret < 0) {
+        rb_raise(rb_eRangeError, "could not seek %s to pos %f",
+            format_context->filename, timestamp * av_q2d(stream->time_base));
+    }
+
+    //fprintf(stderr, "seeked.\n");
+    return self;
+}
+
+static VALUE stream_bitrate(VALUE self) {
+    AVStream * stream = get_stream(self);
+
+    rb_float_new(stream->codec->bit_rate);
+}
+
+
+static VALUE stream_position(VALUE self) {
+    AVFormatContext * format_context = get_format_context(rb_iv_get(self, "@media"));
+    AVStream * stream = get_stream(self);
+    AVPacket decoding_packet;
+
+    av_init_packet(&decoding_packet);
+
+    do {
+        if(av_read_frame(format_context, &decoding_packet) < 0) {
+            rb_raise(rb_eRuntimeError, "error extracting packet");
+        }
+    } while(decoding_packet.stream_index != stream->index);
+
+    return rb_float_new(decoding_packet.pts * (double)av_q2d(stream->time_base));
+}
+
+static VALUE stream_init(VALUE self, VALUE media) {
+    //printf("Stream initialized\n");
+    rb_iv_set(self, "@media", media);
+    return self;
+}
+
+static VALUE alloc_stream(VALUE self) {
+    //printf("Stream allocating...\n");
+    AVStream * stream = av_new_stream(NULL, 0);
+    //printf("Stream wrapping...\n");
+    return Data_Wrap_Struct(rb_cStream, 0, 0, stream);
+}
+
+VALUE build_stream(AVStream *stream, VALUE rb_media) {
+    //printf("Stream building...\n");
+    VALUE rb_stream = Data_Wrap_Struct(rb_cStream, 0, 0, stream);
+    //printf("Stream wrapped\n");
+    return stream_init(rb_stream, rb_media);
+}
+
+void Init_stream() {
+    rb_cStream = rb_define_class_under(rb_mStreams, "Stream", rb_cObject);
+    rb_define_alloc_func(rb_cStream, alloc_stream);
+
+    rb_define_method(rb_cStream, "type", stream_type, 0);
+    rb_define_method(rb_cStream, "duration", stream_duration, 0);
+    rb_define_method(rb_cStream, "bitrate", stream_bitrate, 0);
+    rb_define_method(rb_cStream, "seek", stream_seek, 1);
+    rb_define_method(rb_cStream, "position", stream_position, 0);
+}
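
Init_stream registers PureMotion::Streams::Stream with type, duration, bitrate, seek and position. A minimal usage sketch; PureMotion::Media.new and its streams accessor are assumptions based on media.c and lib/media.rb, which are listed in this diff but not excerpted:

    media  = PureMotion::Media.new('movie.avi')  # hypothetical input file
    stream = media.streams.first                 # a PureMotion::Streams::Stream

    stream.type      # => :video, :audio or :unknown (stream_type)
    stream.duration  # => seconds as a Float, or nil when the duration is AV_NOPTS_VALUE
    stream.bitrate   # bit rate taken from the codec context (stream_bitrate)
    stream.seek(10)  # seek near 10 s; raises RangeError if av_seek_frame fails
    stream.position  # => pts of the next packet for this stream, in seconds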

data/ext/puremotion/stream_collection.c
@@ -0,0 +1,44 @@
+#include "puremotion.h"
+
+VALUE rb_cStreamCollection;
+VALUE rb_mStreams;
+
+static VALUE stream_collection_init(VALUE self) {
+
+    return self;
+
+}
+
+VALUE build_stream_collection(VALUE media) {
+
+
+    AVFormatContext *format_context = get_format_context(media);
+
+    VALUE rb_streams = rb_ary_new();
+
+    int i, stream_idx = 0;
+
+    for( i = 0; i < format_context->nb_streams; i++ ) {
+        AVStream *stream = format_context->streams[i];
+
+        VALUE rb_stream = Qnil;
+
+        if( stream->codec->codec_type == CODEC_TYPE_VIDEO ) rb_stream = build_video_stream( stream, media );
+        if( stream->codec->codec_type == CODEC_TYPE_AUDIO ) rb_stream = build_audio_stream( stream, media );
+
+        if( rb_stream != Qnil ) {
+            rb_ary_store(rb_streams, stream_idx, rb_stream);
+            stream_idx++;
+        }
+
+    }
+
+    return rb_streams;
+}
+
+void Init_stream_collection() {
+
+    rb_cStreamCollection = rb_define_class_under(rb_mStreams, "Collection", rb_cArray);
+    // rb_define_method(rb_cStreamCollection, "initialize", stream_collection_init, 1);
+
+}
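
build_stream_collection only wraps video and audio streams; subtitle, data and attachment streams are dropped by the loop above. Continuing the sketch (the Audio class name is an assumption taken from audio.c, which is not excerpted here):

    media.streams.map(&:class)
    # => e.g. [PureMotion::Streams::Video, PureMotion::Streams::Audio]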

data/ext/puremotion/utils.c
@@ -0,0 +1,81 @@
+#include "utils.h"
+
+AVFormatContext * get_format_context(VALUE self)
+{
+    AVFormatContext * format_context = NULL;
+    Data_Get_Struct(self, AVFormatContext, format_context);
+    if (NULL == format_context) {
+        rb_fatal("FFMPEG internal error\n");
+    }
+    return format_context;
+}
+
+AVStream * get_stream(VALUE self)
+{
+    AVStream * stream = NULL;
+    Data_Get_Struct(self, AVStream, stream);
+    if (NULL == stream) {
+        rb_fatal("FFMPEG internal error\n");
+    }
+    return stream;
+}
+
+AVCodecContext * get_codec_context(VALUE self)
+{
+    AVCodecContext * codec_context = NULL;
+    Data_Get_Struct(self, AVCodecContext, codec_context);
+    if (NULL == codec_context) {
+        rb_fatal("FFMPEG internal error\n");
+    }
+    return codec_context;
+}
+
+AVFrame * get_frame(VALUE self)
+{
+    AVFrame * frame = NULL;
+    Data_Get_Struct(self, AVFrame, frame);
+    if (NULL == frame) {
+        rb_fatal("FFMPEG internal error\n");
+    }
+    return frame;
+}
+
+VALUE rb_sym(const char *s) {
+    return rb_str_intern(rb_str_new2(s));
+}
+
+VALUE codec_type_id_to_sym(int codec_type)
+{
+    VALUE type_sym;
+    switch(codec_type) {
+        case CODEC_TYPE_AUDIO:
+            type_sym = rb_sym("audio");
+            break;
+
+        case CODEC_TYPE_VIDEO:
+            type_sym = rb_sym("video");
+            break;
+
+        case CODEC_TYPE_SUBTITLE:
+            type_sym = rb_sym("subtitle");
+            break;
+
+        case CODEC_TYPE_DATA:
+            type_sym = rb_sym("data");
+            break;
+
+        case CODEC_TYPE_ATTACHMENT:
+            type_sym = rb_sym("attachment");
+            break;
+
+        case CODEC_TYPE_NB:
+            type_sym = rb_sym("nb");
+            break;
+
+        default:
+            type_sym = rb_sym("unknown");
+            break;
+    }
+
+    return type_sym;
+}

data/ext/puremotion/video.c
@@ -0,0 +1,141 @@
+#include "puremotion.h"
+#include "utils.h"
+
+VALUE rb_cStream;
+VALUE rb_cVideoStream;
+VALUE rb_mStreams;
+
+static VALUE stream_frame_rate(VALUE self) {
+    AVStream * stream = get_stream(self);
+    return(rb_float_new(av_q2d(stream->r_frame_rate)));
+}
+
+static int extract_next_frame(AVFormatContext * format_context, AVCodecContext * codec_context, int stream_index, AVFrame * frame, AVPacket * decoding_packet) {
+    // open codec to decode the video if needed
+    if (NULL == codec_context->codec) {
+        rb_fatal("codec should have already been opened");
+    }
+
+    uint8_t * databuffer;
+
+    int remaining = 0;
+    int decoded;
+    int frame_complete = 0;
+    int next;
+
+    while(!frame_complete &&
+          0 == (next = next_packet_for_stream(format_context, stream_index, decoding_packet))) {
+        // setting parameters before processing decoding_packet data
+        remaining = decoding_packet->size;
+        databuffer = decoding_packet->data;
+
+        while(remaining > 0) {
+            decoded = avcodec_decode_video(codec_context, frame, &frame_complete,
+                databuffer, remaining);
+            remaining -= decoded;
+            // pointer seek forward
+            databuffer += decoded;
+        }
+    }
+
+    return next;
+}
+
+/*
+ * call-seq: grab => PureMotion::Frame
+ *
+ *
+ *
+ */
+
+static VALUE stream_grab(VALUE self) {
+    AVFormatContext * format_context = get_format_context(rb_iv_get(self, "@media"));
+    AVStream * stream = get_stream(self);
+
+    AVCodecContext * codec_context = stream->codec;
+
+    // open codec to decode the video if needed
+    if (!codec_context->codec) {
+        AVCodec * codec = avcodec_find_decoder(codec_context->codec_id);
+        if (!codec)
+            rb_raise(rb_eRuntimeError, "error codec not found");
+        if (avcodec_open(codec_context, codec) < 0)
+            rb_raise(rb_eRuntimeError, "error while opening codec : %s", codec->name);
+    }
+
+    VALUE rb_frame = rb_funcall(rb_const_get(rb_mPureMotion, rb_intern("Frame")),
+        rb_intern("new"), 3,
+        INT2NUM(codec_context->width),
+        INT2NUM(codec_context->height),
+        INT2NUM(codec_context->pix_fmt));
+
+    AVFrame * frame = get_frame(rb_frame);
+    avcodec_get_frame_defaults(frame);
+
+    AVPacket decoding_packet;
+    av_init_packet(&decoding_packet);
+
+    if (rb_block_given_p()) {
+        int ret;
+        do {
+            ret = extract_next_frame(format_context, stream->codec,
+                stream->index, frame, &decoding_packet);
+            rb_yield(
+                rb_ary_new3(
+                    3,
+                    rb_frame,
+                    rb_float_new(decoding_packet.pts * (double)av_q2d(stream->time_base)),
+                    rb_float_new(decoding_packet.dts * (double)av_q2d(stream->time_base))
+                )
+            );
+        } while (ret == 0);
+    } else {
+        extract_next_frame(format_context, stream->codec,
+            stream->index, frame, &decoding_packet);
+        return rb_frame;
+    }
+
+    return self;
+}
+
+static VALUE stream_resolution(VALUE self, VALUE media) {
+    AVFormatContext * format_context = get_format_context(rb_iv_get(self, "@media"));
+    AVStream * stream = get_stream(self);
+
+    VALUE width = INT2NUM(stream->codec->width);
+    VALUE height = INT2NUM(stream->codec->height);
+
+    VALUE res = rb_ary_new2(2);
+    rb_ary_store(res, 0, width);
+    rb_ary_store(res, 1, height);
+
+    return res;
+}
+
+static VALUE video_stream_init(VALUE self, VALUE media) {
+    //printf("Stream initialized\n");
+    rb_iv_set(self, "@media", media);
+    return self;
+}
+
+static VALUE alloc_video_stream(VALUE self) {
+    //printf("Stream allocating...\n");
+    AVStream * stream = av_new_stream(NULL, 0);
+    //printf("Stream wrapping...\n");
+    return Data_Wrap_Struct(rb_cVideoStream, 0, 0, stream);
+}
+
+VALUE build_video_stream(AVStream *stream, VALUE rb_media) {
+    //printf("Stream building...\n");
+    VALUE rb_stream = Data_Wrap_Struct(rb_cVideoStream, 0, 0, stream);
+    //printf("Stream wrapped\n");
+    return video_stream_init(rb_stream, rb_media);
+}
+
+void Init_video_stream() {
+    rb_cVideoStream = rb_define_class_under(rb_mStreams, "Video", rb_cStream);
+
+    rb_define_method(rb_cVideoStream, "resolution", stream_resolution, 0);
+    rb_define_method(rb_cVideoStream, "frame_rate", stream_frame_rate, 0);
+    rb_define_method(rb_cVideoStream, "grab", stream_grab, 0);
+}
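
Video streams add resolution, frame_rate and grab. With a block, grab keeps decoding and yields [frame, pts, dts] until extract_next_frame runs out of packets; without one, it decodes a single frame and returns a PureMotion::Frame. A sketch under the same assumptions as above:

    media = PureMotion::Media.new('movie.avi')   # hypothetical, as above
    video = media.streams.find { |s| s.type == :video }

    video.resolution  # => [width, height]
    video.frame_rate  # => frames per second as a Float

    frame = video.grab            # one decoded PureMotion::Frame

    video.grab do |f, pts, dts|   # timestamps in seconds (time_base applied)
      # work with each decoded frame; Frame's API lives in frame.c,
      # which is listed in this diff but not excerpted here.
    end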