fluq 0.7.0
- data/.gitignore +3 -0
- data/.travis.yml +6 -0
- data/Gemfile +6 -0
- data/Gemfile.lock +39 -0
- data/MIT-LICENCE +19 -0
- data/README.md +10 -0
- data/Rakefile +11 -0
- data/benchmark/logging.rb +37 -0
- data/benchmark/socket.rb +52 -0
- data/bin/fluq-rb +8 -0
- data/examples/common.rb +3 -0
- data/examples/simple.rb +5 -0
- data/fluq.gemspec +33 -0
- data/lib/fluq.rb +50 -0
- data/lib/fluq/buffer.rb +6 -0
- data/lib/fluq/buffer/base.rb +51 -0
- data/lib/fluq/buffer/file.rb +68 -0
- data/lib/fluq/cli.rb +142 -0
- data/lib/fluq/dsl.rb +49 -0
- data/lib/fluq/dsl/options.rb +27 -0
- data/lib/fluq/error.rb +2 -0
- data/lib/fluq/event.rb +55 -0
- data/lib/fluq/feed.rb +6 -0
- data/lib/fluq/feed/base.rb +18 -0
- data/lib/fluq/feed/json.rb +28 -0
- data/lib/fluq/feed/msgpack.rb +27 -0
- data/lib/fluq/feed/tsv.rb +30 -0
- data/lib/fluq/handler.rb +6 -0
- data/lib/fluq/handler/base.rb +80 -0
- data/lib/fluq/handler/log.rb +67 -0
- data/lib/fluq/handler/null.rb +4 -0
- data/lib/fluq/input.rb +6 -0
- data/lib/fluq/input/base.rb +59 -0
- data/lib/fluq/input/socket.rb +50 -0
- data/lib/fluq/input/socket/connection.rb +41 -0
- data/lib/fluq/mixins.rb +6 -0
- data/lib/fluq/mixins/loggable.rb +7 -0
- data/lib/fluq/mixins/logger.rb +26 -0
- data/lib/fluq/reactor.rb +76 -0
- data/lib/fluq/testing.rb +26 -0
- data/lib/fluq/url.rb +16 -0
- data/lib/fluq/version.rb +3 -0
- data/spec/fluq/buffer/base_spec.rb +21 -0
- data/spec/fluq/buffer/file_spec.rb +47 -0
- data/spec/fluq/dsl/options_spec.rb +24 -0
- data/spec/fluq/dsl_spec.rb +43 -0
- data/spec/fluq/event_spec.rb +25 -0
- data/spec/fluq/feed/base_spec.rb +15 -0
- data/spec/fluq/feed/json_spec.rb +27 -0
- data/spec/fluq/feed/msgpack_spec.rb +27 -0
- data/spec/fluq/feed/tsv_spec.rb +27 -0
- data/spec/fluq/handler/base_spec.rb +70 -0
- data/spec/fluq/handler/log_spec.rb +68 -0
- data/spec/fluq/handler/null_spec.rb +11 -0
- data/spec/fluq/input/base_spec.rb +29 -0
- data/spec/fluq/input/socket/connection_spec.rb +35 -0
- data/spec/fluq/input/socket_spec.rb +45 -0
- data/spec/fluq/mixins/loggable_spec.rb +10 -0
- data/spec/fluq/mixins/logger_spec.rb +25 -0
- data/spec/fluq/reactor_spec.rb +58 -0
- data/spec/fluq/url_spec.rb +16 -0
- data/spec/fluq_spec.rb +11 -0
- data/spec/scenario/config/nested/common.rb +3 -0
- data/spec/scenario/config/test.rb +3 -0
- data/spec/scenario/lib/fluq/handler/custom/test_handler.rb +4 -0
- data/spec/spec_helper.rb +12 -0
- data/spec/support/configuration.rb +25 -0
- metadata +242 -0

data/spec/fluq/feed/json_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe FluQ::Feed::Json do
+
+  let(:buffer) { FluQ::Buffer::Base.new }
+  let(:event) { FluQ::Event.new("some.tag", 1313131313, "a" => "b") }
+
+  before do
+    io = StringIO.new [event, event, event].map(&:to_json).join("\n")
+    buffer.stub(:drain).and_yield(io)
+  end
+
+  subject do
+    described_class.new(buffer)
+  end
+
+  it { should be_a(FluQ::Feed::Base) }
+  its(:to_a) { should == [event, event, event] }
+
+  it 'should log invalid inputs' do
+    io = StringIO.new [event.to_json, "ABCD", event.to_json].join("\n")
+    buffer.stub(:drain).and_yield(io)
+    subject.logger.should_receive(:warn).at_least(:once)
+    subject.to_a.should == [event, event]
+  end
+
+end
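
What this spec pins down: FluQ::Feed::Json wraps a buffer, yields one event per JSON-encoded line, and logs a warning instead of raising when a line cannot be parsed. Below is a minimal standalone sketch of that behaviour; it is not the gem's implementation, and the [tag, timestamp, record] line layout is an assumption.

# Hypothetical sketch only -- the event wire format is assumed, not taken from the gem.
require 'json'
require 'logger'

class JsonFeedSketch
  def initialize(buffer, logger = Logger.new($stderr))
    @buffer, @logger = buffer, logger
  end

  # Drain the buffer and decode one JSON-encoded event per line.
  def to_a
    events = []
    @buffer.drain do |io|
      io.each_line do |line|
        begin
          tag, timestamp, record = JSON.parse(line)   # assumed layout: [tag, timestamp, record]
          events << [tag, timestamp, record]
        rescue JSON::ParserError => e
          @logger.warn "skipping invalid line: #{e.message}"   # matches the 'invalid inputs' example
        end
      end
    end
    events
  end
end
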

data/spec/fluq/feed/msgpack_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe FluQ::Feed::Msgpack do
+
+  let(:buffer) { FluQ::Buffer::Base.new }
+  let(:event) { FluQ::Event.new("some.tag", 1313131313, "a" => "b") }
+
+  before do
+    io = StringIO.new [event, event, event].map(&:to_msgpack).join
+    buffer.stub(:drain).and_yield(io)
+  end
+
+  subject do
+    described_class.new(buffer)
+  end
+
+  it { should be_a(FluQ::Feed::Base) }
+  its(:to_a) { should == [event, event, event] }
+
+  it 'should log invalid inputs' do
+    io = StringIO.new [event.to_msgpack, "ABCD", event.to_msgpack].join
+    buffer.stub(:drain).and_yield(io)
+    subject.logger.should_receive(:warn).at_least(:once)
+    subject.to_a.should == [event, event]
+  end
+
+end
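
The MessagePack feed covers the same contract with a binary stream instead of newline-delimited JSON. One plausible way to decode such a stream is msgpack-ruby's MessagePack::Unpacker, sketched here under the assumption that events pack as [tag, timestamp, record] arrays; the gem may do this differently.

# Hypothetical sketch -- assumes events pack as [tag, timestamp, record] arrays.
require 'msgpack'
require 'stringio'

io = StringIO.new(["some.tag", 1313131313, { "a" => "b" }].to_msgpack * 3)

events = []
begin
  MessagePack::Unpacker.new(io).each {|(tag, timestamp, record)| events << [tag, timestamp, record] }
rescue MessagePack::UnpackError => e
  warn "skipping malformed data: #{e.message}"   # the spec expects a warning, not an exception
end
events.size # => 3
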

data/spec/fluq/feed/tsv_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe FluQ::Feed::Tsv do
+
+  let(:buffer) { FluQ::Buffer::Base.new }
+  let(:event) { FluQ::Event.new("some.tag", 1313131313, "a" => "b") }
+
+  before do
+    io = StringIO.new [event, event, event].map(&:to_tsv).join("\n")
+    buffer.stub(:drain).and_yield(io)
+  end
+
+  subject do
+    described_class.new(buffer)
+  end
+
+  it { should be_a(FluQ::Feed::Base) }
+  its(:to_a) { should == [event, event, event] }
+
+  it 'should log invalid inputs' do
+    io = StringIO.new [event.to_tsv, "ABCD", event.to_tsv].join("\n")
+    buffer.stub(:drain).and_yield(io)
+    subject.logger.should_receive(:warn).at_least(:once)
+    subject.to_a.should == [event, event]
+  end
+
+end
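
The TSV feed rounds out the trio; only the line format changes. The layout sketched below (tag, timestamp and a JSON-encoded record separated by tabs) is an assumption, though it is consistent with the log line the Handler::Log spec further down expects.

# Hypothetical illustration of a tab-separated event line and how it decodes.
require 'json'

line = %(some.tag\t1313131313\t{"a":"b"})
tag, timestamp, record = line.split("\t", 3)
[tag, Integer(timestamp), JSON.parse(record)]
# => ["some.tag", 1313131313, {"a"=>"b"}]
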

data/spec/fluq/handler/base_spec.rb
@@ -0,0 +1,70 @@
+require 'spec_helper'
+
+describe FluQ::Handler::Base do
+
+  subject { described_class.new reactor }
+
+  it { should respond_to(:on_events) }
+  it { should be_a(FluQ::Mixins::Loggable) }
+  its(:reactor) { should be(reactor) }
+  its(:config) { should == { pattern: /./ } }
+  its(:pattern) { should == /./ }
+  its(:name) { should == "base-AxPGxv" }
+
+  def events(*tags)
+    tags.map {|tag| event(tag) }
+  end
+
+  def event(tag)
+    FluQ::Event.new(tag, 1313131313, {})
+  end
+
+  it 'should have a type' do
+    described_class.type.should == "base"
+  end
+
+  it 'can have custom names' do
+    described_class.new(reactor, name: "visitors").name.should == "visitors"
+  end
+
+  it 'should match tags via patterns' do
+    subject = described_class.new(reactor, pattern: "visits.????.*")
+    subject.match?(event("visits.site.1")).should be(true)
+    subject.match?(event("visits.page.2")).should be(true)
+    subject.match?(event("visits.other.1")).should be(false)
+    subject.match?(event("visits.site")).should be(false)
+    subject.match?(event("visits.site.")).should be(true)
+    subject.match?(event("prefix.visits.site.1")).should be(false)
+    subject.match?(event("visits.site.1.suffix")).should be(true)
+  end
+
+  it 'should support "or" patterns' do
+    subject = described_class.new(reactor, pattern: "visits.{site,page}.*")
+    subject.match?(event("visits.site.1")).should be(true)
+    subject.match?(event("visits.page.2")).should be(true)
+    subject.match?(event("visits.other.1")).should be(false)
+    subject.match?(event("visits.site")).should be(false)
+    subject.match?(event("visits.site.")).should be(true)
+    subject.match?(event("prefix.visits.site.1")).should be(false)
+    subject.match?(event("visits.site.1.suffix")).should be(true)
+  end
+
+  it 'should support regular expression patterns' do
+    subject = described_class.new(reactor, pattern: /^visits\.(?:s|p)\w{3}\..*/)
+    subject.match?(event("visits.site.1")).should be(true)
+    subject.match?(event("visits.page.2")).should be(true)
+    subject.match?(event("visits.other.1")).should be(false)
+    subject.match?(event("visits.site")).should be(false)
+    subject.match?(event("visits.site.")).should be(true)
+    subject.match?(event("prefix.visits.site.1")).should be(false)
+    subject.match?(event("visits.site.1.suffix")).should be(true)
+  end
+
+  it 'should select events' do
+    stream = events("visits.site.1", "visits.page.2", "visits.other.1", "visits.site.2")
+    described_class.new(reactor, pattern: "visits.????.*").select(stream).map(&:tag).should == [
+      "visits.site.1", "visits.page.2", "visits.site.2"
+    ]
+  end
+
+end
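
All three pattern styles exercised above (glob wildcards, {a,b} alternatives, and plain regular expressions) are expected to produce the same truth table. The glob semantics can be reproduced with Ruby's File.fnmatch plus the FNM_EXTGLOB flag for the brace alternatives; this only illustrates the expected behaviour and is not necessarily how match? is implemented in the gem.

# Illustration of the expected matching semantics using Ruby's File.fnmatch.
pattern = "visits.{site,page}.*"
flags   = File::FNM_EXTGLOB

File.fnmatch(pattern, "visits.site.1",        flags)  # => true
File.fnmatch(pattern, "visits.page.2",        flags)  # => true
File.fnmatch(pattern, "visits.other.1",       flags)  # => false
File.fnmatch(pattern, "visits.site",          flags)  # => false
File.fnmatch(pattern, "visits.site.",         flags)  # => true
File.fnmatch(pattern, "prefix.visits.site.1", flags)  # => false
File.fnmatch(pattern, "visits.site.1.suffix", flags)  # => true
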

data/spec/fluq/handler/log_spec.rb
@@ -0,0 +1,68 @@
+require 'spec_helper'
+
+describe FluQ::Handler::Log do
+
+  let(:event) do
+    FluQ::Event.new("my.special.tag", 1313131313, { "a" => "1" })
+  end
+  let(:root) { FluQ.root.join("../scenario/log/raw") }
+  subject { described_class.new reactor }
+  before { FileUtils.rm_rf(root); FileUtils.mkdir_p(root) }
+
+  it { should be_a(FluQ::Handler::Base) }
+  its("config.keys") { should =~ [:convert, :path, :pattern, :rewrite, :cache_max, :cache_ttl] }
+
+  it "can log events" do
+    subject.on_events [event]
+    subject.pool.each_key {|k| subject.pool[k].flush }
+    root.join("my/special/tag/20110812/06.log").read.should == %(my.special.tag\t1313131313\t{"a":"1"}\n)
+  end
+
+  it 'can have custom conversions' do
+    subject = described_class.new reactor, convert: lambda {|e| e.merge(ts: e.timestamp).map {|k,v| "#{k}=#{v}" }.join(',') }
+    subject.on_events [event]
+    subject.pool.each_key {|k| subject.pool[k].flush }
+    root.join("my/special/tag/20110812/06.log").read.should == "a=1,ts=1313131313\n"
+  end
+
+  it 'can rewrite tags' do
+    subject = described_class.new reactor, rewrite: lambda {|t| t.split('.').reverse.first(2).join(".") }
+    subject.on_events [event]
+    root.join("tag.special/20110812/06.log").should be_file
+  end
+
+  it 'should not fail on temporary file errors' do
+    subject.on_events [event]
+    subject.pool.each_key {|k| subject.pool[k].close }
+    subject.on_events [event]
+    subject.pool.each_key {|k| subject.pool[k].flush }
+    root.join("my/special/tag/20110812/06.log").read.should have(2).lines
+  end
+
+  describe described_class::FilePool do
+    subject { described_class::FilePool.new(max_size: 2) }
+    let(:path) { root.join("a.log") }
+
+    it { should be_a(TimedLRU) }
+
+    it 'should open files' do
+      lambda {
+        subject.open(path).should be_instance_of(File)
+      }.should change { subject.keys }.from([]).to([path.to_s])
+    end
+
+    it 'should re-use open files' do
+      fd = subject.open(path)
+      lambda {
+        subject.open(path).should be(fd)
+      }.should_not change { subject.keys }
+    end
+
+    it 'should auto-close files' do
+      fd = subject.open(path)
+      fd.should be_autoclose
+    end
+
+  end
+
+end
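
Worth spelling out from the expectations above: events are appended to one file per tag and hour, with the tag's dot-separated segments becoming directories and the UTC timestamp choosing the YYYYMMDD/HH.log file (1313131313 is 2011-08-12 06:41:53 UTC, hence 20110812/06.log). A small illustration of that path arithmetic, with a made-up helper name:

# Illustration of the path layout the spec asserts; log_path_for is a made-up helper.
def log_path_for(tag, timestamp)
  time = Time.at(timestamp).utc
  File.join(*tag.split("."), time.strftime("%Y%m%d"), time.strftime("%H.log"))
end

log_path_for("my.special.tag", 1313131313)
# => "my/special/tag/20110812/06.log"
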

data/spec/fluq/input/base_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe FluQ::Input::Base do
+
+  let(:event) { FluQ::Event.new("some.tag", 1313131313, {}) }
+  let!(:handler) { reactor.register FluQ::Handler::Test }
+  subject { described_class.new(reactor, feed: "json") }
+
+  it { should be_a(FluQ::Mixins::Loggable) }
+  its(:reactor) { should be(reactor) }
+  its(:config) { should == {feed: "json", buffer: "file", buffer_options: {}} }
+  its(:name) { should == "base" }
+  its(:feed_klass) { should == FluQ::Feed::Json }
+  its(:buffer_klass) { should == FluQ::Buffer::File }
+
+  it 'should create new buffers' do
+    (b1 = subject.new_buffer).should be_instance_of(FluQ::Buffer::File)
+    (b2 = subject.new_buffer).should be_instance_of(FluQ::Buffer::File)
+    b1.should_not be(b2)
+  end
+
+  it 'should flush buffers' do
+    buf = subject.new_buffer
+    buf.write [event, event].map(&:to_json).join("\n")
+    subject.flush!(buf)
+    handler.should have(2).events
+  end
+
+end
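
The flush! example is the core contract of an input: bytes written to a buffer are decoded by the configured feed and handed to the reactor. Below is a rough sketch of that step, using only methods these specs reference (new_buffer, feed_klass, Feed#to_a, Reactor#process); the exact body is an assumption.

# Hypothetical sketch of Input::Base#flush!; the collaborators are those named in the specs.
def flush!(buffer)
  events = feed_klass.new(buffer).to_a     # e.g. FluQ::Feed::Json when feed: "json"
  reactor.process(events) unless events.empty?
end
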

data/spec/fluq/input/socket/connection_spec.rb
@@ -0,0 +1,35 @@
+require 'spec_helper'
+
+describe FluQ::Input::Socket::Connection do
+
+  let(:event) { FluQ::Event.new("some.tag", 1313131313, {}) }
+  let(:input) { FluQ::Input::Socket.new reactor, bind: "tcp://127.0.0.1:26712" }
+  before { EventMachine.stub(:set_comm_inactivity_timeout) }
+  subject { described_class.new(Time.now.to_i, input) }
+
+  it { should be_a(EM::Connection) }
+
+  it 'should set a timeout' do
+    EventMachine.should_receive(:set_comm_inactivity_timeout).with(instance_of(Fixnum), 60)
+    subject
+  end
+
+  it 'should handle data' do
+    subject.receive_data [event, event].map(&:to_msgpack).join
+    subject.send(:buffer).size.should == 38
+  end
+
+  it 'should flush when data transfer is complete' do
+    subject.receive_data [event, event].map(&:to_msgpack).join
+    input.should_receive(:flush!).with(instance_of(FluQ::Buffer::File))
+    subject.unbind
+  end
+
+  it 'should recover connection errors' do
+    reactor.should_receive(:process).and_raise(Errno::ECONNRESET)
+    FluQ.logger.should_receive(:crash)
+    subject.receive_data [event, event].map(&:to_msgpack).join
+    subject.unbind
+  end
+
+end
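
Read together, these examples say that incoming bytes are buffered unparsed (hence the raw byte count of 38), that parsing and dispatch happen in unbind once the peer disconnects, and that reactor errors are reported via FluQ.logger.crash rather than killing the connection. A hypothetical outline of such a connection; the method bodies are assumptions, while the collaborator calls are the ones the spec stubs.

# Hypothetical outline, not the gem's code.
require 'eventmachine'

class ConnectionSketch < EventMachine::Connection
  def initialize(input)
    @input  = input
    @buffer = input.new_buffer
    EventMachine.set_comm_inactivity_timeout(signature, 60)  # drop idle peers after 60s
  end

  def receive_data(data)
    @buffer.write(data)       # store raw bytes; decoding is the feed's job
  end

  def unbind
    @input.flush!(@buffer)    # parse and dispatch once the transfer is complete
  rescue SystemCallError => e
    FluQ.logger.crash("connection flush failed", e)
  end
end
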

data/spec/fluq/input/socket_spec.rb
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe FluQ::Input::Socket do
+
+  let(:event) { FluQ::Event.new("some.tag", 1313131313, {}) }
+
+  def input(reactor)
+    described_class.new(reactor, bind: "tcp://127.0.0.1:26712")
+  end
+
+  subject { input(reactor) }
+  it { should be_a(FluQ::Input::Base) }
+  its(:name) { should == "socket (tcp://127.0.0.1:26712)" }
+  its(:config) { should == {feed: "msgpack", buffer: "file", buffer_options: {}, bind: "tcp://127.0.0.1:26712"} }
+
+  it 'should require bind option' do
+    lambda { described_class.new(reactor) }.should raise_error(ArgumentError, /No URL to bind/)
+  end
+
+  it 'should handle requests' do
+    with_reactor do |reactor|
+      server = input(reactor)
+      lambda { TCPSocket.open("127.0.0.1", 26712) }.should raise_error(Errno::ECONNREFUSED)
+
+      server.run
+      client = TCPSocket.open("127.0.0.1", 26712)
+
+      client.write event.to_msgpack
+      client.close
+    end
+  end
+
+  it 'should support UDP' do
+    h = nil
+    with_reactor do |reactor|
+      h = reactor.register FluQ::Handler::Test
+      reactor.listen described_class, bind: "udp://127.0.0.1:26713"
+      client = UDPSocket.new
+      client.send event.to_msgpack, 0, "127.0.0.1", 26713
+      client.close
+    end
+    h.should have(1).events
+  end
+
+end
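
The bind option is a URL whose scheme picks the transport: tcp:// for a stream server, udp:// for a datagram socket, with MessagePack as the default feed per the config expectation above. A purely illustrative sketch of mapping such a URL onto EventMachine:

# Hypothetical sketch; handler_class stands in for the connection class an input would use.
require 'uri'
require 'eventmachine'

def listen(bind, handler_class, *args)
  url = URI.parse(bind)
  case url.scheme
  when "tcp" then EventMachine.start_server(url.host, url.port, handler_class, *args)
  when "udp" then EventMachine.open_datagram_socket(url.host, url.port, handler_class, *args)
  else raise ArgumentError, "unsupported scheme: #{url.scheme.inspect}"
  end
end
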

data/spec/fluq/mixins/logger_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe FluQ::Mixins::Logger do
+
+  subject do
+    logger = Logger.new("/dev/null")
+    logger.extend described_class
+    logger
+  end
+
+  its(:exception_handlers) { should == [] }
+
+  it 'should register handlers' do
+    subject.exception_handler {|*| }
+    subject.should have(1).exception_handlers
+  end
+
+  it 'should apply handlers on crash' do
+    str = ""
+    subject.exception_handler {|ex| str << ex.message }
+    subject.crash("error", StandardError.new("something"))
+    str.should == "something"
+  end
+
+end
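
The mixin adds two things to a standard Logger: a registry of exception handlers and a crash method that reports an error and invokes every registered handler with the exception. A minimal sketch with the same observable behaviour follows; the logging call inside crash is an assumption, since the spec only checks that the handlers run.

# Hypothetical sketch of the mixin's observable behaviour.
require 'logger'

module CrashReporting
  def exception_handlers
    @exception_handlers ||= []
  end

  def exception_handler(&block)
    exception_handlers << block
  end

  def crash(message, exception)
    error "#{message}: #{exception.class} #{exception.message}"   # assumed; the spec only checks the handlers run
    exception_handlers.each {|handler| handler.call(exception) }
  end
end

logger = Logger.new($stderr).extend(CrashReporting)
logger.exception_handler {|ex| warn "would notify on-call: #{ex.message}" }
logger.crash("worker died", StandardError.new("boom"))
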

data/spec/fluq/reactor_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+
+describe FluQ::Reactor do
+
+  its(:handlers) { should == [] }
+  its(:inputs) { should == [] }
+
+  def events(*tags)
+    tags.map do |tag|
+      FluQ::Event.new(tag, 1313131313, {})
+    end
+  end
+
+  it "should listen to inputs" do
+    with_reactor do |subject|
+      subject.listen(FluQ::Input::Socket, bind: "tcp://127.0.0.1:7654")
+      subject.should have(1).inputs
+    end
+  end
+
+  it "should register handlers" do
+    h1 = subject.register(FluQ::Handler::Test)
+    subject.should have(1).handlers
+
+    h2 = subject.register(FluQ::Handler::Test, name: "specific")
+    subject.should have(2).handlers
+  end
+
+  it "should prevent duplicates" do
+    subject.register(FluQ::Handler::Test)
+    lambda {
+      subject.register(FluQ::Handler::Test)
+    }.should raise_error(ArgumentError)
+  end
+
+  it "should process events" do
+    h1 = subject.register(FluQ::Handler::Test)
+    h2 = subject.register(FluQ::Handler::Test, pattern: "NONE")
+    subject.process(events("tag")).should be(true)
+    h1.events.should == [["tag", 1313131313, {}]]
+    h2.events.should == []
+  end
+
+  it "should skip not matching events" do
+    h1 = subject.register(FluQ::Handler::Test, pattern: "some*")
+    subject.process(events("some.tag", "other.tag", "something.else")).should be(true)
+    h1.events.should == [["some.tag", 1313131313, {}], ["something.else", 1313131313, {}]]
+  end
+
+  it "should recover crashed handlers gracefully" do
+    h1 = subject.register(FluQ::Handler::Test)
+    10.times { subject.process(events("ok.now")) }
+    subject.process(events("error.event"))
+    10.times { subject.process(events("ok.now")) }
+    h1.should have(20).events
+  end
+
+end
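
Taken together, these examples describe the dispatch loop: each registered handler receives only the events its pattern selects, process returns true, and a handler that raises is reported and skipped rather than stopping dispatch. A hypothetical rendering of that loop, using only methods that appear in these specs (handlers, Handler#select, #on_events, #name, FluQ.logger.crash):

# Hypothetical sketch of the dispatch loop, not the gem's implementation.
def process(events)
  handlers.each do |handler|
    matched = handler.select(events)          # pattern filtering, see handler/base_spec
    next if matched.empty?
    begin
      handler.on_events(matched)
    rescue => e
      FluQ.logger.crash("#{handler.name} failed", e)   # recover gracefully, keep dispatching
    end
  end
  true
end
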