fluq 0.7.5 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/.travis.yml +3 -0
- data/Gemfile +12 -1
- data/Gemfile.lock +44 -8
- data/README.md +24 -6
- data/Rakefile +8 -1
- data/benchmark/socket.rb +13 -25
- data/examples/config/multi.rb +52 -0
- data/examples/config/simple.rb +15 -0
- data/fluq.gemspec +3 -3
- data/lib/fluq.rb +22 -16
- data/lib/fluq/cli.rb +3 -12
- data/lib/fluq/dsl.rb +2 -45
- data/lib/fluq/dsl/base.rb +11 -0
- data/lib/fluq/dsl/feed.rb +24 -0
- data/lib/fluq/dsl/root.rb +35 -0
- data/lib/fluq/event.rb +9 -28
- data/lib/fluq/feed.rb +40 -5
- data/lib/fluq/format.rb +6 -0
- data/lib/fluq/format/base.rb +42 -0
- data/lib/fluq/format/json.rb +17 -0
- data/lib/fluq/format/lines.rb +27 -0
- data/lib/fluq/format/msgpack.rb +28 -0
- data/lib/fluq/format/tsv.rb +19 -0
- data/lib/fluq/handler.rb +1 -1
- data/lib/fluq/handler/base.rb +11 -38
- data/lib/fluq/handler/log.rb +12 -14
- data/lib/fluq/handler/noop.rb +2 -0
- data/lib/fluq/input/base.rb +33 -29
- data/lib/fluq/input/socket.rb +46 -16
- data/lib/fluq/mixins.rb +2 -2
- data/lib/fluq/runner.rb +41 -0
- data/lib/fluq/testing.rb +5 -11
- data/lib/fluq/version.rb +1 -1
- data/lib/fluq/worker.rb +73 -0
- data/spec/fluq/dsl/feed_spec.rb +33 -0
- data/spec/fluq/dsl/root_spec.rb +20 -0
- data/spec/fluq/event_spec.rb +17 -12
- data/spec/fluq/feed_spec.rb +24 -0
- data/spec/fluq/format/base_spec.rb +9 -0
- data/spec/fluq/format/json_spec.rb +22 -0
- data/spec/fluq/format/lines_spec.rb +20 -0
- data/spec/fluq/format/msgpack_spec.rb +22 -0
- data/spec/fluq/format/tsv_spec.rb +21 -0
- data/spec/fluq/handler/base_spec.rb +7 -52
- data/spec/fluq/handler/log_spec.rb +11 -14
- data/spec/fluq/handler/{null_spec.rb → noop_spec.rb} +1 -3
- data/spec/fluq/input/base_spec.rb +48 -15
- data/spec/fluq/input/socket_spec.rb +34 -26
- data/spec/fluq/mixins/loggable_spec.rb +2 -2
- data/spec/fluq/runner_spec.rb +18 -0
- data/spec/fluq/worker_spec.rb +87 -0
- data/spec/fluq_spec.rb +1 -2
- data/spec/scenario/config/nested/feed1.rb +6 -0
- data/spec/scenario/config/test.rb +8 -2
- data/spec/spec_helper.rb +7 -26
- metadata +62 -62
- data/benchmark/logging.rb +0 -37
- data/examples/common.rb +0 -3
- data/examples/simple.rb +0 -5
- data/lib/fluq/buffer.rb +0 -6
- data/lib/fluq/buffer/base.rb +0 -51
- data/lib/fluq/buffer/file.rb +0 -68
- data/lib/fluq/feed/base.rb +0 -37
- data/lib/fluq/feed/json.rb +0 -28
- data/lib/fluq/feed/msgpack.rb +0 -27
- data/lib/fluq/feed/tsv.rb +0 -30
- data/lib/fluq/handler/null.rb +0 -4
- data/lib/fluq/input/socket/connection.rb +0 -41
- data/lib/fluq/mixins/logger.rb +0 -26
- data/lib/fluq/reactor.rb +0 -79
- data/spec/fluq/buffer/base_spec.rb +0 -21
- data/spec/fluq/buffer/file_spec.rb +0 -47
- data/spec/fluq/dsl_spec.rb +0 -43
- data/spec/fluq/feed/base_spec.rb +0 -15
- data/spec/fluq/feed/json_spec.rb +0 -27
- data/spec/fluq/feed/msgpack_spec.rb +0 -27
- data/spec/fluq/feed/tsv_spec.rb +0 -27
- data/spec/fluq/input/socket/connection_spec.rb +0 -35
- data/spec/fluq/mixins/logger_spec.rb +0 -25
- data/spec/fluq/reactor_spec.rb +0 -69
- data/spec/scenario/config/nested/common.rb +0 -3
data/spec/fluq/feed_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe FluQ::Feed do
+
+  subject { described_class.new "my-feed" }
+
+  its(:name) { should == "my-feed" }
+  its(:handlers) { should == [] }
+  its(:inputs) { should == [] }
+
+  it "should listen to inputs" do
+    subject.listen(FluQ::Input::Socket, bind: "tcp://127.0.0.1:7654")
+    subject.should have(1).inputs
+  end
+
+  it "should register handlers" do
+    h1 = subject.register(FluQ::Handler::Test)
+    subject.should have(1).handlers
+
+    h2 = subject.register(FluQ::Handler::Test)
+    subject.should have(2).handlers
+  end
+
+end
data/spec/fluq/format/json_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe FluQ::Format::Json do
+
+  let(:data) { %({"a":"b"}\n{"a":"b"}\n{"a":"b"}\n) }
+
+  it { should be_a(FluQ::Format::Lines) }
+
+  it 'should parse' do
+    events = subject.parse(data)
+    events.should have(3).items
+    events.first.timestamp.should be_within(5).of(Time.now.to_i)
+    events.first.should == FluQ::Event.new({"a" => "b"}, events.first.timestamp)
+  end
+
+  it 'should log invalid inputs' do
+    subject.logger.should_receive(:warn).once
+    events = subject.parse data + %(NOTJSON\n{"a":"b"}\n\n{"a":"b"})
+    events.should have(5).items
+  end
+
+end
data/spec/fluq/format/lines_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe FluQ::Format::Lines do
+
+  subject { FluQ::Format::Json.new }
+
+  it { should be_a(described_class) }
+  it { should be_a(FluQ::Format::Base) }
+
+  it 'should parse' do
+    subject.parse(%({"a":1})).should have(1).item
+    subject.parse(%({"a":1}\n{"b":2}\n\n{"c":3}\n)).should have(3).items
+  end
+
+  it 'should deal with partials' do
+    subject.parse(%({"a":1}\n{"b")).should == [{"a"=>1}]
+    subject.parse(%(:2}\n)).should == [{"b"=>2}]
+  end
+
+end
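The partial-input behaviour exercised above (a trailing fragment is held back until the next chunk completes it) is plain line buffering. Below is a minimal standalone sketch of that technique, assuming JSON-per-line input; the class and method names are illustrative and this is not the gem's actual implementation.

require 'json'

# Buffer incoming chunks, emit one parsed hash per complete line,
# and keep any unterminated tail for the next call.
class LineBuffer
  def initialize
    @buffer = ""
  end

  def parse(chunk)
    @buffer << chunk
    lines   = @buffer.split("\n", -1)
    @buffer = lines.pop || ""             # unterminated tail ("" if chunk ended in \n)
    lines.reject(&:empty?).filter_map do |line|
      JSON.parse(line) rescue nil         # skip (or log) malformed lines
    end
  end
end

buf = LineBuffer.new
buf.parse(%({"a":1}\n{"b"))   # => [{"a"=>1}]
buf.parse(%(:2}\n))           # => [{"b"=>2}]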
data/spec/fluq/format/msgpack_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe FluQ::Format::Msgpack do
+
+  let(:data) { ([{"a" => "b"}] * 3).map {|h| MessagePack.pack(h) }.join }
+
+  it { should be_a(FluQ::Format::Base) }
+
+  it 'should parse' do
+    events = subject.parse(data)
+    events.should have(3).items
+    events.first.timestamp.should be_within(5).of(Time.now.to_i)
+    events.first.should == FluQ::Event.new({"a" => "b"}, events.first.timestamp)
+  end
+
+  it 'should log invalid inputs' do
+    subject.logger.should_receive(:warn).at_least(:once)
+    events = subject.parse data + "NOTMP" + data
+    events.should have(6).items
+  end
+
+end
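For MessagePack there is no line delimiter, so the same buffering job is usually delegated to a streaming unpacker that is fed raw bytes and yields each complete object while retaining an incomplete trailer internally. A hedged sketch using the msgpack gem's MessagePack::Unpacker follows; it is illustrative only and omits the malformed-input handling the spec above tests.

require 'msgpack'

unpacker = MessagePack::Unpacker.new
records  = []
data     = ([{"a" => "b"}] * 3).map { |h| MessagePack.pack(h) }.join

# Feed all but the last two bytes: two objects complete, the third stays buffered.
unpacker.feed_each(data[0..-3]) { |obj| records << obj }
records.size  # => 2

# Feeding the remaining bytes completes the third object.
unpacker.feed_each(data[-2..-1]) { |obj| records << obj }
records.size  # => 3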
data/spec/fluq/format/tsv_spec.rb
@@ -0,0 +1,21 @@
+require 'spec_helper'
+
+describe FluQ::Format::Tsv do
+
+  let(:data) { %(1313131313\t{"a":"b"}\n1313131313\t{"a":"b"}\n1313131313\t{"a":"b"}\n) }
+
+  it { should be_a(FluQ::Format::Lines) }
+
+  it 'should parse' do
+    events = subject.parse(data)
+    events.should have(3).items
+    events.first.should == FluQ::Event.new({"a" => "b"}, 1313131313)
+  end
+
+  it 'should log invalid inputs' do
+    subject.logger.should_receive(:warn).once
+    events = subject.parse data + %(NOTTSV\n1313131313\t{"a":"b"}\n\n)
+    events.should have(4).items
+  end
+
+end
data/spec/fluq/handler/base_spec.rb
@@ -2,69 +2,24 @@ require 'spec_helper'

 describe FluQ::Handler::Base do

-
+  let(:event) { FluQ::Event.new({}) }

   it { should respond_to(:on_events) }
   it { should be_a(FluQ::Mixins::Loggable) }
-  its(:
-  its(:
-  its(:
-  its(:name) { should == "base-AxPGxv" }
-
-  def events(*tags)
-    tags.map {|tag| event(tag) }
-  end
-
-  def event(tag)
-    FluQ::Event.new(tag, 1313131313, {})
-  end
+  its(:config) { should == { timeout: 60 } }
+  its(:name) { should == "base" }
+  its(:timers) { should be_instance_of(Timers) }

   it 'should have a type' do
     described_class.type.should == "base"
   end

   it 'can have custom names' do
-    described_class.new(
-  end
-
-  it 'should match tags via patters' do
-    subject = described_class.new(reactor, pattern: "visits.????.*")
-    subject.match?(event("visits.site.1")).should be(true)
-    subject.match?(event("visits.page.2")).should be(true)
-    subject.match?(event("visits.other.1")).should be(false)
-    subject.match?(event("visits.site")).should be(false)
-    subject.match?(event("visits.site.")).should be(true)
-    subject.match?(event("prefix.visits.site.1")).should be(false)
-    subject.match?(event("visits.site.1.suffix")).should be(true)
-  end
-
-  it 'should support "or" patterns' do
-    subject = described_class.new(reactor, pattern: "visits.{site,page}.*")
-    subject.match?(event("visits.site.1")).should be(true)
-    subject.match?(event("visits.page.2")).should be(true)
-    subject.match?(event("visits.other.1")).should be(false)
-    subject.match?(event("visits.site")).should be(false)
-    subject.match?(event("visits.site.")).should be(true)
-    subject.match?(event("prefix.visits.site.1")).should be(false)
-    subject.match?(event("visits.site.1.suffix")).should be(true)
-  end
-
-  it 'should support regular expression patterns' do
-    subject = described_class.new(reactor, pattern: /^visits\.(?:s|p)\w{3}\..*/)
-    subject.match?(event("visits.site.1")).should be(true)
-    subject.match?(event("visits.page.2")).should be(true)
-    subject.match?(event("visits.other.1")).should be(false)
-    subject.match?(event("visits.site")).should be(false)
-    subject.match?(event("visits.site.")).should be(true)
-    subject.match?(event("prefix.visits.site.1")).should be(false)
-    subject.match?(event("visits.site.1.suffix")).should be(true)
+    described_class.new(name: "visitors").name.should == "visitors"
   end

-  it 'should
-
-    described_class.new(reactor, pattern: "visits.????.*").select(stream).map(&:tag).should == [
-      "visits.site.1", "visits.page.2", "visits.site.2"
-    ]
+  it 'should not filter events by default' do
+    subject.filter([event, event]).should have(2).items
   end

 end
data/spec/fluq/handler/log_spec.rb
@@ -2,33 +2,30 @@ require 'spec_helper'

 describe FluQ::Handler::Log do

-  let(:event)
-
-
-  let(:root) { FluQ.root.join("../scenario/log/raw") }
-  subject { described_class.new reactor }
-  before { FileUtils.rm_rf(root); FileUtils.mkdir_p(root) }
+  let(:event) { FluQ::Event.new({"a" => "1"}, 1313131313) }
+  let(:root) { FluQ.root.join("../scenario/log/raw") }
+  before { FileUtils.rm_rf(root); FileUtils.mkdir_p(root) }

   it { should be_a(FluQ::Handler::Base) }
-  its("config.keys") { should =~ [:convert, :path, :
+  its("config.keys") { should =~ [:convert, :path, :cache_max, :cache_ttl, :timeout] }

   it "can log events" do
     subject.on_events [event]
     subject.pool.each_key {|k| subject.pool[k].flush }
-    root.join("
+    root.join("20110812.log").read.should == %(1313131313\t{"a":"1"}\n)
   end

   it 'can have custom conversions' do
-    subject = described_class.new
+    subject = described_class.new convert: ->e { e.merge(ts: e.timestamp).map {|k,v| "#{k}=#{v}" }.join(',') }
     subject.on_events [event]
     subject.pool.each_key {|k| subject.pool[k].flush }
-    root.join("
+    root.join("20110812.log").read.should == "a=1,ts=1313131313\n"
   end

-  it 'can rewrite
-    subject = described_class.new
+  it 'can rewrite events' do
+    subject = described_class.new rewrite: ->e { e["a"].to_i * 1000 }, path: "log/raw/%Y%m/%t.log"
     subject.on_events [event]
-    root.join("
+    root.join("201108/1000.log").should be_file
   end

   it 'should not fail on temporary file errors' do
@@ -36,7 +33,7 @@ describe FluQ::Handler::Log do
     subject.pool.each_key {|k| subject.pool[k].close }
     subject.on_events [event]
     subject.pool.each_key {|k| subject.pool[k].flush }
-    root.join("
+    root.join("20110812.log").read.should have(2).lines
   end

   describe described_class::FilePool do
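Pulled together, the Log handler usage these specs exercise looks roughly like the following. The option names come from the config keys and calls shown above, the convert lambda is the one from the spec, and the path template is modelled on the one in the rewrite example; the whole snippet is a sketch, not documented API.

# Hedged sketch based on log_spec.rb above.
handler = FluQ::Handler::Log.new \
  path:    "log/raw/%Y%m%d.log",
  convert: ->e { e.merge(ts: e.timestamp).map {|k,v| "#{k}=#{v}" }.join(',') }

handler.on_events [FluQ::Event.new({"a" => "1"}, 1313131313)]
handler.pool.each_key {|k| handler.pool[k].flush }   # flush the pooled files to disk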
data/spec/fluq/input/base_spec.rb
@@ -2,28 +2,61 @@ require 'spec_helper'

 describe FluQ::Input::Base do

-
-  let
-
+  subject { described_class.new "my-feed", [FluQ::Handler::Test], format: "msgpack" }
+  let(:subject2) { described_class.new "my-feed", [FluQ::Handler::Test], format: "json" }
+  let(:handler) { subject.worker.handlers.first }
+  let(:handler2) { subject2.worker.handlers.first }
+  let(:data) { [{"a" => 1}, {"b" => 2}].map {|h| MessagePack.pack(h) }.join }

   it { should be_a(FluQ::Mixins::Loggable) }
-  its(:
-
+  its(:wrapped_object) { should be_instance_of(described_class) }
+
+  its(:worker) { should be_instance_of(FluQ::Worker) }
+  its(:config) { should == {format: "msgpack", format_options: {}} }
   its(:name) { should == "base" }
-  its(:
-  its(:
+  its(:description) { should == "base" }
+  its(:format) { should be_instance_of(FluQ::Format::Msgpack) }
+
+  it 'should process' do
+    subject.process(data)
+    handler.should have(2).events
+  end

-  it 'should
-
-
-
+  it 'should maintain separate handler instances per input' do
+    -> {
+      subject.process data
+    }.should change { handler.events.size }.by(2)
+
+    -> {
+      subject2.process %({"a":1,"b":2}\n{"a":1,"b":2}\n{"a":1,"b":2}\n)
+    }.should_not change { handler.events.size }
+    handler2.should have(3).events
   end

-  it 'should
-
-
-    subject.flush!(buf)
+  it 'should handle partial messages' do
+    m1, m2 = data + data[0..1], data[2..-1]
+    subject.process(m1)
     handler.should have(2).events
+    subject.process(m2)
+    handler.should have(4).events
+
+    m1, m2 = data[0..-3], data[-3..-1] + data
+    subject.process(m1)
+    handler.should have(5).events
+    subject.process(m2)
+    handler.should have(8).events
+
+    m1, m2 = %({"a":1,"b":2}\n{"a":1,"b":2}\n{"a":1), %(,"b":2}\n{"a":1,"b":2}\n)
+    subject2.process(m1)
+    handler2.should have(2).events
+    subject2.process(m2)
+    handler2.should have(4).events
+
+    m1, m2 = %({"a":1,"b":2}\n{"a":1,), %("b":2}\n{"a":1,"b":2}\n{"a":1,"b":2}\n)
+    subject2.process(m1)
+    handler2.should have(5).events
+    subject2.process(m2)
+    handler2.should have(8).events
   end

 end
data/spec/fluq/input/socket_spec.rb
@@ -2,44 +2,52 @@ require 'spec_helper'

 describe FluQ::Input::Socket do

-  let(:event) {
+  let(:event) { {a: 1, b: 2} }
+  let(:actors) { [] }

-  def input(
-    described_class.new
+  def input(opts = {})
+    actor = described_class.new "my-feed", [[FluQ::Handler::Test]], opts
+    actors << actor
+    actor
   end

-
+  def wait_for(server)
+    30.times do
+      break if server.listening?
+      sleep(0.01)
+    end
+  end
+
+  subject { input bind: "tcp://127.0.0.1:26712", format: "msgpack" }
+  after { actors.each &:terminate }
+
   it { should be_a(FluQ::Input::Base) }
-  its(:
-  its(:
+  its(:description) { should == "socket (tcp://127.0.0.1:26712)" }
+  its(:name) { should == "tcp" }
+  its(:config) { should == {format: "msgpack", format_options: {}, bind: "tcp://127.0.0.1:26712"} }

   it 'should require bind option' do
-
+    -> { input }.should raise_error(ArgumentError, /No URL to bind/)
   end

   it 'should handle requests' do
-
-
-
-
-
-
-
-    client.write event.to_msgpack
-    client.close
-  end
+    wait_for(subject)
+    client = TCPSocket.open("127.0.0.1", 26712)
+    client.write MessagePack.pack(event)
+    client.close
+    subject.worker.should have(1).handlers
+    subject.worker.handlers.first.should have(1).events
   end

   it 'should support UDP' do
-
-
-
-
-
-
-
-
-    h.should have(1).events
+    udp = input bind: "udp://127.0.0.1:26713", format: "msgpack"
+    wait_for(udp)
+
+    client = UDPSocket.new
+    client.send MessagePack.pack(event), 0, "127.0.0.1", 26713
+    client.close
+    udp.worker.should have(1).handlers
+    udp.worker.handlers.first.should have(1).events
   end

 end
data/spec/fluq/runner_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe FluQ::Runner do
+
+  after { subject.terminate }
+
+  its(:feeds) { should == [] }
+  its(:inspect) { should == "#<FluQ::Runner feeds: []>" }
+
+  it "should register feeds" do
+    subject.feed("my-feed")
+    subject.should have(1).feeds
+
+    subject.feed("other-feed")
+    subject.should have(2).feeds
+  end
+
+end
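Taken together with feed_spec.rb earlier, these specs outline the new 0.8.0 wiring that replaces the old Reactor/Buffer pipeline: a Runner owns named Feeds, and a Feed listens on Inputs and registers Handlers. The end-to-end sketch below is composed only from calls that appear in the specs above; how a Runner-created feed is configured (DSL block vs. return value) is not visible in this diff, so the two halves are shown separately, and the shipped Log handler stands in for the Test double used in the specs.

require 'fluq'

# A feed wired directly, as in feed_spec.rb:
feed = FluQ::Feed.new("my-feed")
feed.listen(FluQ::Input::Socket, bind: "tcp://127.0.0.1:7654")   # accept events over TCP
feed.register(FluQ::Handler::Log)                                # hand parsed events to a handler

# A runner keeping track of named feeds, as in runner_spec.rb:
runner = FluQ::Runner.new
runner.feed("my-feed")
runner.feed("other-feed")
runner.terminate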