logstash-core 2.2.4.snapshot1
This diff shows the content of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
Potentially problematic release: this version of logstash-core has been flagged as possibly problematic.
- checksums.yaml +7 -0
- data/lib/logstash-core.rb +1 -0
- data/lib/logstash-core/logstash-core.rb +3 -0
- data/lib/logstash-core/version.rb +8 -0
- data/lib/logstash/agent.rb +391 -0
- data/lib/logstash/codecs/base.rb +50 -0
- data/lib/logstash/config/config_ast.rb +550 -0
- data/lib/logstash/config/cpu_core_strategy.rb +32 -0
- data/lib/logstash/config/defaults.rb +12 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3503 -0
- data/lib/logstash/config/mixin.rb +518 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/environment.rb +98 -0
- data/lib/logstash/errors.rb +12 -0
- data/lib/logstash/filters/base.rb +205 -0
- data/lib/logstash/inputs/base.rb +116 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/java_integration.rb +116 -0
- data/lib/logstash/json.rb +61 -0
- data/lib/logstash/logging.rb +91 -0
- data/lib/logstash/namespace.rb +13 -0
- data/lib/logstash/output_delegator.rb +172 -0
- data/lib/logstash/outputs/base.rb +91 -0
- data/lib/logstash/patches.rb +5 -0
- data/lib/logstash/patches/bugfix_jruby_2558.rb +51 -0
- data/lib/logstash/patches/cabin.rb +35 -0
- data/lib/logstash/patches/profile_require_calls.rb +47 -0
- data/lib/logstash/patches/rubygems.rb +38 -0
- data/lib/logstash/patches/stronger_openssl_defaults.rb +68 -0
- data/lib/logstash/pipeline.rb +499 -0
- data/lib/logstash/pipeline_reporter.rb +114 -0
- data/lib/logstash/plugin.rb +120 -0
- data/lib/logstash/program.rb +14 -0
- data/lib/logstash/runner.rb +124 -0
- data/lib/logstash/shutdown_watcher.rb +100 -0
- data/lib/logstash/util.rb +203 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +35 -0
- data/lib/logstash/util/decorators.rb +52 -0
- data/lib/logstash/util/defaults_printer.rb +31 -0
- data/lib/logstash/util/filetools.rb +186 -0
- data/lib/logstash/util/java_version.rb +66 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/plugin_version.rb +56 -0
- data/lib/logstash/util/prctl.rb +10 -0
- data/lib/logstash/util/retryable.rb +40 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/unicode_trimmer.rb +81 -0
- data/lib/logstash/util/worker_threads_default_printer.rb +29 -0
- data/lib/logstash/util/wrapped_synchronous_queue.rb +41 -0
- data/lib/logstash/version.rb +14 -0
- data/locales/en.yml +204 -0
- data/logstash-core.gemspec +58 -0
- data/spec/conditionals_spec.rb +429 -0
- data/spec/logstash/agent_spec.rb +85 -0
- data/spec/logstash/config/config_ast_spec.rb +146 -0
- data/spec/logstash/config/cpu_core_strategy_spec.rb +123 -0
- data/spec/logstash/config/defaults_spec.rb +10 -0
- data/spec/logstash/config/mixin_spec.rb +158 -0
- data/spec/logstash/environment_spec.rb +56 -0
- data/spec/logstash/filters/base_spec.rb +251 -0
- data/spec/logstash/inputs/base_spec.rb +74 -0
- data/spec/logstash/java_integration_spec.rb +304 -0
- data/spec/logstash/json_spec.rb +96 -0
- data/spec/logstash/output_delegator_spec.rb +144 -0
- data/spec/logstash/outputs/base_spec.rb +40 -0
- data/spec/logstash/patches_spec.rb +90 -0
- data/spec/logstash/pipeline_reporter_spec.rb +85 -0
- data/spec/logstash/pipeline_spec.rb +455 -0
- data/spec/logstash/plugin_spec.rb +169 -0
- data/spec/logstash/runner_spec.rb +68 -0
- data/spec/logstash/shutdown_watcher_spec.rb +113 -0
- data/spec/logstash/util/buftok_spec.rb +31 -0
- data/spec/logstash/util/charset_spec.rb +74 -0
- data/spec/logstash/util/defaults_printer_spec.rb +50 -0
- data/spec/logstash/util/java_version_spec.rb +79 -0
- data/spec/logstash/util/plugin_version_spec.rb +64 -0
- data/spec/logstash/util/unicode_trimmer_spec.rb +55 -0
- data/spec/logstash/util/worker_threads_default_printer_spec.rb +45 -0
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +28 -0
- data/spec/logstash/util_spec.rb +35 -0
- metadata +364 -0
data/spec/logstash/json_spec.rb
@@ -0,0 +1,96 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/json"
+require "logstash/environment"
+require "logstash/util"
+
+describe "LogStash::Json" do
+
+  let(:hash) {{"a" => 1}}
+  let(:json_hash) {"{\"a\":1}"}
+
+  let(:string) {"foobar"}
+  let(:json_string) {"\"foobar\""}
+
+  let(:array) {["foo", "bar"]}
+  let(:json_array) {"[\"foo\",\"bar\"]"}
+
+  let(:multi) {
+    [
+      {:ruby => "foo bar baz", :json => "\"foo bar baz\""},
+      {:ruby => "1", :json => "\"1\""},
+      {:ruby => {"a" => true}, :json => "{\"a\":true}"},
+      {:ruby => {"a" => nil}, :json => "{\"a\":null}"},
+      {:ruby => ["a", "b"], :json => "[\"a\",\"b\"]"},
+      {:ruby => [1, 2], :json => "[1,2]"},
+      {:ruby => [1, nil], :json => "[1,null]"},
+      {:ruby => {"a" => [1, 2]}, :json => "{\"a\":[1,2]}"},
+      {:ruby => {"a" => {"b" => 2}}, :json => "{\"a\":{\"b\":2}}"},
+      # {:ruby => , :json => },
+    ]
+  }
+
+  if LogStash::Environment.jruby?
+
+    ### JRuby specific
+    # Former expectation in this code were removed because of https://github.com/rspec/rspec-mocks/issues/964
+    # as soon as is fix we can re introduce them if decired, however for now the completeness of the test
+    # is also not affected as the conversion would not work if the expectation where not meet.
+    ###
+    context "jruby deserialize" do
+      it "should respond to load and deserialize object" do
+        expect(LogStash::Json.load(json_hash)).to eql(hash)
+      end
+    end
+
+    context "jruby serialize" do
+      it "should respond to dump and serialize object" do
+        expect(LogStash::Json.dump(string)).to eql(json_string)
+      end
+
+      it "should call JrJackson::Raw.generate for Hash" do
+        expect(LogStash::Json.dump(hash)).to eql(json_hash)
+      end
+
+      it "should call JrJackson::Raw.generate for Array" do
+        expect(LogStash::Json.dump(array)).to eql(json_array)
+      end
+
+    end
+
+  else
+
+    ### MRI specific
+
+    it "should respond to load and deserialize object on mri" do
+      expect(Oj).to receive(:load).with(json).and_call_original
+      expect(LogStash::Json.load(json)).to eql(hash)
+    end
+
+    it "should respond to dump and serialize object on mri" do
+      expect(Oj).to receive(:dump).with(hash, anything).and_call_original
+      expect(LogStash::Json.dump(hash)).to eql(json)
+    end
+  end
+
+  ### non specific
+
+  it "should correctly deserialize" do
+    multi.each do |test|
+      # because JrJackson in :raw mode uses Java::JavaUtil::LinkedHashMap and
+      # Java::JavaUtil::ArrayList, we must cast to compare.
+      # other than that, they quack like their Ruby equivalent
+      expect(LogStash::Util.normalize(LogStash::Json.load(test[:json]))).to eql(test[:ruby])
+    end
+  end
+
+  it "should correctly serialize" do
+    multi.each do |test|
+      expect(LogStash::Json.dump(test[:ruby])).to eql(test[:json])
+    end
+  end
+
+  it "should raise Json::ParserError on invalid json" do
+    expect{LogStash::Json.load("abc")}.to raise_error LogStash::Json::ParserError
+  end
+end
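The spec above pins down the behaviour of the LogStash::Json facade (backed by JrJackson on JRuby and Oj on MRI). As a rough illustration of the round-trip it asserts, usage looks like the following; this is a minimal, unverified sketch assuming a Logstash 2.x environment with logstash-core on the load path.

# Minimal sketch of the behaviour asserted above (assumes logstash-core 2.x is installed).
require "logstash/json"

LogStash::Json.dump({"a" => 1})    # => "{\"a\":1}"
LogStash::Json.load("{\"a\":1}")   # => {"a" => 1}

begin
  LogStash::Json.load("abc")       # not valid JSON
rescue LogStash::Json::ParserError => e
  puts "invalid json: #{e.message}"
end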
data/spec/logstash/output_delegator_spec.rb
@@ -0,0 +1,144 @@
+# encoding: utf-8
+require 'spec_helper'
+
+describe LogStash::OutputDelegator do
+  let(:logger) { double("logger") }
+  let(:events) { 7.times.map { LogStash::Event.new }}
+  let(:default_worker_count) { 1 }
+
+  subject { described_class.new(logger, out_klass, default_worker_count) }
+
+  context "with a plain output plugin" do
+    let(:out_klass) { double("output klass") }
+    let(:out_inst) { double("output instance") }
+
+    before do
+      allow(out_klass).to receive(:new).with(any_args).and_return(out_inst)
+      allow(out_klass).to receive(:threadsafe?).and_return(false)
+      allow(out_klass).to receive(:workers_not_supported?).and_return(false)
+      allow(out_inst).to receive(:register)
+      allow(out_inst).to receive(:multi_receive)
+      allow(logger).to receive(:debug).with(any_args)
+    end
+
+    it "should initialize cleanly" do
+      expect { subject }.not_to raise_error
+    end
+
+    context "after having received a batch of events" do
+      before do
+        subject.register
+        subject.multi_receive(events)
+      end
+
+      it "should pass the events through" do
+        expect(out_inst).to have_received(:multi_receive).with(events)
+      end
+
+      it "should increment the number of events received" do
+        expect(subject.events_received).to eql(events.length)
+      end
+    end
+
+
+    describe "closing" do
+      before do
+        subject.register
+      end
+
+      it "should register all workers on register" do
+        expect(out_inst).to have_received(:register)
+      end
+
+      it "should close all workers when closing" do
+        expect(out_inst).to receive(:do_close)
+        subject.do_close
+      end
+    end
+
+    describe "concurrency and worker support" do
+      before do
+        allow(out_inst).to receive(:id).and_return("a-simple-plugin")
+        allow(out_inst).to receive(:metric=).with(any_args)
+        allow(out_klass).to receive(:workers_not_supported?).and_return(false)
+      end
+
+      describe "non-threadsafe outputs that allow workers" do
+        let(:default_worker_count) { 3 }
+
+        before do
+          allow(out_klass).to receive(:threadsafe?).and_return(false)
+          subject.register
+        end
+
+        it "should instantiate multiple workers" do
+          expect(subject.workers.length).to eql(default_worker_count)
+        end
+
+        it "should send received events to the worker" do
+          expect(out_inst).to receive(:multi_receive).with(events)
+          subject.multi_receive(events)
+        end
+      end
+
+      describe "threadsafe outputs" do
+        before do
+          allow(out_klass).to receive(:threadsafe?).and_return(true)
+          subject.register
+        end
+
+        it "should return true when threadsafe? is invoked" do
+          expect(subject.threadsafe?).to eql(true)
+        end
+
+        it "should define a threadsafe_worker" do
+          expect(subject.send(:threadsafe_worker)).to eql(out_inst)
+        end
+
+        it "should utilize threadsafe_multi_receive" do
+          expect(subject.send(:threadsafe_worker)).to receive(:multi_receive).with(events)
+          subject.multi_receive(events)
+        end
+
+        it "should not utilize the worker queue" do
+          expect(subject.send(:worker_queue)).to be_nil
+        end
+
+        it "should send received events to the worker" do
+          expect(out_inst).to receive(:multi_receive).with(events)
+          subject.multi_receive(events)
+        end
+
+        it "should close all workers when closing" do
+          expect(out_inst).to receive(:do_close)
+          subject.do_close
+        end
+      end
+    end
+  end
+
+  # This may seem suspiciously similar to the class in outputs/base_spec
+  # but, in fact, we need a whole new class because using this even once
+  # will immutably modify the base class
+  class LogStash::Outputs::NOOPDelLegacyNoWorkers < ::LogStash::Outputs::Base
+    LEGACY_WORKERS_NOT_SUPPORTED_REASON = "legacy reason"
+
+    def register
+      workers_not_supported(LEGACY_WORKERS_NOT_SUPPORTED_REASON)
+    end
+  end
+
+  describe "legacy output workers_not_supported" do
+    let(:default_worker_count) { 2 }
+    let(:out_klass) { LogStash::Outputs::NOOPDelLegacyNoWorkers }
+
+    before(:each) do
+      allow(logger).to receive(:debug).with(any_args)
+    end
+
+    it "should only setup one worker" do
+      subject.register
+      expect(subject.worker_count).to eql(1)
+    end
+  end
+end
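For orientation, the lifecycle these examples drive is roughly: construct the delegator with a logger, an output class, and a worker count; call register (which instantiates and registers the workers); feed it batches via multi_receive; and call do_close at shutdown. The sketch below is rough and unverified against the 2.2.4 code; MyNoopOutput is a hypothetical stand-in for a real output plugin, and the constructor arity simply mirrors the spec's described_class.new(logger, out_klass, default_worker_count).

# Rough sketch of the OutputDelegator lifecycle exercised above (illustrative only).
require "cabin"
require "logstash/output_delegator"
require "logstash/outputs/base"
require "logstash/event"

class MyNoopOutput < LogStash::Outputs::Base   # hypothetical no-op output
  config_name "my_noop"
  def register; end
  def multi_receive(events); end
end

logger    = Cabin::Channel.get(LogStash)             # the logger Logstash 2.x uses
delegator = LogStash::OutputDelegator.new(logger, MyNoopOutput, 3)

delegator.register                                   # instantiates the workers
delegator.multi_receive([LogStash::Event.new])       # hands a batch to a worker
delegator.do_close                                   # closes every worker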
data/spec/logstash/outputs/base_spec.rb
@@ -0,0 +1,40 @@
+# encoding: utf-8
+require "spec_helper"
+
+# use a dummy NOOP output to test Outputs::Base
+class LogStash::Outputs::NOOP < LogStash::Outputs::Base
+  config_name "noop"
+  milestone 2
+
+  config :dummy_option, :validate => :string
+
+  def register; end
+
+  def receive(event)
+    return output?(event)
+  end
+end
+
+class LogStash::Outputs::NOOPLegacyNoWorkers < ::LogStash::Outputs::Base
+  LEGACY_WORKERS_NOT_SUPPORTED_REASON = "legacy reason"
+
+  def register
+    workers_not_supported(LEGACY_WORKERS_NOT_SUPPORTED_REASON)
+  end
+end
+
+describe "LogStash::Outputs::Base#new" do
+  it "should instantiate cleanly" do
+    params = { "dummy_option" => "potatoes", "codec" => "json", "workers" => 2 }
+    worker_params = params.dup; worker_params["workers"] = 1
+
+    expect do
+      LogStash::Outputs::NOOP.new(params.dup)
+    end.not_to raise_error
+  end
+
+  it "should move workers_not_supported declarations up to the class level" do
+    LogStash::Outputs::NOOPLegacyNoWorkers.new.register
+    expect(LogStash::Outputs::NOOPLegacyNoWorkers.workers_not_supported?).to eql(true)
+  end
+end
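The pattern shown here — subclassing LogStash::Outputs::Base, declaring config_name and config options, and implementing register/receive — is how an output plugin is written, and the spec instantiates it with a string-keyed params hash just as the pipeline would after parsing a config file. A minimal, unverified sketch of the same call outside of RSpec, reusing the spec's own values:

# Sketch only: instantiate the dummy plugin defined in the spec above with the
# same string-keyed params a parsed config would produce, then register it.
require "logstash/outputs/base"

noop = LogStash::Outputs::NOOP.new("dummy_option" => "potatoes", "codec" => "json", "workers" => 2)
noop.register
# Declared options are validated by the config mixin and made available to
# the plugin body (e.g. as @dummy_option).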
data/spec/logstash/patches_spec.rb
@@ -0,0 +1,90 @@
+# encoding: utf-8
+require "socket"
+require "logstash/patches"
+require "flores/pki"
+
+describe "OpenSSL defaults" do
+  subject { OpenSSL::SSL::SSLContext.new }
+
+  # OpenSSL::SSL::SSLContext#ciphers returns an array of
+  # [ [ ciphername, version, bits, alg_bits ], [ ... ], ... ]
+
+  # List of cipher names
+  let(:ciphers) { subject.ciphers.map(&:first) }
+
+  # List of cipher encryption bit strength.
+  let(:encryption_bits) { subject.ciphers.map { |_, _, _, a| a } }
+
+  it "should not include any export ciphers" do
+    # SSLContext#ciphers returns an array of [ciphername, tlsversion, key_bits, alg_bits]
+    # Let's just check the cipher names
+    expect(ciphers).not_to be_any { |name| name =~ /EXPORT/ || name =~ /^EXP/ }
+  end
+
+  it "should not include any weak ciphers (w/ less than 128 bits in encryption algorithm)" do
+    # SSLContext#ciphers returns an array of [ciphername, tlsversion, key_bits, alg_bits]
+    expect(encryption_bits).not_to be_any { |bits| bits < 128 }
+  end
+
+  it "should not include a default `verify_mode`" do
+    expect(OpenSSL::SSL::SSLContext::DEFAULT_PARAMS[:verify_mode]).to eq(nil)
+  end
+
+  context "SSLSocket" do
+    # Code taken from the flores library by @jordansissels,
+    # https://github.com/jordansissel/ruby-flores/blob/master/spec/flores/pki_integration_spec.rb
+    # since these helpers were created to fix this particular issue
+    let(:csr) { Flores::PKI::CertificateSigningRequest.new }
+    # Here, I use a 1024-bit key for faster tests.
+    # Please do not use such small keys in production.
+    let(:key_bits) { 1024 }
+    let(:key) { OpenSSL::PKey::RSA.generate(key_bits, 65537) }
+    let(:certificate_duration) { Flores::Random.number(1..86400) }
+
+    context "with self-signed client/server certificate" do
+      let(:certificate_subject) { "CN=server.example.com" }
+      let(:certificate) { csr.create }
+
+      # Returns [socket, address, port]
+      let(:listener) { Flores::Random.tcp_listener }
+      let(:server) { listener[0] }
+      let(:server_address) { listener[1] }
+      let(:server_port) { listener[2] }
+
+      let(:server_context) { OpenSSL::SSL::SSLContext.new }
+      let(:client_context) { OpenSSL::SSL::SSLContext.new }
+
+      before do
+        csr.subject = certificate_subject
+        csr.public_key = key.public_key
+        csr.start_time = Time.now
+        csr.expire_time = csr.start_time + certificate_duration
+        csr.signing_key = key
+        csr.want_signature_ability = true
+
+        server_context.cert = certificate
+        server_context.key = key
+
+        client_store = OpenSSL::X509::Store.new
+        client_store.add_cert(certificate)
+        client_context.cert_store = client_store
+        client_context.verify_mode = OpenSSL::SSL::VERIFY_PEER
+
+        ssl_server = OpenSSL::SSL::SSLServer.new(server, server_context)
+        Thread.new do
+          begin
+            ssl_server.accept
+          rescue => e
+            puts "Server accept failed: #{e}"
+          end
+        end
+      end
+
+      it "should successfully connect as a client" do
+        socket = TCPSocket.new(server_address, server_port)
+        ssl_client = OpenSSL::SSL::SSLSocket.new(socket, client_context)
+        expect { ssl_client.connect }.not_to raise_error
+      end
+    end
+  end
+end
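These assertions cover the stronger_openssl_defaults patch shipped in this gem (data/lib/logstash/patches/stronger_openssl_defaults.rb in the file list above): once the patches are loaded, a freshly created SSLContext should advertise no EXPORT-grade and no sub-128-bit ciphers. A quick, unverified way to eyeball the same thing from an IRB session:

# Sketch: inspect the patched defaults by hand. SSLContext#ciphers yields
# [name, version, bits, alg_bits] tuples, as the spec comments note.
require "openssl"
require "logstash/patches"

ctx   = OpenSSL::SSL::SSLContext.new
names = ctx.ciphers.map(&:first)

puts names.grep(/EXPORT|^EXP/).inspect                                   # expected: []
puts ctx.ciphers.select { |_, _, _, alg_bits| alg_bits < 128 }.inspect   # expected: []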
data/spec/logstash/pipeline_reporter_spec.rb
@@ -0,0 +1,85 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/pipeline"
+require "logstash/pipeline_reporter"
+
+class DummyOutput < LogStash::Outputs::Base
+  config_name "dummyoutput"
+  milestone 2
+
+  attr_reader :num_closes, :events
+
+  def initialize(params={})
+    super
+    @num_closes = 0
+    @events = []
+  end
+
+  def register
+  end
+
+  def receive(event)
+    @events << event
+  end
+
+  def close
+    @num_closes += 1
+  end
+end
+
+#TODO: Figure out how to add more tests that actually cover inflight events
+#This will require some janky multithreading stuff
+describe LogStash::PipelineReporter do
+  let(:generator_count) { 5 }
+  let(:config) do
+    "input { generator { count => #{generator_count} } } output { dummyoutput {} } "
+  end
+  let(:pipeline) { LogStash::Pipeline.new(config)}
+  let(:reporter) { pipeline.reporter }
+
+  before do
+    allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput)
+    allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_call_original
+    allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_call_original
+
+    @pre_snapshot = reporter.snapshot
+    pipeline.run
+    @post_snapshot = reporter.snapshot
+  end
+
+  describe "events filtered" do
+    it "should start at zero" do
+      expect(@pre_snapshot.events_filtered).to eql(0)
+    end
+
+    it "should end at the number of generated events" do
+      expect(@post_snapshot.events_filtered).to eql(generator_count)
+    end
+  end
+
+  describe "events consumed" do
+    it "should start at zero" do
+      expect(@pre_snapshot.events_consumed).to eql(0)
+    end
+
+    it "should end at the number of generated events" do
+      expect(@post_snapshot.events_consumed).to eql(generator_count)
+    end
+  end
+
+  describe "inflight count" do
+    it "should be zero before running" do
+      expect(@pre_snapshot.inflight_count).to eql(0)
+    end
+
+    it "should be zero after running" do
+      expect(@post_snapshot.inflight_count).to eql(0)
+    end
+  end
+
+  describe "output states" do
+    it "should include the count of received events" do
+      expect(@post_snapshot.output_info.first[:events_received]).to eql(generator_count)
+    end
+  end
+end
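Outside the test harness, the same reporter hangs off any pipeline object, and its snapshot exposes the counters asserted above (events_consumed, events_filtered, inflight_count, output_info). A rough, unverified sketch of the same flow; the generator and stdout plugins named in the config string are illustrative choices and would need to be installed:

# Sketch only: build a pipeline from a config string, run it to completion,
# then read the reporter counters the spec asserts on.
require "logstash/pipeline"
require "logstash/pipeline_reporter"

config   = "input { generator { count => 5 } } output { stdout {} }"
pipeline = LogStash::Pipeline.new(config)

pipeline.run                       # returns once the generator input is exhausted
snapshot = pipeline.reporter.snapshot

puts snapshot.events_consumed      # => 5
puts snapshot.events_filtered      # => 5
puts snapshot.inflight_count       # => 0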