logstash-core 5.0.0.alpha3.snapshot2-java → 5.0.0.alpha3.snapshot4-java
Potentially problematic release.
This version of logstash-core might be problematic.
- checksums.yaml +4 -4
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash/agent.rb +49 -31
- data/lib/logstash/api/init.ru +3 -3
- data/lib/logstash/api/lib/app/service.rb +1 -1
- data/lib/logstash/config/config_ast.rb +23 -18
- data/lib/logstash/config/loader.rb +4 -4
- data/lib/logstash/config/mixin.rb +10 -21
- data/lib/logstash/environment.rb +30 -0
- data/lib/logstash/filters/base.rb +2 -2
- data/lib/logstash/inputs/base.rb +2 -2
- data/lib/logstash/instrument/collector.rb +1 -1
- data/lib/logstash/logging/json.rb +21 -0
- data/lib/logstash/output_delegator.rb +2 -2
- data/lib/logstash/patches/clamp.rb +69 -0
- data/lib/logstash/pipeline.rb +37 -62
- data/lib/logstash/plugin.rb +1 -1
- data/lib/logstash/runner.rb +155 -146
- data/lib/logstash/settings.rb +267 -0
- data/lib/logstash/util/decorators.rb +6 -6
- data/lib/logstash/util/java_version.rb +1 -10
- data/lib/logstash/util/worker_threads_default_printer.rb +2 -2
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +17 -20
- data/logstash-core.gemspec +1 -1
- data/spec/api/spec_helper.rb +15 -16
- data/spec/conditionals_spec.rb +113 -113
- data/spec/logstash/agent_spec.rb +77 -68
- data/spec/logstash/config/config_ast_spec.rb +4 -2
- data/spec/logstash/config/mixin_spec.rb +33 -7
- data/spec/logstash/filters/base_spec.rb +16 -16
- data/spec/logstash/inputs/base_spec.rb +8 -8
- data/spec/logstash/output_delegator_spec.rb +2 -0
- data/spec/logstash/pipeline_spec.rb +60 -26
- data/spec/logstash/plugin_spec.rb +2 -2
- data/spec/logstash/runner_spec.rb +112 -25
- data/spec/logstash/setting_spec.rb +130 -0
- data/spec/logstash/settings_spec.rb +62 -0
- metadata +11 -9
- data/lib/logstash/util/defaults_printer.rb +0 -31
- data/spec/logstash/util/defaults_printer_spec.rb +0 -50
- data/spec/logstash/util/worker_threads_default_printer_spec.rb +0 -45
data/spec/logstash/config/config_ast_spec.rb
@@ -169,10 +169,12 @@ describe LogStashConfigParser do
       end
 
       describe "generated conditional functionals" do
-        it "should be
+        it "should be created per instance" do
          instance_1 = pipeline_klass.new(config_string)
          instance_2 = pipeline_klass.new(config_string)
-
+          generated_method_1 = instance_1.instance_variable_get("@generated_objects")[:cond_func_1]
+          generated_method_2 = instance_2.instance_variable_get("@generated_objects")[:cond_func_1]
+          expect(generated_method_1).to_not be(generated_method_2)
         end
       end
     end
data/spec/logstash/config/mixin_spec.rb
@@ -160,14 +160,14 @@ describe LogStash::Config::Mixin do
     let(:plugin_class) do
       Class.new(LogStash::Filters::Base) do
         config_name "one_plugin"
-        config :oneString, :validate => :string
-        config :oneBoolean, :validate => :boolean
-        config :oneNumber, :validate => :number
-        config :oneArray, :validate => :array
-        config :oneHash, :validate => :hash
+        config :oneString, :validate => :string, :required => false
+        config :oneBoolean, :validate => :boolean, :required => false
+        config :oneNumber, :validate => :number, :required => false
+        config :oneArray, :validate => :array, :required => false
+        config :oneHash, :validate => :hash, :required => false
 
         def initialize(params)
-          super(params
+          super(params)
         end
       end
     end
@@ -231,8 +231,34 @@ describe LogStash::Config::Mixin do
       expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
       expect(subject.oneHash).to(be == { "key1" => "fancy", "key2" => "fancy is true", "key3" => "true or false" })
     end
+    end
+
+    context "should support $ in values" do
+      before do
+        ENV["bar"] = "foo"
+        ENV["f$$"] = "bar"
+      end
+
+      after do
+        ENV.delete("bar")
+        ENV.delete("f$$")
+      end
+
+      subject do
+        plugin_class.new(
+          "oneString" => "${f$$:val}",
+          "oneArray" => ["foo$bar", "${bar:my$val}"]
+          # "dollar_in_env" => "${f$$:final}"
+        )
+      end
 
+      it "should support $ in values" do
+        expect(subject.oneArray).to(be == ["foo$bar", "foo"])
+      end
+
+      it "should not support $ in environment variable name" do
+        expect(subject.oneString).to(be == "${f$$:val}")
+      end
     end
   end
-
 end
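The hunk above exercises the ${VAR:default} environment-variable interpolation that the config mixin now applies to plugin settings, including the rule that "$" is not a valid character inside a variable name. As a rough sketch of that substitution behavior only (the helper and regex below are illustrative assumptions, not Logstash's implementation):

  # Illustrative only: resolve "${NAME:default}" references against ENV.
  def resolve_env_refs(value)
    value.gsub(/\$\{(\w+)(?::([^}]*))?\}/) do
      ENV.fetch(Regexp.last_match(1), Regexp.last_match(2).to_s)
    end
  end

  ENV["bar"] = "foo"
  resolve_env_refs("${bar:my$val}")  # => "foo" (variable found, default ignored)
  resolve_env_refs("${f$$:val}")     # => "${f$$:val}" ("$" is not a valid name character)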
data/spec/logstash/filters/base_spec.rb
@@ -62,7 +62,7 @@ describe LogStash::Filters::NOOP do
   CONFIG
 
   sample "example" do
-    insist { subject["new_field"] } == ["new_value", "new_value_2"]
+    insist { subject.get("new_field") } == ["new_value", "new_value_2"]
   end
 end
 
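This hunk, and the remaining hunks for this spec file, swap the old hash-style Event field reader for the reader method of the reworked Event API. A minimal sketch of the two styles side by side (the set call is shown only for symmetry and does not appear in this diff; it assumes the logstash-core test environment is loaded, e.g. require "logstash/event"):

  event = LogStash::Event.new("type" => "noop")
  event.set("tags", ["test"])   # writer counterpart, shown for illustration
  event.get("tags")             # => ["test"]   (style used by the "+" lines)
  # event["tags"]               # legacy hash-style reader (style used by the "-" lines)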
@@ -76,7 +76,7 @@ describe LogStash::Filters::NOOP do
   CONFIG
 
   sample("type" => "noop") do
-    insist { subject["tags"] } == ["test"]
+    insist { subject.get("tags") } == ["test"]
   end
 end
 
@@ -90,11 +90,11 @@ describe LogStash::Filters::NOOP do
   CONFIG
 
   sample("type" => "noop") do
-    insist { subject["tags"] } == ["test"]
+    insist { subject.get("tags") } == ["test"]
   end
 
   sample("type" => "noop", "tags" => ["t1", "t2"]) do
-    insist { subject["tags"] } == ["t1", "t2", "test"]
+    insist { subject.get("tags") } == ["t1", "t2", "test"]
   end
 end
 
@@ -108,19 +108,19 @@ describe LogStash::Filters::NOOP do
   CONFIG
 
   sample("type" => "noop") do
-    insist { subject["tags"] } == ["test"]
+    insist { subject.get("tags") } == ["test"]
   end
 
   sample("type" => "noop", "tags" => ["t1"]) do
-    insist { subject["tags"] } == ["t1", "test"]
+    insist { subject.get("tags") } == ["t1", "test"]
   end
 
   sample("type" => "noop", "tags" => ["t1", "t2"]) do
-    insist { subject["tags"] } == ["t1", "t2", "test"]
+    insist { subject.get("tags") } == ["t1", "t2", "test"]
   end
 
   sample("type" => "noop", "tags" => ["t1", "t2", "t3"]) do
-    insist { subject["tags"] } == ["t1", "t2", "t3", "test"]
+    insist { subject.get("tags") } == ["t1", "t2", "t3", "test"]
   end
 end
 
@@ -134,27 +134,27 @@ describe LogStash::Filters::NOOP do
   CONFIG
 
   sample("type" => "noop", "tags" => ["t4"]) do
-    insist { subject["tags"] } == ["t4"]
+    insist { subject.get("tags") } == ["t4"]
   end
 
   sample("type" => "noop", "tags" => ["t1", "t2", "t3"]) do
-    insist { subject["tags"] } == ["t1"]
+    insist { subject.get("tags") } == ["t1"]
   end
 
   # also test from Json deserialized data to test the handling of native Java collections by JrJackson
   # see https://github.com/elastic/logstash/issues/2261
   sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"t2\", \"t3\"]}")) do
-    insist { subject["tags"] } == ["t1"]
+    insist { subject.get("tags") } == ["t1"]
   end
 
   sample("type" => "noop", "tags" => ["t1", "t2"]) do
-    insist { subject["tags"] } == ["t1"]
+    insist { subject.get("tags") } == ["t1"]
   end
 
   # also test from Json deserialized data to test the handling of native Java collections by JrJackson
   # see https://github.com/elastic/logstash/issues/2261
   sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"t2\"]}")) do
-    insist { subject["tags"] } == ["t1"]
+    insist { subject.get("tags") } == ["t1"]
   end
 end
 
@@ -168,13 +168,13 @@ describe LogStash::Filters::NOOP do
   CONFIG
 
   sample("type" => "noop", "tags" => ["t1", "goaway", "t3"], "blackhole" => "goaway") do
-    insist { subject["tags"] } == ["t1", "t3"]
+    insist { subject.get("tags") } == ["t1", "t3"]
   end
 
   # also test from Json deserialized data to test the handling of native Java collections by JrJackson
   # see https://github.com/elastic/logstash/issues/2261
   sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"goaway\", \"t3\"], \"blackhole\":\"goaway\"}")) do
-    insist { subject["tags"] } == ["t1", "t3"]
+    insist { subject.get("tags") } == ["t1", "t3"]
   end
 end
 
@@ -230,7 +230,7 @@ describe LogStash::Filters::NOOP do
 
   sample("type" => "noop", "t1" => ["t2", "t3"]) do
     insist { subject }.include?("t1")
-    insist { subject["[t1][0]"] } == "t3"
+    insist { subject.get("[t1][0]") } == "t3"
   end
 end
 
data/spec/logstash/inputs/base_spec.rb
@@ -15,50 +15,50 @@ describe "LogStash::Inputs::Base#decorate" do
     input = LogStash::Inputs::NOOP.new("tags" => "value")
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["tags"]).to eq(["value"])
+    expect(evt.get("tags")).to eq(["value"])
   end
 
   it "should add multiple tag" do
     input = LogStash::Inputs::NOOP.new("tags" => ["value1","value2"])
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["tags"]).to eq(["value1","value2"])
+    expect(evt.get("tags")).to eq(["value1","value2"])
   end
 
   it "should allow duplicates tag" do
    input = LogStash::Inputs::NOOP.new("tags" => ["value","value"])
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["tags"]).to eq(["value","value"])
+    expect(evt.get("tags")).to eq(["value","value"])
   end
 
   it "should add tag with sprintf" do
     input = LogStash::Inputs::NOOP.new("tags" => "%{type}")
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["tags"]).to eq(["noop"])
+    expect(evt.get("tags")).to eq(["noop"])
   end
 
   it "should add single field" do
     input = LogStash::Inputs::NOOP.new("add_field" => {"field" => "value"})
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["field"]).to eq("value")
+    expect(evt.get("field")).to eq("value")
   end
 
   it "should add single field with sprintf" do
     input = LogStash::Inputs::NOOP.new("add_field" => {"%{type}" => "%{type}"})
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["noop"]).to eq("noop")
+    expect(evt.get("noop")).to eq("noop")
   end
 
   it "should add multiple field" do
     input = LogStash::Inputs::NOOP.new("add_field" => {"field" => ["value1", "value2"], "field2" => "value"})
     evt = LogStash::Event.new({"type" => "noop"})
     input.instance_eval {decorate(evt)}
-    expect(evt["field"]).to eq(["value1","value2"])
-    expect(evt["field2"]).to eq("value")
+    expect(evt.get("field")).to eq(["value1","value2"])
+    expect(evt.get("field2")).to eq("value")
   end
 end
 
data/spec/logstash/output_delegator_spec.rb
@@ -13,10 +13,12 @@ describe LogStash::OutputDelegator do
   let(:out_klass) { double("output klass") }
   let(:out_inst) { double("output instance") }
 
+
   before(:each) do
     allow(out_klass).to receive(:new).with(any_args).and_return(out_inst)
     allow(out_klass).to receive(:threadsafe?).and_return(false)
     allow(out_klass).to receive(:workers_not_supported?).and_return(false)
+    allow(out_klass).to receive(:name).and_return("example")
     allow(out_inst).to receive(:register)
     allow(out_inst).to receive(:multi_receive)
     allow(out_inst).to receive(:metric=).with(any_args)
data/spec/logstash/pipeline_spec.rb
@@ -84,9 +84,21 @@ class TestPipeline < LogStash::Pipeline
 end
 
 describe LogStash::Pipeline do
-  let(:worker_thread_count) {
+  let(:worker_thread_count) { 5 }
   let(:safe_thread_count) { 1 }
   let(:override_thread_count) { 42 }
+  let(:pipeline_settings_obj) { LogStash::SETTINGS }
+  let(:pipeline_settings) { {} }
+
+  before :each do
+    pipeline_workers_setting = LogStash::SETTINGS.get_setting("pipeline.workers")
+    allow(pipeline_workers_setting).to receive(:default).and_return(worker_thread_count)
+    pipeline_settings.each {|k, v| pipeline_settings_obj.set(k, v) }
+  end
+
+  after :each do
+    pipeline_settings_obj.reset
+  end
 
   describe "defaulting the pipeline workers based on thread safety" do
     before(:each) do
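The hunk above moves the specs from per-pipeline constructor arguments to the shared LogStash::SETTINGS registry: settings are overridden in a before hook and restored afterwards. A small sketch of that override-and-reset pattern, using arbitrary example values (it assumes the logstash-core environment is loaded, e.g. require "logstash/settings"):

  LogStash::SETTINGS.set("pipeline.workers", 2)        # override for one example
  LogStash::SETTINGS.set("pipeline.batch.size", 125)   # values here are arbitrary
  LogStash::SETTINGS.get("pipeline.workers")           # => 2
  LogStash::SETTINGS.reset                             # restore defaults, as the after hook does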
@@ -123,13 +135,15 @@ describe LogStash::Pipeline do
     end
 
     it "should not receive a debug message with the compiled code" do
+      pipeline_settings_obj.set("config.debug", false)
       expect(logger).not_to receive(:debug).with(/Compiled pipeline/, anything)
       pipeline = TestPipeline.new(test_config_with_filters)
     end
 
-    it "should print the compiled code if
+    it "should print the compiled code if config.debug is set to true" do
+      pipeline_settings_obj.set("config.debug", true)
       expect(logger).to receive(:debug).with(/Compiled pipeline/, anything)
-      pipeline = TestPipeline.new(test_config_with_filters,
+      pipeline = TestPipeline.new(test_config_with_filters, pipeline_settings_obj)
     end
   end
 
@@ -145,12 +159,13 @@ describe LogStash::Pipeline do
     end
 
     context "when there is command line -w N set" do
+      let(:pipeline_settings) { {"pipeline.workers" => override_thread_count } }
      it "starts multiple filter thread" do
-        msg = "Warning: Manual override - there are filters that might
-
+        msg = "Warning: Manual override - there are filters that might" +
+              " not work with multiple worker threads"
+        pipeline = TestPipeline.new(test_config_with_filters, pipeline_settings_obj)
        expect(pipeline.logger).to receive(:warn).with(msg,
             {:worker_threads=> override_thread_count, :filters=>["dummyfilter"]})
-        pipeline.configure(:pipeline_workers, override_thread_count)
         pipeline.run
         expect(pipeline.worker_threads.size).to eq(override_thread_count)
       end
@@ -222,7 +237,7 @@ describe LogStash::Pipeline do
       pipeline.run
 
       expect(pipeline.outputs.size ).to eq(1)
-      expect(pipeline.outputs.first.workers.size ).to eq(::LogStash::
+      expect(pipeline.outputs.first.workers.size ).to eq(::LogStash::SETTINGS.get("pipeline.output.workers"))
       expect(pipeline.outputs.first.workers.first.num_closes ).to eq(1)
     end
 
@@ -279,7 +294,7 @@ describe LogStash::Pipeline do
     CONFIG
 
     sample("hello") do
-      expect(subject["message"]).to eq("hello")
+      expect(subject.get("message")).to eq("hello")
     end
   end
 
@@ -299,10 +314,10 @@ describe LogStash::Pipeline do
     sample(["foo", "bar"]) do
       expect(subject.size).to eq(2)
 
-      expect(subject[0]["message"]).to eq("foo\nbar")
-      expect(subject[0]["type"]).to be_nil
-      expect(subject[1]["message"]).to eq("foo\nbar")
-      expect(subject[1]["type"]).to eq("clone1")
+      expect(subject[0].get("message")).to eq("foo\nbar")
+      expect(subject[0].get("type")).to be_nil
+      expect(subject[1].get("message")).to eq("foo\nbar")
+      expect(subject[1].get("type")).to eq("clone1")
     end
   end
 end
@@ -310,7 +325,8 @@ describe LogStash::Pipeline do
   describe "max inflight warning" do
     let(:config) { "input { dummyinput {} } output { dummyoutput {} }" }
     let(:batch_size) { 1 }
-    let(:
+    let(:pipeline_settings) { { "pipeline.batch.size" => batch_size, "pipeline.workers" => 1 } }
+    let(:pipeline) { LogStash::Pipeline.new(config, pipeline_settings_obj) }
     let(:logger) { pipeline.logger }
     let(:warning_prefix) { /CAUTION: Recommended inflight events max exceeded!/ }
 
@@ -354,17 +370,17 @@ describe LogStash::Pipeline do
     sample("hello") do
       expect(subject.size).to eq(3)
 
-      expect(subject[0]["message"]).to eq("hello")
-      expect(subject[0]["type"]).to be_nil
-      expect(subject[0]["foo"]).to eq("bar")
+      expect(subject[0].get("message")).to eq("hello")
+      expect(subject[0].get("type")).to be_nil
+      expect(subject[0].get("foo")).to eq("bar")
 
-      expect(subject[1]["message"]).to eq("hello")
-      expect(subject[1]["type"]).to eq("clone1")
-      expect(subject[1]["foo"]).to eq("bar")
+      expect(subject[1].get("message")).to eq("hello")
+      expect(subject[1].get("type")).to eq("clone1")
+      expect(subject[1].get("foo")).to eq("bar")
 
-      expect(subject[2]["message"]).to eq("hello")
-      expect(subject[2]["type"]).to eq("clone2")
-      expect(subject[2]["foo"]).to eq("bar")
+      expect(subject[2].get("message")).to eq("hello")
+      expect(subject[2].get("type")).to eq("clone2")
+      expect(subject[2].get("foo")).to eq("bar")
     end
   end
 end
@@ -435,13 +451,13 @@ describe LogStash::Pipeline do
 
     it "flushes the buffered contents of the filter" do
       Thread.abort_on_exception = true
-      pipeline = LogStash::Pipeline.new(config,
+      pipeline = LogStash::Pipeline.new(config, pipeline_settings_obj)
       Thread.new { pipeline.run }
       sleep 0.1 while !pipeline.ready?
       # give us a bit of time to flush the events
       wait(5).for do
         next unless output && output.events && output.events.first
-        output.events.first["message"].split("\n").count
+        output.events.first.get("message").split("\n").count
       end.to eq(number_of_events)
       pipeline.shutdown
     end
@@ -526,8 +542,9 @@ describe LogStash::Pipeline do
     end
 
     context "when collecting metrics in the pipeline" do
-
-
+      let(:pipeline_settings) { { "pipeline.id" => pipeline_id } }
+      subject { described_class.new(config, pipeline_settings_obj) }
+      let(:pipeline_id) { "main" }
       let(:metric) { LogStash::Instrument::Metric.new }
       let(:number_of_events) { 1000 }
       let(:multiline_id) { "my-multiline" }
@@ -573,6 +590,7 @@ describe LogStash::Pipeline do
       # Reset the metric store
       LogStash::Instrument::Collector.instance.clear
 
+      subject.metric = metric
       Thread.new { subject.run }
       # make sure we have received all the generated events
       sleep 1 while dummyoutput.events.size < number_of_events
@@ -616,4 +634,20 @@ describe LogStash::Pipeline do
       end
     end
   end
+
+  context "Pipeline object" do
+    before do
+      allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
+      allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
+      allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(DummyFilter)
+      allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput)
+    end
+
+    let(:pipeline1) { LogStash::Pipeline.new("input { generator {} } filter { dummyfilter {} } output { dummyoutput {}}") }
+    let(:pipeline2) { LogStash::Pipeline.new("input { generator {} } filter { dummyfilter {} } output { dummyoutput {}}") }
+
+    it "should not add ivars" do
+      expect(pipeline1.instance_variables).to eq(pipeline2.instance_variables)
+    end
+  end
 end
data/spec/logstash/plugin_spec.rb
@@ -8,12 +8,12 @@ require "logstash/filters/base"
 
 describe LogStash::Plugin do
   it "should fail lookup on inexisting type" do
-    expect_any_instance_of(Cabin::Channel).to receive(:debug).once
+    #expect_any_instance_of(Cabin::Channel).to receive(:debug).once
     expect { LogStash::Plugin.lookup("badbadtype", "badname") }.to raise_error(LogStash::PluginLoadingError)
   end
 
   it "should fail lookup on inexisting name" do
-    expect_any_instance_of(Cabin::Channel).to receive(:debug).once
+    #expect_any_instance_of(Cabin::Channel).to receive(:debug).once
     expect { LogStash::Plugin.lookup("filter", "badname") }.to raise_error(LogStash::PluginLoadingError)
   end
 