logstash-core 6.0.1-java → 6.1.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/gemspec_jars.rb +1 -1
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/logstash-core.rb +14 -2
- data/lib/logstash-core_jars.rb +4 -2
- data/lib/logstash/agent.rb +8 -2
- data/lib/logstash/api/modules/node.rb +11 -5
- data/lib/logstash/api/modules/stats.rb +13 -7
- data/lib/logstash/compiler.rb +6 -10
- data/lib/logstash/compiler/lscl.rb +10 -1
- data/lib/logstash/compiler/lscl/helpers.rb +3 -1
- data/lib/logstash/config/mixin.rb +2 -2
- data/lib/logstash/environment.rb +1 -6
- data/lib/logstash/errors.rb +1 -1
- data/lib/logstash/event.rb +0 -2
- data/lib/logstash/filter_delegator.rb +1 -2
- data/lib/logstash/instrument/metric_type/counter.rb +1 -1
- data/lib/logstash/instrument/metric_type/gauge.rb +1 -1
- data/lib/logstash/instrument/wrapped_write_client.rb +1 -1
- data/lib/logstash/java_filter_delegator.rb +79 -0
- data/lib/logstash/java_pipeline.rb +690 -0
- data/lib/logstash/json.rb +4 -29
- data/lib/logstash/output_delegator.rb +3 -2
- data/lib/logstash/patches/bugfix_jruby_2558.rb +1 -1
- data/lib/logstash/pipeline.rb +32 -89
- data/lib/logstash/pipeline_action/create.rb +8 -2
- data/lib/logstash/pipeline_action/reload.rb +6 -1
- data/lib/logstash/pipeline_reporter.rb +2 -1
- data/lib/logstash/pipeline_settings.rb +1 -0
- data/lib/logstash/plugins/plugin_factory.rb +100 -0
- data/lib/logstash/plugins/registry.rb +18 -7
- data/lib/logstash/queue_factory.rb +3 -1
- data/lib/logstash/runner.rb +13 -56
- data/lib/logstash/settings.rb +2 -2
- data/lib/logstash/timestamp.rb +0 -1
- data/lib/logstash/util.rb +13 -21
- data/lib/logstash/util/java_version.rb +0 -1
- data/lib/logstash/util/settings_helper.rb +79 -0
- data/lib/logstash/util/{environment_variables.rb → substitution_variables.rb} +10 -8
- data/lib/logstash/util/wrapped_acked_queue.rb +17 -108
- data/lib/logstash/util/wrapped_synchronous_queue.rb +38 -178
- data/locales/en.yml +2 -0
- data/spec/conditionals_spec.rb +235 -80
- data/spec/logstash/api/modules/node_spec.rb +11 -0
- data/spec/logstash/compiler/compiler_spec.rb +28 -2
- data/spec/logstash/environment_spec.rb +0 -5
- data/spec/logstash/event_spec.rb +7 -2
- data/spec/logstash/filter_delegator_spec.rb +1 -1
- data/spec/logstash/filters/base_spec.rb +30 -28
- data/spec/logstash/instrument/wrapped_write_client_spec.rb +2 -2
- data/spec/logstash/java_filter_delegator_spec.rb +176 -0
- data/spec/logstash/java_pipeline_spec.rb +933 -0
- data/spec/logstash/json_spec.rb +27 -45
- data/spec/logstash/plugins/registry_spec.rb +7 -0
- data/spec/logstash/queue_factory_spec.rb +5 -2
- data/spec/logstash/settings_spec.rb +1 -1
- data/spec/logstash/util/java_version_spec.rb +1 -3
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +27 -24
- data/spec/logstash/webserver_spec.rb +3 -6
- data/spec/support/helpers.rb +5 -0
- data/spec/support/pipeline/pipeline_helpers.rb +97 -0
- data/versions-gem-copy.yml +5 -2
- metadata +14 -5
- data/lib/logstash/patches/rubygems.rb +0 -38
@@ -0,0 +1,933 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "spec_helper"
|
3
|
+
require "logstash/inputs/generator"
|
4
|
+
require "logstash/filters/drop"
|
5
|
+
require_relative "../support/mocks_classes"
|
6
|
+
require_relative "../support/helpers"
|
7
|
+
require_relative "../logstash/pipeline_reporter_spec" # for DummyOutput class
|
8
|
+
require 'support/pipeline/pipeline_helpers'
|
9
|
+
require "stud/try"
|
10
|
+
require 'timeout'
|
11
|
+
|
12
|
+
class DummyInput < LogStash::Inputs::Base
|
13
|
+
config_name "dummyinput"
|
14
|
+
milestone 2
|
15
|
+
|
16
|
+
def register
|
17
|
+
end
|
18
|
+
|
19
|
+
def run(queue)
|
20
|
+
end
|
21
|
+
|
22
|
+
def close
|
23
|
+
end
|
24
|
+
end
|
25
|
+
|
26
|
+
class DummyInputGenerator < LogStash::Inputs::Base
|
27
|
+
config_name "dummyinputgenerator"
|
28
|
+
milestone 2
|
29
|
+
|
30
|
+
def register
|
31
|
+
end
|
32
|
+
|
33
|
+
def run(queue)
|
34
|
+
queue << Logstash::Event.new while !stop?
|
35
|
+
end
|
36
|
+
|
37
|
+
def close
|
38
|
+
end
|
39
|
+
end
|
40
|
+
|
41
|
+
class DummyCodec < LogStash::Codecs::Base
|
42
|
+
config_name "dummycodec"
|
43
|
+
milestone 2
|
44
|
+
|
45
|
+
config :format, :validate => :string
|
46
|
+
|
47
|
+
def decode(data)
|
48
|
+
data
|
49
|
+
end
|
50
|
+
|
51
|
+
def encode(event)
|
52
|
+
event
|
53
|
+
end
|
54
|
+
|
55
|
+
def close
|
56
|
+
end
|
57
|
+
end
|
58
|
+
|
59
|
+
class DummyOutputMore < ::LogStash::Outputs::DummyOutput
|
60
|
+
config_name "dummyoutputmore"
|
61
|
+
end
|
62
|
+
|
63
|
+
class DummyFilter < LogStash::Filters::Base
|
64
|
+
config_name "dummyfilter"
|
65
|
+
milestone 2
|
66
|
+
|
67
|
+
def register() end
|
68
|
+
|
69
|
+
def filter(event) end
|
70
|
+
|
71
|
+
def threadsafe?() false; end
|
72
|
+
|
73
|
+
def close() end
|
74
|
+
end
|
75
|
+
|
76
|
+
class DummySafeFilter < LogStash::Filters::Base
|
77
|
+
config_name "dummysafefilter"
|
78
|
+
milestone 2
|
79
|
+
|
80
|
+
def register() end
|
81
|
+
|
82
|
+
def filter(event) end
|
83
|
+
|
84
|
+
def threadsafe?() true; end
|
85
|
+
|
86
|
+
def close() end
|
87
|
+
end
|
88
|
+
|
89
|
+
class DummyFlushingFilter < LogStash::Filters::Base
|
90
|
+
config_name "dummyflushingfilter"
|
91
|
+
milestone 2
|
92
|
+
|
93
|
+
def register() end
|
94
|
+
def filter(event) end
|
95
|
+
def periodic_flush
|
96
|
+
true
|
97
|
+
end
|
98
|
+
def flush(options)
|
99
|
+
[::LogStash::Event.new("message" => "dummy_flush")]
|
100
|
+
end
|
101
|
+
def close() end
|
102
|
+
end
|
103
|
+
|
104
|
+
class DummyFlushingFilterPeriodic < DummyFlushingFilter
|
105
|
+
config_name "dummyflushingfilterperiodic"
|
106
|
+
|
107
|
+
def flush(options)
|
108
|
+
# Don't generate events on the shutdown flush to make sure we actually test the
|
109
|
+
# periodic flush.
|
110
|
+
options[:final] ? [] : [::LogStash::Event.new("message" => "dummy_flush")]
|
111
|
+
end
|
112
|
+
end
|
113
|
+
|
114
|
+
class JavaTestPipeline < LogStash::JavaPipeline
|
115
|
+
attr_reader :outputs, :settings
|
116
|
+
end
|
117
|
+
|
118
|
+
describe LogStash::JavaPipeline do
|
119
|
+
let(:worker_thread_count) { 5 }
|
120
|
+
let(:safe_thread_count) { 1 }
|
121
|
+
let(:override_thread_count) { 42 }
|
122
|
+
let(:dead_letter_queue_enabled) { false }
|
123
|
+
let(:dead_letter_queue_path) { }
|
124
|
+
let(:pipeline_settings_obj) { LogStash::SETTINGS.clone }
|
125
|
+
let(:pipeline_settings) { {} }
|
126
|
+
let(:max_retry) {10} #times
|
127
|
+
let(:timeout) {120} #seconds
|
128
|
+
|
129
|
+
before :each do
|
130
|
+
pipeline_workers_setting = LogStash::SETTINGS.get_setting("pipeline.workers")
|
131
|
+
allow(pipeline_workers_setting).to receive(:default).and_return(worker_thread_count)
|
132
|
+
dlq_enabled_setting = LogStash::SETTINGS.get_setting("dead_letter_queue.enable")
|
133
|
+
allow(dlq_enabled_setting).to receive(:value).and_return(dead_letter_queue_enabled)
|
134
|
+
dlq_path_setting = LogStash::SETTINGS.get_setting("path.dead_letter_queue")
|
135
|
+
allow(dlq_path_setting).to receive(:value).and_return(dead_letter_queue_path)
|
136
|
+
|
137
|
+
pipeline_settings.each {|k, v| pipeline_settings_obj.set(k, v) }
|
138
|
+
end
|
139
|
+
|
140
|
+
describe "#ephemeral_id" do
|
141
|
+
it "creates an ephemeral_id at creation time" do
|
142
|
+
pipeline = mock_java_pipeline_from_string("input { generator { count => 1 } } output { null {} }")
|
143
|
+
expect(pipeline.ephemeral_id).to_not be_nil
|
144
|
+
pipeline.close
|
145
|
+
|
146
|
+
second_pipeline = mock_java_pipeline_from_string("input { generator { count => 1 } } output { null {} }")
|
147
|
+
expect(second_pipeline.ephemeral_id).not_to eq(pipeline.ephemeral_id)
|
148
|
+
second_pipeline.close
|
149
|
+
end
|
150
|
+
end
|
151
|
+
|
152
|
+
|
153
|
+
describe "event cancellation" do
|
154
|
+
# test harness for https://github.com/elastic/logstash/issues/6055
|
155
|
+
|
156
|
+
let(:output) { LogStash::Outputs::DummyOutputWithEventsArray.new }
|
157
|
+
|
158
|
+
before do
|
159
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
|
160
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutputwitheventsarray").and_return(LogStash::Outputs::DummyOutputWithEventsArray)
|
161
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "drop").and_call_original
|
162
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "mutate").and_call_original
|
163
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_call_original
|
164
|
+
allow(LogStash::Outputs::DummyOutputWithEventsArray).to receive(:new).with(any_args).and_return(output)
|
165
|
+
end
|
166
|
+
|
167
|
+
let(:config) do
|
168
|
+
<<-CONFIG
|
169
|
+
input {
|
170
|
+
generator {
|
171
|
+
lines => ["1", "2", "END"]
|
172
|
+
count => 1
|
173
|
+
}
|
174
|
+
}
|
175
|
+
filter {
|
176
|
+
if [message] == "1" {
|
177
|
+
drop {}
|
178
|
+
}
|
179
|
+
mutate { add_tag => ["notdropped"] }
|
180
|
+
}
|
181
|
+
output { dummyoutputwitheventsarray {} }
|
182
|
+
CONFIG
|
183
|
+
end
|
184
|
+
|
185
|
+
it "should not propagate cancelled events from filter to output" do
|
186
|
+
abort_on_exception_state = Thread.abort_on_exception
|
187
|
+
Thread.abort_on_exception = true
|
188
|
+
|
189
|
+
pipeline = mock_java_pipeline_from_string(config, pipeline_settings_obj)
|
190
|
+
t = Thread.new { pipeline.run }
|
191
|
+
Timeout.timeout(timeout) do
|
192
|
+
sleep(0.1) until pipeline.ready?
|
193
|
+
end
|
194
|
+
Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
|
195
|
+
wait(3).for do
|
196
|
+
# give us a bit of time to flush the events
|
197
|
+
# puts("*****" + output.events.map{|e| e.message}.to_s)
|
198
|
+
output.events.map{|e| e.get("message")}.include?("END")
|
199
|
+
end.to be_truthy
|
200
|
+
end
|
201
|
+
expect(output.events.size).to eq(2)
|
202
|
+
expect(output.events[0].get("tags")).to eq(["notdropped"])
|
203
|
+
expect(output.events[1].get("tags")).to eq(["notdropped"])
|
204
|
+
pipeline.shutdown
|
205
|
+
t.join
|
206
|
+
|
207
|
+
Thread.abort_on_exception = abort_on_exception_state
|
208
|
+
end
|
209
|
+
end
|
210
|
+
|
211
|
+
describe "defaulting the pipeline workers based on thread safety" do
|
212
|
+
before(:each) do
|
213
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput)
|
214
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
|
215
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
|
216
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(DummyFilter)
|
217
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummysafefilter").and_return(DummySafeFilter)
|
218
|
+
end
|
219
|
+
|
220
|
+
context "when there are some not threadsafe filters" do
|
221
|
+
let(:test_config_with_filters) {
|
222
|
+
<<-eos
|
223
|
+
input {
|
224
|
+
dummyinput {}
|
225
|
+
}
|
226
|
+
|
227
|
+
filter {
|
228
|
+
dummyfilter {}
|
229
|
+
}
|
230
|
+
|
231
|
+
output {
|
232
|
+
dummyoutput {}
|
233
|
+
}
|
234
|
+
eos
|
235
|
+
}
|
236
|
+
|
237
|
+
describe "debug compiled" do
|
238
|
+
let(:logger) { double("pipeline logger").as_null_object }
|
239
|
+
|
240
|
+
before do
|
241
|
+
expect(::LogStash::JavaPipeline).to receive(:logger).and_return(logger)
|
242
|
+
allow(logger).to receive(:debug?).and_return(true)
|
243
|
+
end
|
244
|
+
|
245
|
+
it "should not receive a debug message with the compiled code" do
|
246
|
+
pipeline_settings_obj.set("config.debug", false)
|
247
|
+
expect(logger).not_to receive(:debug).with(/Compiled pipeline/, anything)
|
248
|
+
pipeline = mock_java_pipeline_from_string(test_config_with_filters)
|
249
|
+
pipeline.close
|
250
|
+
end
|
251
|
+
|
252
|
+
it "should print the compiled code if config.debug is set to true" do
|
253
|
+
pipeline_settings_obj.set("config.debug", true)
|
254
|
+
expect(logger).to receive(:debug).with(/Compiled pipeline/, anything)
|
255
|
+
pipeline = mock_java_pipeline_from_string(test_config_with_filters, pipeline_settings_obj)
|
256
|
+
pipeline.close
|
257
|
+
end
|
258
|
+
end
|
259
|
+
|
260
|
+
context "when there is no command line -w N set" do
|
261
|
+
it "starts one filter thread" do
|
262
|
+
msg = "Defaulting pipeline worker threads to 1 because there are some filters that might not work with multiple worker threads"
|
263
|
+
pipeline = mock_java_pipeline_from_string(test_config_with_filters)
|
264
|
+
expect(pipeline.logger).to receive(:warn).with(msg,
|
265
|
+
hash_including({:count_was=>worker_thread_count, :filters=>["dummyfilter"]}))
|
266
|
+
pipeline.run
|
267
|
+
expect(pipeline.worker_threads.size).to eq(safe_thread_count)
|
268
|
+
pipeline.shutdown
|
269
|
+
end
|
270
|
+
end
|
271
|
+
|
272
|
+
context "when there is command line -w N set" do
|
273
|
+
let(:pipeline_settings) { {"pipeline.workers" => override_thread_count } }
|
274
|
+
it "starts multiple filter thread" do
|
275
|
+
msg = "Warning: Manual override - there are filters that might" +
|
276
|
+
" not work with multiple worker threads"
|
277
|
+
pipeline = mock_java_pipeline_from_string(test_config_with_filters, pipeline_settings_obj)
|
278
|
+
expect(pipeline.logger).to receive(:warn).with(msg, hash_including({:worker_threads=> override_thread_count, :filters=>["dummyfilter"]}))
|
279
|
+
pipeline.run
|
280
|
+
expect(pipeline.worker_threads.size).to eq(override_thread_count)
|
281
|
+
pipeline.shutdown
|
282
|
+
end
|
283
|
+
end
|
284
|
+
end
|
285
|
+
|
286
|
+
context "when there are threadsafe filters only" do
|
287
|
+
let(:test_config_with_filters) {
|
288
|
+
<<-eos
|
289
|
+
input {
|
290
|
+
dummyinput {}
|
291
|
+
}
|
292
|
+
|
293
|
+
filter {
|
294
|
+
dummysafefilter {}
|
295
|
+
}
|
296
|
+
|
297
|
+
output {
|
298
|
+
dummyoutput {}
|
299
|
+
}
|
300
|
+
eos
|
301
|
+
}
|
302
|
+
|
303
|
+
it "starts multiple filter threads" do
|
304
|
+
skip("This test has been failing periodically since November 2016. Tracked as https://github.com/elastic/logstash/issues/6245")
|
305
|
+
pipeline = mock_java_pipeline_from_string(test_config_with_filters)
|
306
|
+
pipeline.run
|
307
|
+
expect(pipeline.worker_threads.size).to eq(worker_thread_count)
|
308
|
+
pipeline.shutdown
|
309
|
+
end
|
310
|
+
end
|
311
|
+
end
|
312
|
+
|
313
|
+
context "close" do
|
314
|
+
before(:each) do
|
315
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput)
|
316
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
|
317
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
|
318
|
+
end
|
319
|
+
|
320
|
+
|
321
|
+
let(:test_config_without_output_workers) {
|
322
|
+
<<-eos
|
323
|
+
input {
|
324
|
+
dummyinput {}
|
325
|
+
}
|
326
|
+
|
327
|
+
output {
|
328
|
+
dummyoutput {}
|
329
|
+
}
|
330
|
+
eos
|
331
|
+
}
|
332
|
+
|
333
|
+
let(:test_config_with_output_workers) {
|
334
|
+
<<-eos
|
335
|
+
input {
|
336
|
+
dummyinput {}
|
337
|
+
}
|
338
|
+
|
339
|
+
output {
|
340
|
+
dummyoutput {
|
341
|
+
workers => 2
|
342
|
+
}
|
343
|
+
}
|
344
|
+
eos
|
345
|
+
}
|
346
|
+
|
347
|
+
context "output close" do
|
348
|
+
let(:pipeline) { mock_java_pipeline_from_string(test_config_without_output_workers) }
|
349
|
+
let(:output) { pipeline.outputs.first }
|
350
|
+
|
351
|
+
before do
|
352
|
+
allow(output).to receive(:do_close)
|
353
|
+
end
|
354
|
+
|
355
|
+
after do
|
356
|
+
pipeline.shutdown
|
357
|
+
end
|
358
|
+
|
359
|
+
it "should call close of output without output-workers" do
|
360
|
+
pipeline.run
|
361
|
+
|
362
|
+
expect(output).to have_received(:do_close).once
|
363
|
+
end
|
364
|
+
end
|
365
|
+
end
|
366
|
+
|
367
|
+
context "with no explicit ids declared" do
|
368
|
+
before(:each) do
|
369
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput)
|
370
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
|
371
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(DummyFilter)
|
372
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
|
373
|
+
end
|
374
|
+
|
375
|
+
let(:config) { "input { dummyinput { codec => plain { format => 'something' } } } filter { dummyfilter {} } output { dummyoutput {} }"}
|
376
|
+
let(:pipeline) { mock_java_pipeline_from_string(config) }
|
377
|
+
|
378
|
+
after do
|
379
|
+
# If you don't start/stop the pipeline it won't release the queue lock and will
|
380
|
+
# cause the suite to fail :(
|
381
|
+
pipeline.close
|
382
|
+
end
|
383
|
+
|
384
|
+
it "should use LIR provided IDs" do
|
385
|
+
expect(pipeline.inputs.first.id).to eq(pipeline.lir.input_plugin_vertices.first.id)
|
386
|
+
expect(pipeline.filters.first.id).to eq(pipeline.lir.filter_plugin_vertices.first.id)
|
387
|
+
expect(pipeline.outputs.first.id).to eq(pipeline.lir.output_plugin_vertices.first.id)
|
388
|
+
end
|
389
|
+
end
|
390
|
+
|
391
|
+
context "compiled flush function" do
|
392
|
+
extend PipelineHelpers
|
393
|
+
describe "flusher thread" do
|
394
|
+
before(:each) do
|
395
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput)
|
396
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
|
397
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
|
398
|
+
end
|
399
|
+
|
400
|
+
let(:config) { "input { dummyinput {} } output { dummyoutput {} }"}
|
401
|
+
|
402
|
+
it "should start the flusher thread only after the pipeline is running" do
|
403
|
+
pipeline = mock_java_pipeline_from_string(config)
|
404
|
+
|
405
|
+
expect(pipeline).to receive(:transition_to_running).ordered.and_call_original
|
406
|
+
expect(pipeline).to receive(:start_flusher).ordered.and_call_original
|
407
|
+
|
408
|
+
pipeline.run
|
409
|
+
pipeline.shutdown
|
410
|
+
end
|
411
|
+
end
|
412
|
+
|
413
|
+
context "cancelled events should not propagate down the filters" do
|
414
|
+
config <<-CONFIG
|
415
|
+
filter {
|
416
|
+
drop {}
|
417
|
+
}
|
418
|
+
CONFIG
|
419
|
+
|
420
|
+
sample_one("hello") do
|
421
|
+
expect(subject).to eq(nil)
|
422
|
+
end
|
423
|
+
end
|
424
|
+
|
425
|
+
context "new events should propagate down the filters" do
|
426
|
+
config <<-CONFIG
|
427
|
+
filter {
|
428
|
+
clone {
|
429
|
+
clones => ["clone1"]
|
430
|
+
}
|
431
|
+
}
|
432
|
+
CONFIG
|
433
|
+
|
434
|
+
sample_one(["foo", "bar"]) do
|
435
|
+
expect(subject.size).to eq(4)
|
436
|
+
end
|
437
|
+
end
|
438
|
+
end
|
439
|
+
|
440
|
+
describe "max inflight warning" do
|
441
|
+
let(:config) { "input { dummyinput {} } output { dummyoutput {} }" }
|
442
|
+
let(:batch_size) { 1 }
|
443
|
+
let(:pipeline_settings) { { "pipeline.batch.size" => batch_size, "pipeline.workers" => 1 } }
|
444
|
+
let(:pipeline) { mock_java_pipeline_from_string(config, pipeline_settings_obj) }
|
445
|
+
let(:logger) { pipeline.logger }
|
446
|
+
let(:warning_prefix) { Regexp.new("CAUTION: Recommended inflight events max exceeded!") }
|
447
|
+
|
448
|
+
before(:each) do
|
449
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput)
|
450
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
|
451
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
|
452
|
+
allow(logger).to receive(:warn)
|
453
|
+
|
454
|
+
# pipeline must be first called outside the thread context because it lazily initialize and will create a
|
455
|
+
# race condition if called in the thread
|
456
|
+
p = pipeline
|
457
|
+
t = Thread.new { p.run }
|
458
|
+
Timeout.timeout(timeout) do
|
459
|
+
sleep(0.1) until pipeline.ready?
|
460
|
+
end
|
461
|
+
pipeline.shutdown
|
462
|
+
t.join
|
463
|
+
end
|
464
|
+
|
465
|
+
it "should not raise a max inflight warning if the max_inflight count isn't exceeded" do
|
466
|
+
expect(logger).not_to have_received(:warn).with(warning_prefix)
|
467
|
+
end
|
468
|
+
|
469
|
+
context "with a too large inflight count" do
|
470
|
+
let(:batch_size) { LogStash::JavaPipeline::MAX_INFLIGHT_WARN_THRESHOLD + 1 }
|
471
|
+
|
472
|
+
it "should raise a max inflight warning if the max_inflight count is exceeded" do
|
473
|
+
expect(logger).to have_received(:warn).with(warning_prefix, hash_including(:pipeline_id => anything))
|
474
|
+
end
|
475
|
+
end
|
476
|
+
end
|
477
|
+
|
478
|
+
context "compiled filter functions" do
|
479
|
+
context "new events should propagate down the filters" do
|
480
|
+
extend PipelineHelpers
|
481
|
+
config <<-CONFIG
|
482
|
+
filter {
|
483
|
+
clone {
|
484
|
+
clones => ["clone1", "clone2"]
|
485
|
+
}
|
486
|
+
mutate {
|
487
|
+
add_field => {"foo" => "bar"}
|
488
|
+
}
|
489
|
+
}
|
490
|
+
CONFIG
|
491
|
+
|
492
|
+
sample_one("hello") do
|
493
|
+
expect(subject.size).to eq(3)
|
494
|
+
|
495
|
+
expect(subject[0].get("message")).to eq("hello")
|
496
|
+
expect(subject[0].get("type")).to be_nil
|
497
|
+
expect(subject[0].get("foo")).to eq("bar")
|
498
|
+
|
499
|
+
expect(subject[1].get("message")).to eq("hello")
|
500
|
+
expect(subject[1].get("type")).to eq("clone1")
|
501
|
+
expect(subject[1].get("foo")).to eq("bar")
|
502
|
+
|
503
|
+
expect(subject[2].get("message")).to eq("hello")
|
504
|
+
expect(subject[2].get("type")).to eq("clone2")
|
505
|
+
expect(subject[2].get("foo")).to eq("bar")
|
506
|
+
end
|
507
|
+
end
|
508
|
+
end
|
509
|
+
|
510
|
+
context "metrics" do
|
511
|
+
config = "input { } filter { } output { }"
|
512
|
+
|
513
|
+
let(:settings) { LogStash::SETTINGS.clone }
|
514
|
+
subject { mock_java_pipeline_from_string(config, settings, metric) }
|
515
|
+
|
516
|
+
after :each do
|
517
|
+
subject.close
|
518
|
+
end
|
519
|
+
|
520
|
+
context "when metric.collect is disabled" do
|
521
|
+
before :each do
|
522
|
+
settings.set("metric.collect", false)
|
523
|
+
end
|
524
|
+
|
525
|
+
context "if namespaced_metric is nil" do
|
526
|
+
let(:metric) { nil }
|
527
|
+
it "uses a `NullMetric` object" do
|
528
|
+
expect(subject.metric).to be_a(LogStash::Instrument::NullMetric)
|
529
|
+
end
|
530
|
+
end
|
531
|
+
|
532
|
+
context "if namespaced_metric is a Metric object" do
|
533
|
+
let(:collector) { ::LogStash::Instrument::Collector.new }
|
534
|
+
let(:metric) { ::LogStash::Instrument::Metric.new(collector) }
|
535
|
+
|
536
|
+
it "uses a `NullMetric` object" do
|
537
|
+
expect(subject.metric).to be_a(LogStash::Instrument::NullMetric)
|
538
|
+
end
|
539
|
+
|
540
|
+
it "uses the same collector" do
|
541
|
+
expect(subject.metric.collector).to be(collector)
|
542
|
+
end
|
543
|
+
end
|
544
|
+
|
545
|
+
context "if namespaced_metric is a NullMetric object" do
|
546
|
+
let(:collector) { ::LogStash::Instrument::Collector.new }
|
547
|
+
let(:metric) { ::LogStash::Instrument::NullMetric.new(collector) }
|
548
|
+
|
549
|
+
it "uses a `NullMetric` object" do
|
550
|
+
expect(subject.metric).to be_a(::LogStash::Instrument::NullMetric)
|
551
|
+
end
|
552
|
+
|
553
|
+
it "uses the same collector" do
|
554
|
+
expect(subject.metric.collector).to be(collector)
|
555
|
+
end
|
556
|
+
end
|
557
|
+
end
|
558
|
+
|
559
|
+
context "when metric.collect is enabled" do
|
560
|
+
before :each do
|
561
|
+
settings.set("metric.collect", true)
|
562
|
+
end
|
563
|
+
|
564
|
+
context "if namespaced_metric is nil" do
|
565
|
+
let(:metric) { nil }
|
566
|
+
it "uses a `NullMetric` object" do
|
567
|
+
expect(subject.metric).to be_a(LogStash::Instrument::NullMetric)
|
568
|
+
end
|
569
|
+
end
|
570
|
+
|
571
|
+
context "if namespaced_metric is a Metric object" do
|
572
|
+
let(:collector) { ::LogStash::Instrument::Collector.new }
|
573
|
+
let(:metric) { ::LogStash::Instrument::Metric.new(collector) }
|
574
|
+
|
575
|
+
it "uses a `Metric` object" do
|
576
|
+
expect(subject.metric).to be_a(LogStash::Instrument::Metric)
|
577
|
+
end
|
578
|
+
|
579
|
+
it "uses the same collector" do
|
580
|
+
expect(subject.metric.collector).to be(collector)
|
581
|
+
end
|
582
|
+
end
|
583
|
+
|
584
|
+
context "if namespaced_metric is a NullMetric object" do
|
585
|
+
let(:collector) { ::LogStash::Instrument::Collector.new }
|
586
|
+
let(:metric) { ::LogStash::Instrument::NullMetric.new(collector) }
|
587
|
+
|
588
|
+
it "uses a `NullMetric` object" do
|
589
|
+
expect(subject.metric).to be_a(LogStash::Instrument::NullMetric)
|
590
|
+
end
|
591
|
+
|
592
|
+
it "uses the same collector" do
|
593
|
+
expect(subject.metric.collector).to be(collector)
|
594
|
+
end
|
595
|
+
end
|
596
|
+
end
|
597
|
+
end
|
598
|
+
|
599
|
+
context "Periodic Flush" do
|
600
|
+
let(:config) do
|
601
|
+
<<-EOS
|
602
|
+
input {
|
603
|
+
dummy_input {}
|
604
|
+
}
|
605
|
+
filter {
|
606
|
+
dummy_flushing_filter {}
|
607
|
+
}
|
608
|
+
output {
|
609
|
+
dummy_output {}
|
610
|
+
}
|
611
|
+
EOS
|
612
|
+
end
|
613
|
+
let(:output) { ::LogStash::Outputs::DummyOutput.new }
|
614
|
+
|
615
|
+
before do
|
616
|
+
allow(::LogStash::Outputs::DummyOutput).to receive(:new).with(any_args).and_return(output)
|
617
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "dummy_input").and_return(DummyInput)
|
618
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummy_flushing_filter").and_return(DummyFlushingFilterPeriodic)
|
619
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummy_output").and_return(::LogStash::Outputs::DummyOutput)
|
620
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
|
621
|
+
end
|
622
|
+
|
623
|
+
it "flush periodically" do
|
624
|
+
Thread.abort_on_exception = true
|
625
|
+
pipeline = mock_java_pipeline_from_string(config, pipeline_settings_obj)
|
626
|
+
t = Thread.new { pipeline.run }
|
627
|
+
Timeout.timeout(timeout) do
|
628
|
+
sleep(0.1) until pipeline.ready?
|
629
|
+
end
|
630
|
+
Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
|
631
|
+
wait(10).for do
|
632
|
+
# give us a bit of time to flush the events
|
633
|
+
output.events.empty?
|
634
|
+
end.to be_falsey
|
635
|
+
end
|
636
|
+
|
637
|
+
expect(output.events.any? {|e| e.get("message") == "dummy_flush"}).to eq(true)
|
638
|
+
|
639
|
+
pipeline.shutdown
|
640
|
+
|
641
|
+
t.join
|
642
|
+
end
|
643
|
+
end
|
644
|
+
|
645
|
+
context "#started_at" do
|
646
|
+
# use a run limiting count to shutdown the pipeline automatically
|
647
|
+
let(:config) do
|
648
|
+
<<-EOS
|
649
|
+
input {
|
650
|
+
generator { count => 10 }
|
651
|
+
}
|
652
|
+
EOS
|
653
|
+
end
|
654
|
+
|
655
|
+
subject { mock_java_pipeline_from_string(config) }
|
656
|
+
|
657
|
+
context "when the pipeline is not started" do
|
658
|
+
after :each do
|
659
|
+
subject.close
|
660
|
+
end
|
661
|
+
|
662
|
+
it "returns nil when the pipeline isnt started" do
|
663
|
+
expect(subject.started_at).to be_nil
|
664
|
+
end
|
665
|
+
end
|
666
|
+
|
667
|
+
it "return when the pipeline started working" do
|
668
|
+
subject.run
|
669
|
+
expect(subject.started_at).to be < Time.now
|
670
|
+
subject.shutdown
|
671
|
+
end
|
672
|
+
end
|
673
|
+
|
674
|
+
context "#uptime" do
|
675
|
+
let(:config) do
|
676
|
+
<<-EOS
|
677
|
+
input {
|
678
|
+
generator {}
|
679
|
+
}
|
680
|
+
EOS
|
681
|
+
end
|
682
|
+
subject { mock_java_pipeline_from_string(config) }
|
683
|
+
|
684
|
+
context "when the pipeline is not started" do
|
685
|
+
after :each do
|
686
|
+
subject.close
|
687
|
+
end
|
688
|
+
|
689
|
+
it "returns 0" do
|
690
|
+
expect(subject.uptime).to eq(0)
|
691
|
+
end
|
692
|
+
end
|
693
|
+
|
694
|
+
context "when the pipeline is started" do
|
695
|
+
it "return the duration in milliseconds" do
|
696
|
+
# subject must be first call outside the thread context because of lazy initialization
|
697
|
+
s = subject
|
698
|
+
t = Thread.new { s.run }
|
699
|
+
Timeout.timeout(timeout) do
|
700
|
+
sleep(0.1) until subject.ready?
|
701
|
+
end
|
702
|
+
Timeout.timeout(timeout) do
|
703
|
+
sleep(0.1)
|
704
|
+
end
|
705
|
+
expect(subject.uptime).to be > 0
|
706
|
+
subject.shutdown
|
707
|
+
t.join
|
708
|
+
end
|
709
|
+
end
|
710
|
+
end
|
711
|
+
|
712
|
+
context "when collecting metrics in the pipeline" do
|
713
|
+
let(:metric) { LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new) }
|
714
|
+
|
715
|
+
subject { mock_java_pipeline_from_string(config, pipeline_settings_obj, metric) }
|
716
|
+
|
717
|
+
let(:pipeline_settings) { { "pipeline.id" => pipeline_id } }
|
718
|
+
let(:pipeline_id) { "main" }
|
719
|
+
let(:number_of_events) { 420 }
|
720
|
+
let(:dummy_id) { "my-multiline" }
|
721
|
+
let(:dummy_id_other) { "my-multiline_other" }
|
722
|
+
let(:dummy_output_id) { "my-dummyoutput" }
|
723
|
+
let(:generator_id) { "my-generator" }
|
724
|
+
let(:config) do
|
725
|
+
<<-EOS
|
726
|
+
input {
|
727
|
+
generator {
|
728
|
+
count => #{number_of_events}
|
729
|
+
id => "#{generator_id}"
|
730
|
+
}
|
731
|
+
}
|
732
|
+
filter {
|
733
|
+
dummyfilter {
|
734
|
+
id => "#{dummy_id}"
|
735
|
+
}
|
736
|
+
dummyfilter {
|
737
|
+
id => "#{dummy_id_other}"
|
738
|
+
}
|
739
|
+
}
|
740
|
+
output {
|
741
|
+
dummyoutput {
|
742
|
+
id => "#{dummy_output_id}"
|
743
|
+
}
|
744
|
+
}
|
745
|
+
EOS
|
746
|
+
end
|
747
|
+
let(:dummyoutput) { ::LogStash::Outputs::DummyOutput.new({ "id" => dummy_output_id }) }
|
748
|
+
let(:metric_store) { subject.metric.collector.snapshot_metric.metric_store }
|
749
|
+
let(:pipeline_thread) do
|
750
|
+
# subject has to be called for the first time outside the thread because it will create a race condition
|
751
|
+
# with the subject.ready? call since subject is lazily initialized
|
752
|
+
s = subject
|
753
|
+
Thread.new { s.run }
|
754
|
+
end
|
755
|
+
|
756
|
+
before :each do
|
757
|
+
allow(::LogStash::Outputs::DummyOutput).to receive(:new).with(any_args).and_return(dummyoutput)
|
758
|
+
allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
|
759
|
+
allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
|
760
|
+
allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(LogStash::Filters::DummyFilter)
|
761
|
+
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
|
762
|
+
|
763
|
+
pipeline_thread
|
764
|
+
Timeout.timeout(timeout) do
|
765
|
+
sleep(0.1) until subject.ready?
|
766
|
+
end
|
767
|
+
|
768
|
+
# make sure we have received all the generated events
|
769
|
+
Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
|
770
|
+
wait(3).for do
|
771
|
+
# give us a bit of time to flush the events
|
772
|
+
dummyoutput.events.size >= number_of_events
|
773
|
+
end.to be_truthy
|
774
|
+
end
|
775
|
+
end
|
776
|
+
|
777
|
+
after :each do
|
778
|
+
subject.shutdown
|
779
|
+
pipeline_thread.join
|
780
|
+
end
|
781
|
+
|
782
|
+
context "global metric" do
|
783
|
+
let(:collected_metric) { metric_store.get_with_path("stats/events") }
|
784
|
+
|
785
|
+
it "populates the different metrics" do
|
786
|
+
expect(collected_metric[:stats][:events][:duration_in_millis].value).not_to be_nil
|
787
|
+
expect(collected_metric[:stats][:events][:in].value).to eq(number_of_events)
|
788
|
+
expect(collected_metric[:stats][:events][:filtered].value).to eq(number_of_events)
|
789
|
+
expect(collected_metric[:stats][:events][:out].value).to eq(number_of_events)
|
790
|
+
end
|
791
|
+
end
|
792
|
+
|
793
|
+
context "pipelines" do
|
794
|
+
let(:collected_metric) { metric_store.get_with_path("stats/pipelines/") }
|
795
|
+
|
796
|
+
it "populates the pipelines core metrics" do
|
797
|
+
expect(collected_metric[:stats][:pipelines][:main][:events][:duration_in_millis].value).not_to be_nil
|
798
|
+
expect(collected_metric[:stats][:pipelines][:main][:events][:in].value).to eq(number_of_events)
|
799
|
+
expect(collected_metric[:stats][:pipelines][:main][:events][:filtered].value).to eq(number_of_events)
|
800
|
+
expect(collected_metric[:stats][:pipelines][:main][:events][:out].value).to eq(number_of_events)
|
801
|
+
end
|
802
|
+
|
803
|
+
it "populates the filter metrics" do
|
804
|
+
[dummy_id, dummy_id_other].map(&:to_sym).each do |id|
|
805
|
+
[:in, :out].each do |metric_key|
|
806
|
+
plugin_name = id.to_sym
|
807
|
+
expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:events][metric_key].value).to eq(number_of_events)
|
808
|
+
end
|
809
|
+
end
|
810
|
+
end
|
811
|
+
|
812
|
+
it "populates the output metrics" do
|
813
|
+
plugin_name = dummy_output_id.to_sym
|
814
|
+
|
815
|
+
expect(collected_metric[:stats][:pipelines][:main][:plugins][:outputs][plugin_name][:events][:in].value).to eq(number_of_events)
|
816
|
+
expect(collected_metric[:stats][:pipelines][:main][:plugins][:outputs][plugin_name][:events][:out].value).to eq(number_of_events)
|
817
|
+
expect(collected_metric[:stats][:pipelines][:main][:plugins][:outputs][plugin_name][:events][:duration_in_millis].value).not_to be_nil
|
818
|
+
end
|
819
|
+
|
820
|
+
it "populates the name of the output plugin" do
|
821
|
+
plugin_name = dummy_output_id.to_sym
|
822
|
+
expect(collected_metric[:stats][:pipelines][:main][:plugins][:outputs][plugin_name][:name].value).to eq(::LogStash::Outputs::DummyOutput.config_name)
|
823
|
+
end
|
824
|
+
|
825
|
+
it "populates the name of the filter plugin" do
|
826
|
+
[dummy_id, dummy_id_other].map(&:to_sym).each do |id|
|
827
|
+
plugin_name = id.to_sym
|
828
|
+
expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:name].value).to eq(LogStash::Filters::DummyFilter.config_name)
|
829
|
+
end
|
830
|
+
end
|
831
|
+
|
832
|
+
context 'when dlq is disabled' do
  let(:collect_stats) { subject.collect_dlq_stats }
  let(:collected_stats) { collected_metric[:stats][:pipelines][:main][:dlq] }
  # NOTE(review): the original also defined an `available_stats` let
  # ([:path, :queue_size_in_bytes]) that was referenced nowhere in this
  # context, so it has been removed.

  # Description fixed: original read "should show not show any dlq stats".
  it 'should not show any dlq stats' do
    collect_stats
    # With the DLQ disabled no :dlq metric namespace is populated at all.
    expect(collected_stats).to be_nil
  end
end
|
843
|
+
|
844
|
+
context 'when dlq is enabled' do
  # Enable the DLQ and point it at a throwaway temp directory.
  let(:dead_letter_queue_enabled) { true }
  let(:dead_letter_queue_path) { Stud::Temporary.directory }
  let(:pipeline_dlq_path) { "#{dead_letter_queue_path}/#{pipeline_id}" }

  let(:collect_stats) { subject.collect_dlq_stats }
  let(:collected_stats) { collected_metric[:stats][:pipelines][:main][:dlq] }

  it 'should show dlq stats' do
    collect_stats
    # A newly created dead letter queue with no entries will have a size of 1 (the version 'header')
    expect(collected_stats[:queue_size_in_bytes].value).to eq(1)
  end
end
|
858
|
+
end
|
859
|
+
end
|
860
|
+
|
861
|
+
context "Pipeline object" do
  before do
    # Stub plugin lookup so the pipelines build from lightweight dummies.
    allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
    allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec)
    allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(DummyFilter)
    allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
  end

  let(:pipeline1) { mock_java_pipeline_from_string("input { generator {} } filter { dummyfilter {} } output { dummyoutput {}}") }
  let(:pipeline2) { mock_java_pipeline_from_string("input { generator {} } filter { dummyfilter {} } output { dummyoutput {}}") }

  # multiple pipelines cannot be instantiated using the same PQ settings, force memory queue
  before :each do
    # Renamed from the misleading `pipeline_workers_setting`: this local holds
    # the "queue.type" setting, not the worker-count setting.
    queue_type_setting = LogStash::SETTINGS.get_setting("queue.type")
    allow(queue_type_setting).to receive(:value).and_return("memory")
    pipeline_settings.each {|k, v| pipeline_settings_obj.set(k, v) }
  end

  it "should not add ivars" do
    # Two pipelines built from identical configs should expose the exact same
    # set of instance variables (i.e. construction is deterministic).
    expect(pipeline1.instance_variables).to eq(pipeline2.instance_variables)
  end
end
|
883
|
+
|
884
|
+
context "#system" do
  after do
    pipeline.close # close the queue
  end

  context "when the pipeline is a system pipeline" do
    let(:pipeline) { mock_java_pipeline_from_string("input { generator {} } output { null {} }", mock_settings("pipeline.system" => true)) }
    it "returns true" do
      expect(pipeline.system?).to be_truthy
    end
  end

  context "when the pipeline is not a system pipeline" do
    let(:pipeline) { mock_java_pipeline_from_string("input { generator {} } output { null {} }", mock_settings("pipeline.system" => false)) }
    # Description fixed: the example asserts a falsey result but was labelled "returns true".
    it "returns false" do
      expect(pipeline.system?).to be_falsey
    end
  end
end
|
903
|
+
|
904
|
+
context "#reloadable?" do
  after do
    pipeline.close # close the queue
  end

  context "when all plugins are reloadable and pipeline is configured as reloadable" do
    let(:pipeline) { mock_java_pipeline_from_string("input { generator {} } output { null {} }", mock_settings("pipeline.reloadable" => true)) }

    it "returns true" do
      expect(pipeline.reloadable?).to be_truthy
    end
  end

  context "when the plugins are not reloadable and pipeline is configured as reloadable" do
    # stdin is used here as a (presumably) non-reloadable input — TODO confirm
    # that assumption still holds for the stdin plugin.
    let(:pipeline) { mock_java_pipeline_from_string("input { stdin {} } output { null {} }", mock_settings("pipeline.reloadable" => true)) }

    # Description fixed: the example asserts a falsey result but was labelled "returns true".
    it "returns false" do
      expect(pipeline.reloadable?).to be_falsey
    end
  end

  context "when all plugins are reloadable and pipeline is configured as non-reloadable" do
    let(:pipeline) { mock_java_pipeline_from_string("input { generator {} } output { null {} }", mock_settings("pipeline.reloadable" => false)) }

    # Description fixed: the example asserts a falsey result but was labelled "returns true".
    it "returns false" do
      expect(pipeline.reloadable?).to be_falsey
    end
  end
end
|
933
|
+
end
|