logstash-core 6.2.3-java → 6.2.4-java
- checksums.yaml +4 -4
- data/lib/logstash/compiler/lscl.rb +1 -1
- data/lib/logstash/pipeline.rb +5 -10
- data/spec/logstash/pipeline_spec.rb +10 -1
- data/versions-gem-copy.yml +2 -2
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f0c1ebf3c38d227994519be789ac1c8cdc2e37668f80ee8ce8058007fb3b17aa
+  data.tar.gz: 6b4aeacf0b5d7753a8e7365e46f9c1d0580d4a2b516e07342a74202fecab1168
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d1a7c97f81eacf05460f2213748f7a764b74d4e9d5891e373066792cffe98a1e3a366ec1a07d259daf6a9c84fbe646df45a0b4f619e35108db0e6cd422c75972
+  data.tar.gz: 72636dc03260c5b700adc426c1494946fcff6ba0bc3f60ff49e253d83f786675f0dbe311c9fdc09c61a18ec67eb115c2fdfe7b282c84d3cd29b0448880a41f12
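These sums cover the two members of the .gem archive (itself a plain tar): metadata.gz and data.tar.gz. As a rough sketch, assuming you have already untarred the 6.2.4 .gem into the current directory so checksums.yaml and data.tar.gz sit side by side, the published SHA256 can be checked like this:

# Sketch only: verify an unpacked gem member against checksums.yaml.
# Assumes the .gem archive has been untarred into the current directory.
require "digest"
require "yaml"

checksums = YAML.load_file("checksums.yaml")
expected  = checksums["SHA256"]["data.tar.gz"]
actual    = Digest::SHA256.file("data.tar.gz").hexdigest

if actual == expected
  puts "data.tar.gz matches the published SHA256"
else
  abort "checksum mismatch: expected #{expected}, got #{actual}"
end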
data/lib/logstash/compiler/lscl.rb
CHANGED
@@ -181,7 +181,7 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
       duplicate_values = find_duplicate_keys

       if duplicate_values.size > 0
-        raise ConfigurationError.new(
+        raise ::LogStash::ConfigurationError.new(
           I18n.t("logstash.runner.configuration.invalid_plugin_settings_duplicate_keys",
             :keys => duplicate_values.join(', '),
             :line => input.line_of(interval.first),
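The single changed line anchors the error class at the top level. A likely reading, and an assumption rather than something the diff states: the grammar code sits under a nested module that is itself named LogStash (see the hunk header), so an unqualified ConfigurationError does not resolve to ::LogStash::ConfigurationError. A standalone sketch with simplified module names:

# Standalone sketch (simplified names, not logstash-core code) of why the
# ::-anchored constant matters inside a namespace that re-uses "LogStash".
module LogStash
  class ConfigurationError < StandardError; end
end

module Grammar
  module LogStash                # shadows the top-level LogStash module lexically
    module Compiler
      def self.validate!(duplicate_values)
        return if duplicate_values.empty?
        # An unqualified `ConfigurationError` here would be a NameError:
        # Ruby checks Compiler, Grammar::LogStash, Grammar, then the top level,
        # and none of those define ConfigurationError directly.
        raise ::LogStash::ConfigurationError.new("Duplicate keys: #{duplicate_values.join(', ')}")
      end
    end
  end
end

begin
  Grammar::LogStash::Compiler.validate!(["host", "host"])
rescue LogStash::ConfigurationError => e
  puts "caught: #{e.message}"
end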
data/lib/logstash/pipeline.rb
CHANGED
@@ -196,6 +196,7 @@ module LogStash; class Pipeline < BasePipeline
     @running = Concurrent::AtomicBoolean.new(false)
     @flushing = Concurrent::AtomicReference.new(false)
     @outputs_registered = Concurrent::AtomicBoolean.new(false)
+    @worker_shutdown = java.util.concurrent.atomic.AtomicBoolean.new(false)
   end # def initialize

   def ready?
@@ -257,7 +258,7 @@ module LogStash; class Pipeline < BasePipeline
     status = wait_until_started

     if status
-      @logger.info("Pipeline started
+      @logger.info("Pipeline started successfully", default_logging_keys)
     end

     status
@@ -411,13 +412,10 @@ module LogStash; class Pipeline < BasePipeline
   # Main body of what a worker thread does
   # Repeatedly takes batches off the queue, filters, then outputs them
   def worker_loop(batch_size, batch_delay)
-    shutdown_requested = false
-
     @filter_queue_client.set_batch_dimensions(batch_size, batch_delay)
     output_events_map = Hash.new { |h, k| h[k] = [] }
     while true
       signal = @signal_queue.poll || NO_SIGNAL
-      shutdown_requested |= signal.shutdown? # latch on shutdown signal

       batch = @filter_queue_client.read_batch # metrics are started in read_batch
       batch_size = batch.size
@@ -431,7 +429,7 @@ module LogStash; class Pipeline < BasePipeline
         @filter_queue_client.close_batch(batch)
       end
       # keep break at end of loop, after the read_batch operation, some pipeline specs rely on this "final read_batch" before shutdown.
-      break if (shutdown_requested && !draining_queue?)
+      break if (@worker_shutdown.get && !draining_queue?)
     end

     # we are shutting down, queue is drained if it was required, now perform a final flush.
@@ -576,11 +574,8 @@ module LogStash; class Pipeline < BasePipeline
   # tell the worker threads to stop and then block until they've fully stopped
   # This also stops all filter and output plugins
   def shutdown_workers
-
-    @worker_threads.each do |t|
-      @logger.debug("Pushing shutdown", default_logging_keys(:thread => t.inspect))
-      @signal_queue.put(SHUTDOWN)
-    end
+    @logger.debug("Setting shutdown", default_logging_keys)
+    @worker_shutdown.set(true)

     @worker_threads.each do |t|
       @logger.debug("Shutdown waiting for worker thread" , default_logging_keys(:thread => t.inspect))
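Read together, the pipeline.rb hunks swap the per-worker SHUTDOWN signal pushed onto @signal_queue for a single shared AtomicBoolean that every worker loop polls before breaking out. The sketch below illustrates that pattern on JRuby (the java-platform runtime this gem targets); TinyPipeline and its timings are invented for the example and are not logstash-core's API.

# Illustrative sketch (JRuby): one shared atomic flag shuts down every worker,
# mirroring the @worker_shutdown approach in the diff. Not logstash-core code.
require "java"

class TinyPipeline
  def initialize(worker_count)
    @worker_shutdown = java.util.concurrent.atomic.AtomicBoolean.new(false)
    @worker_threads = Array.new(worker_count) do |i|
      Thread.new do
        loop do
          # ... read a batch, filter it, push it to outputs ...
          sleep 0.05
          # every worker observes the same flag, so no per-worker signal is needed
          break if @worker_shutdown.get
        end
        puts "worker #{i} stopped"
      end
    end
  end

  def shutdown_workers
    @worker_shutdown.set(true)   # a single write stops all workers
    @worker_threads.each(&:join)
  end
end

pipeline = TinyPipeline.new(2)
sleep 0.2
pipeline.shutdown_workers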
data/spec/logstash/pipeline_spec.rb
CHANGED
@@ -22,6 +22,15 @@ class DummyInput < LogStash::Inputs::Base
   end
 end

+# This input runs long enough that a flush should occur
+class DummyFlushEnablingInput < DummyInput
+  def run(queue)
+    while !stop?
+      sleep 1
+    end
+  end
+end
+
 class DummyInputGenerator < LogStash::Inputs::Base
   config_name "dummyinputgenerator"
   milestone 2
@@ -650,7 +659,7 @@ describe LogStash::Pipeline do

     before do
       allow(::LogStash::Outputs::DummyOutput).to receive(:new).with(any_args).and_return(output)
-      allow(LogStash::Plugin).to receive(:lookup).with("input", "dummy_input").and_return(
+      allow(LogStash::Plugin).to receive(:lookup).with("input", "dummy_input").and_return(DummyFlushEnablingInput)
       allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummy_flushing_filter").and_return(DummyFlushingFilterPeriodic)
       allow(LogStash::Plugin).to receive(:lookup).with("output", "dummy_output").and_return(::LogStash::Outputs::DummyOutput)
       allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
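The spec change stubs the dummy_input lookup with the new DummyFlushEnablingInput, whose run method spins on stop? so the pipeline stays alive long enough for the periodic flush under test to fire. A plain-Ruby sketch of that stop-flag loop, with an invented SleepyInput class standing in for the Logstash plugin base class:

# Illustrative sketch of the stop-flag run loop used by DummyFlushEnablingInput.
# SleepyInput and its methods are invented here; real Logstash inputs inherit
# their stop handling from the plugin base class instead.
class SleepyInput
  def initialize
    @stopping = false
  end

  def stop?
    @stopping
  end

  def do_stop
    @stopping = true
  end

  def run(queue)
    sleep 0.1 until stop?   # keep the "pipeline" alive until asked to stop
    queue << :finished
  end
end

queue  = Queue.new
input  = SleepyInput.new
worker = Thread.new { input.run(queue) }
sleep 0.3
input.do_stop
worker.join
puts queue.pop              # => finished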
data/versions-gem-copy.yml
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-core
 version: !ruby/object:Gem::Version
-  version: 6.2.3
+  version: 6.2.4
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-
+date: 2018-04-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement