logstash-core 5.5.3-java → 5.6.0-java
- checksums.yaml +4 -4
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash/api/commands/node.rb +2 -2
- data/lib/logstash/api/commands/stats.rb +2 -2
- data/lib/logstash/config/config_ast.rb +24 -1
- data/lib/logstash/config/modules_common.rb +47 -15
- data/lib/logstash/config/source/modules.rb +55 -0
- data/lib/logstash/config/string_escape.rb +27 -0
- data/lib/logstash/elasticsearch_client.rb +24 -2
- data/lib/logstash/environment.rb +2 -0
- data/lib/logstash/filter_delegator.rb +9 -6
- data/lib/logstash/instrument/collector.rb +7 -5
- data/lib/logstash/instrument/metric_store.rb +11 -11
- data/lib/logstash/instrument/namespaced_metric.rb +4 -0
- data/lib/logstash/instrument/namespaced_null_metric.rb +4 -0
- data/lib/logstash/instrument/null_metric.rb +10 -0
- data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
- data/lib/logstash/instrument/periodic_pollers.rb +3 -1
- data/lib/logstash/instrument/wrapped_write_client.rb +33 -24
- data/lib/logstash/logging/logger.rb +26 -19
- data/lib/logstash/modules/{importer.rb → elasticsearch_importer.rb} +3 -3
- data/lib/logstash/modules/kibana_base.rb +24 -0
- data/lib/logstash/modules/kibana_client.rb +124 -0
- data/lib/logstash/modules/kibana_config.rb +29 -28
- data/lib/logstash/modules/kibana_dashboards.rb +36 -0
- data/lib/logstash/modules/kibana_importer.rb +17 -0
- data/lib/logstash/modules/kibana_settings.rb +40 -0
- data/lib/logstash/modules/logstash_config.rb +89 -17
- data/lib/logstash/modules/resource_base.rb +6 -5
- data/lib/logstash/modules/scaffold.rb +11 -3
- data/lib/logstash/modules/settings_merger.rb +23 -0
- data/lib/logstash/modules/util.rb +17 -0
- data/lib/logstash/output_delegator.rb +7 -5
- data/lib/logstash/pipeline.rb +34 -2
- data/lib/logstash/runner.rb +8 -13
- data/lib/logstash/settings.rb +20 -1
- data/lib/logstash/util/wrapped_acked_queue.rb +5 -24
- data/lib/logstash/util/wrapped_synchronous_queue.rb +14 -24
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +11 -4
- data/spec/logstash/agent_spec.rb +19 -6
- data/spec/logstash/api/modules/node_spec.rb +2 -1
- data/spec/logstash/config/config_ast_spec.rb +47 -8
- data/spec/logstash/config/string_escape_spec.rb +24 -0
- data/spec/logstash/event_spec.rb +9 -0
- data/spec/logstash/filter_delegator_spec.rb +21 -7
- data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
- data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
- data/spec/logstash/legacy_ruby_event_spec.rb +4 -4
- data/spec/logstash/modules/logstash_config_spec.rb +56 -0
- data/spec/logstash/modules/scaffold_spec.rb +234 -0
- data/spec/logstash/output_delegator_spec.rb +15 -5
- data/spec/logstash/pipeline_spec.rb +76 -26
- data/spec/logstash/runner_spec.rb +46 -25
- data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
- metadata +22 -4
- data/lib/logstash/modules/kibana_base_resource.rb +0 -10
- data/lib/logstash/program.rb +0 -14
data/lib/logstash/modules/kibana_dashboards.rb ADDED
@@ -0,0 +1,36 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require_relative "kibana_base"
+
+module LogStash module Modules class KibanaDashboards < KibanaBase
+  include LogStash::Util::Loggable
+
+  attr_reader :import_path, :content
+
+  # content is a list of kibana file resources
+  def initialize(import_path, content)
+    @import_path, @content = import_path, content
+  end
+
+  def import(client)
+    # e.g. curl "http://localhost:5601/api/kibana/dashboards/import"
+    # extract and prepare all objects
+    objects = []
+    content.each do |resource|
+      hash = {
+        "id" => resource.content_id,
+        "type" => resource.content_type,
+        "version" => 1,
+        "attributes" => resource.content_as_object
+      }
+      objects << hash
+    end
+    body = {:version => client.version, :objects => objects}
+    response = client.post(import_path, body)
+    if response.failed?
+      logger.error("Attempted POST failed", :url_path => import_path, :response => response.body)
+    end
+    response
+  end
+end end end
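Note: KibanaDashboards#import maps each resource onto the document shape expected by Kibana's dashboard-import endpoint. A minimal sketch of the assembled payload, with illustrative resource values (the version comes from the Kibana client, the rest from each resource):

    # Illustrative only — what `body` looks like before client.post(import_path, body)
    body = {
      :version => "5.6.0",                          # client.version
      :objects => [
        { "id"         => "example-dashboard",      # resource.content_id
          "type"       => "dashboard",              # resource.content_type
          "version"    => 1,
          "attributes" => { "title" => "Example" }  # resource.content_as_object
        }
      ]
    }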
data/lib/logstash/modules/kibana_importer.rb ADDED
@@ -0,0 +1,17 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+
+module LogStash module Modules class KibanaImporter
+  include LogStash::Util::Loggable
+
+  def initialize(client)
+    @client = client
+  end
+
+  def put(via_kibana)
+    path = via_kibana.import_path
+    logger.debug("Attempting POST", :url_path => path, :content => via_kibana.content)
+    via_kibana.import(@client)
+  end
+end end end
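Note: the importer is thin glue — it debug-logs the attempt, then hands its client to the resource bundle's own import method. A hypothetical wiring (kibana_client and resources are placeholders, not the real KibanaClient constructor):

    client     = kibana_client   # anything responding to #post and #version
    importer   = LogStash::Modules::KibanaImporter.new(client)
    dashboards = LogStash::Modules::KibanaDashboards.new("/api/kibana/dashboards/import", resources)
    importer.put(dashboards)     # logs, then calls dashboards.import(client)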
data/lib/logstash/modules/kibana_settings.rb ADDED
@@ -0,0 +1,40 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require_relative "kibana_base"
+
+module LogStash module Modules class KibanaSettings < KibanaBase
+  include LogStash::Util::Loggable
+
+  class Setting
+    attr_reader :name, :value
+    def initialize(name, value)
+      @name, @value = name, value
+    end
+  end
+
+  attr_reader :import_path, :content
+
+  # content is an array of Setting required for this module
+  def initialize(import_path, content)
+    @import_path, @content = import_path, content
+  end
+
+  def import(client)
+    # e.g. curl "http://localhost:5601/api/kibana/settings"
+    # 6.0.0-alpha3 -> {"settings":{"buildNum":{"userValue":15613},"defaultIndex":{"userValue":"arcsight-*"}}}
+    # 5.4 -> {"settings":{"defaultIndex":{"userValue":"cef-*"},"metrics:max_buckets":{"userValue":"600000"}}}
+    # array of Setting objects
+    # The POST api body { "changes": { "defaultIndex": "arcsight-*", "metrics:max_buckets": "400" } }
+    settings = {}
+    content.each do |setting|
+      settings[setting.name] = "#{setting.value}"
+    end
+    body = {"changes" => settings}
+    response = client.post(import_path, body)
+    if response.failed?
+      logger.error("Attempted POST failed", :url_path => import_path, :response => response.body)
+    end
+    response
+  end
+end end end
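Note: every setting value is stringified into a single changes hash, matching the POST body documented in the comments above. A short sketch:

    settings = [
      LogStash::Modules::KibanaSettings::Setting.new("defaultIndex", "arcsight-*"),
      LogStash::Modules::KibanaSettings::Setting.new("metrics:max_buckets", 400)
    ]
    kibana_settings = LogStash::Modules::KibanaSettings.new("/api/kibana/settings", settings)
    # kibana_settings.import(client) then POSTs:
    # { "changes" => { "defaultIndex" => "arcsight-*", "metrics:max_buckets" => "400" } }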
data/lib/logstash/modules/logstash_config.rb CHANGED
@@ -1,9 +1,9 @@
 # encoding: utf-8
 require "logstash/namespace"
 require_relative "file_reader"
+require "logstash/settings"
 
 module LogStash module Modules class LogStashConfig
-
   # We name it `modul` here because `module` has meaning in Ruby.
   def initialize(modul, settings)
     @directory = ::File.join(modul.directory, "logstash")
@@ -15,27 +15,72 @@ module LogStash module Modules class LogStashConfig
     ::File.join(@directory, "#{@name}.conf.erb")
   end
 
-  def
-
+  def configured_inputs(default = [], aliases = {})
+    name = "var.inputs"
+    values = get_setting(LogStash::Setting::SplittableStringArray.new(name, String, default))
+
+    aliases.each { |k,v| values << v if values.include?(k) }
+    aliases.invert.each { |k,v| values << v if values.include?(k) }
+    values.flatten.uniq
+  end
+
+  def alias_settings_keys!(aliases)
+    aliased_settings = alias_matching_keys(aliases, @settings)
+    @settings = alias_matching_keys(aliases.invert, aliased_settings)
+  end
+
+  def array_to_string(array)
+    "[#{array.collect { |i| "'#{i}'" }.join(", ")}]"
+  end
+
+  def csv_string(array)
+    "'#{array.join(',')}'"
+  end
+
+  def get_setting(setting_class)
+    raw_value = @settings[setting_class.name]
+    # If we dont check for NIL, the Settings class will try to coerce the value
+    # and most of the it will fails when a NIL value is explicitely set.
+    # This will be fixed once we wrap the plugins settings into a Settings class
+    setting_class.set(raw_value) unless raw_value.nil?
+    setting_class.value
+  end
+
+  def setting(name, default)
+    # by default we use the more permissive setting which is a `NullableString`
+    # This is fine because the end format of the logstash configuration is a string representation
+    # of the pipeline. There is a good reason why I think we should use the settings classes, we
+    # can `preprocess` a template and generate a configuration from the defined settings
+    # validate the values and replace them in the template.
+    case default
+    when String
+      get_setting(LogStash::Setting::NullableString.new(name, default.to_s))
+    when Numeric
+      get_setting(LogStash::Setting::Numeric.new(name, default))
+    else
+      get_setting(LogStash::Setting::NullableString.new(name, default.to_s))
+    end
   end
 
   def elasticsearch_output_config(type_string = nil)
-    hosts =
-
-
-
-
-    user
-
+    hosts = array_to_string(get_setting(LogStash::Setting::SplittableStringArray.new("var.elasticsearch.hosts", String, ["localhost:9200"])))
+    index = "#{@name}-#{setting("var.elasticsearch.index_suffix", "%{+YYYY.MM.dd}")}"
+    user = @settings["var.elasticsearch.username"]
+    password = @settings["var.elasticsearch.password"]
+    lines = ["hosts => #{hosts}", "index => \"#{index}\""]
+    lines.push(user ? "user => \"#{user}\"" : nil)
+    lines.push(password ? "password => \"#{password}\"" : nil)
+    lines.push(type_string ? "document_type => #{type_string}" : nil)
+    lines.push("ssl => #{@settings.fetch('var.elasticsearch.ssl.enabled', false)}")
+    if cacert = @settings["var.elasticsearch.ssl.certificate_authority"]
+      lines.push("cacert => \"#{cacert}\"") if cacert
+    end
+    # NOTE: the first line should be indented in the conf.erb
     <<-CONF
 elasticsearch {
-
-
-
-    user => "#{user}"
-    manage_template => false
-    #{document_type_line}
-  }
+    #{lines.compact.join("\n    ")}
+    manage_template => false
+    }
 CONF
   end
 
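Note: the alias handling in configured_inputs is symmetric — enabling either name of an alias pair yields both. A worked example (config here is assumed to be a LogStashConfig whose settings hash sets "var.inputs" => "tcp"):

    config.configured_inputs(["kafka"], "tcp" => "syslog")
    # values from var.inputs   => ["tcp"]   (SplittableStringArray splits "tcp,udp"-style values)
    # after the aliases pass   => ["tcp", "syslog"]
    # after the inverted pass  => ["tcp", "syslog", "tcp"]
    # flatten.uniq             => ["tcp", "syslog"]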
@@ -45,4 +90,31 @@ CONF
     renderer = ERB.new(FileReader.read(template))
     renderer.result(binding)
   end
+
+  private
+  # For a first version we are copying the values of the original hash,
+  # this might become problematic if we users changes the values of the
+  # settings in the template, which could result in an inconsistent view of the original data
+  #
+  # For v1 of the feature I think its an OK compromise, v2 we have a more advanced hash that
+  # support alias.
+  def alias_matching_keys(aliases, target)
+    aliased_target = target.dup
+
+    aliases.each do |matching_key_prefix, new_key_prefix|
+      target.each do |k, v|
+        re = /^#{matching_key_prefix}\./
+
+        if k =~ re
+          alias_key = k.gsub(re, "#{new_key_prefix}.")
+
+          # If the user setup the same values twices with different values lets just halt.
+          raise "Cannot create an alias, the destination key has already a value set: original key: #{k}, alias key: #{alias_key}" if (!aliased_target[alias_key].nil? && aliased_target[alias_key] != v)
+          aliased_target[alias_key] = v unless v.nil?
+        end
+      end
+    end
+
+    aliased_target
+  end
 end end end
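Note: alias_matching_keys copies values from one key prefix to another, and alias_settings_keys! applies it in both directions so a template can read either spelling. Sketch of the effect (keys illustrative):

    # aliases          = { "var.elasticsearch" => "var.output.elasticsearch" }
    # @settings before = { "var.elasticsearch.hosts" => "es:9200" }
    # @settings after alias_settings_keys!(aliases):
    #   { "var.elasticsearch.hosts"        => "es:9200",
    #     "var.output.elasticsearch.hosts" => "es:9200" }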
data/lib/logstash/modules/resource_base.rb CHANGED
@@ -1,5 +1,6 @@
 # encoding: utf-8
 require "logstash/namespace"
+require "logstash/json"
 require_relative "file_reader"
 
 module LogStash module Modules module ResourceBase
@@ -8,7 +9,11 @@ module LogStash module Modules module ResourceBase
   def initialize(base, content_type, content_path, content = nil, content_id = nil)
     @base, @content_type, @content_path = base, content_type, content_path
     @content_id = content_id || ::File.basename(@content_path, ".*")
+    # content at this time will be a JSON string
     @content = content
+    if !@content.nil?
+      @content_as_object = LogStash::Json.load(@content) rescue {}
+    end
   end
 
   def content
@@ -19,12 +24,8 @@ module LogStash module Modules module ResourceBase
     "#{base}, #{content_type}, #{content_path}, #{content_id}"
   end
 
-  def contains?(text)
-    content.include?(text)
-  end
-
   def content_as_object
-
+    @content_as_object ||= FileReader.read_json(@content_path) rescue nil
   end
 
   def <=>(other)
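Net effect of the ResourceBase changes, sketched (resource is any object mixing in ResourceBase): inline content is parsed eagerly at construction, file-backed content stays lazy:

    resource.content_as_object
    # inline content '{"title":"T"}' => {"title"=>"T"}  (parsed in initialize; bad JSON degrades to {})
    # no inline content              => FileReader.read_json(content_path), memoized
    # unreadable or invalid file     => nil  (rescue nil instead of raising)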
data/lib/logstash/modules/scaffold.rb CHANGED
@@ -1,6 +1,7 @@
 # encoding: utf-8
 require "logstash/namespace"
 require "logstash/logging"
+require "logstash/util/loggable"
 require "erb"
 
 require_relative "elasticsearch_config"
@@ -10,19 +11,26 @@ require_relative "logstash_config"
|
|
10
11
|
module LogStash module Modules class Scaffold
|
11
12
|
include LogStash::Util::Loggable
|
12
13
|
|
13
|
-
attr_reader :directory, :module_name, :
|
14
|
+
attr_reader :directory, :module_name, :kibana_version_parts
|
15
|
+
attr_reader :kibana_configuration, :logstash_configuration, :elasticsearch_configuration
|
14
16
|
|
15
17
|
def initialize(name, directory)
|
16
18
|
@module_name = name
|
17
19
|
@directory = directory # this is the 'configuration folder in the GEM root.'
|
20
|
+
@kibana_version_parts = "5.6.0".split('.') # this is backup in case kibana client fails to connect
|
21
|
+
logger.info("Initializing module", :module_name => name, :directory => directory)
|
18
22
|
end
|
19
23
|
|
20
|
-
def
|
24
|
+
def add_kibana_version(version_parts)
|
25
|
+
@kibana_version_parts = version_parts
|
26
|
+
end
|
27
|
+
|
28
|
+
def import(import_engine, kibana_import_engine)
|
21
29
|
@elasticsearch_configuration.resources.each do |resource|
|
22
30
|
import_engine.put(resource)
|
23
31
|
end
|
24
32
|
@kibana_configuration.resources.each do |resource|
|
25
|
-
|
33
|
+
kibana_import_engine.put(resource)
|
26
34
|
end
|
27
35
|
end
|
28
36
|
|
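Sketch of the new Scaffold surface (module name, path and engine objects are illustrative; the engines correspond to the ElasticsearchImporter and KibanaImporter in this release):

    scaffold = LogStash::Modules::Scaffold.new("netflow", "/path/to/gem/configuration")
    scaffold.add_kibana_version(["6", "0", "0"])  # overrides the "5.6.0" fallback parts
    scaffold.import(elasticsearch_importer, kibana_importer)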
data/lib/logstash/modules/settings_merger.rb ADDED
@@ -0,0 +1,23 @@
+# encoding: utf-8
+require "logstash/namespace"
+
+module LogStash module Modules class SettingsMerger
+  def self.merge(cli_settings, yml_settings)
+    # both args are arrays of hashes, e.g.
+    # [{"name"=>"mod1", "var.input.tcp.port"=>"3333"}, {"name"=>"mod2"}]
+    # [{"name"=>"mod1", "var.input.tcp.port"=>2222, "var.kibana.username"=>"rupert", "var.kibana.password"=>"fotherington"}, {"name"=>"mod3", "var.input.tcp.port"=>4445}]
+    merged = []
+    # union and group_by preserves order
+    # union will also coalesce identical hashes
+    union_of_settings = (cli_settings | yml_settings)
+    grouped_by_name = union_of_settings.group_by{|e| e["name"]}
+    grouped_by_name.each do |name, array|
+      if array.size == 2
+        merged << array.first.merge(array.last)
+      else
+        merged.concat(array)
+      end
+    end
+    merged
+  end
+end end end
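Worked example using the sample arrays from the comments; when a module appears in both lists, the second (yml) hash wins on conflicting keys, since Hash#merge favors its argument:

    cli = [{"name"=>"mod1", "var.input.tcp.port"=>"3333"}, {"name"=>"mod2"}]
    yml = [{"name"=>"mod1", "var.input.tcp.port"=>2222,
            "var.kibana.username"=>"rupert", "var.kibana.password"=>"fotherington"},
           {"name"=>"mod3", "var.input.tcp.port"=>4445}]
    LogStash::Modules::SettingsMerger.merge(cli, yml)
    # => [{"name"=>"mod1", "var.input.tcp.port"=>2222,
    #      "var.kibana.username"=>"rupert", "var.kibana.password"=>"fotherington"},
    #     {"name"=>"mod2"},
    #     {"name"=>"mod3", "var.input.tcp.port"=>4445}]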
data/lib/logstash/modules/util.rb ADDED
@@ -0,0 +1,17 @@
+# encoding: utf-8
+require_relative "scaffold"
+
+# This module function should be used when gems or
+# x-pack defines modules in their folder structures.
+module LogStash module Modules module Util
+  def self.register_local_modules(path)
+    modules_path = ::File.join(path, "modules")
+    ::Dir.foreach(modules_path) do |item|
+      # Ignore unix relative path ids
+      next if item == '.' or item == '..'
+      # Ignore non-directories
+      next if !::File.directory?(::File.join(modules_path, ::File::Separator, item))
+      LogStash::PLUGIN_REGISTRY.add(:modules, item, Scaffold.new(item, ::File.join(modules_path, item, "configuration")))
+    end
+  end
+end end end
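Expected on-disk layout, sketched (names illustrative); each directory under modules/ is registered as a Scaffold pointing at its configuration folder:

    # /path/to/gem/root/modules/
    #   mymodule/configuration/...
    LogStash::Modules::Util.register_local_modules("/path/to/gem/root")
    # registers Scaffold.new("mymodule", "/path/to/gem/root/modules/mymodule/configuration")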
data/lib/logstash/output_delegator.rb CHANGED
@@ -19,7 +19,9 @@ module LogStash class OutputDelegator
     @namespaced_metric = metric.namespace(id.to_sym)
     @namespaced_metric.gauge(:name, config_name)
     @metric_events = @namespaced_metric.namespace(:events)
-
+    @in_counter = @metric_events.counter(:in)
+    @out_counter = @metric_events.counter(:out)
+    @time_metric = @metric_events.counter(:duration_in_millis)
     @strategy = strategy_registry.
       class_for(self.concurrency).
       new(@logger, @output_class, @namespaced_metric, execution_context, plugin_args)
@@ -42,11 +44,11 @@ module LogStash class OutputDelegator
   end
 
   def multi_receive(events)
-    @
-
+    @in_counter.increment(events.length)
+    start_time = java.lang.System.current_time_millis
     @strategy.multi_receive(events)
-
-    @
+    @time_metric.increment(java.lang.System.current_time_millis - start_time)
+    @out_counter.increment(events.length)
   end
 
   def do_close
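The three counters give per-output throughput and latency in one namespace. A worked example, values illustrative:

    # one batch of 5 events that takes ~12 ms:
    #   events.in                 += 5    (before the strategy runs)
    #   events.duration_in_millis += 12
    #   events.out                += 5    (after the strategy returns)
    # so duration_in_millis / out approximates the average cost per event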
data/lib/logstash/pipeline.rb CHANGED
@@ -34,6 +34,7 @@ module LogStash; class BasePipeline
 
   def initialize(config_str, settings = SETTINGS)
     @logger = self.logger
+    @mutex = Mutex.new
     @config_str = config_str
     @config_hash = Digest::SHA1.hexdigest(@config_str)
     # Every time #plugin is invoked this is incremented to give each plugin
@@ -58,6 +59,8 @@ module LogStash; class BasePipeline
     parsed_config = grammar.parse(config_str)
     raise(ConfigurationError, grammar.failure_reason) if parsed_config.nil?
 
+    parsed_config.process_escape_sequences = settings.get_value("config.support_escapes")
+
     config_code = parsed_config.compile
 
     # config_code = BasePipeline.compileConfig(config_str)
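config.support_escapes is the new opt-in setting (default off) backed by the added string_escape.rb; when enabled, escape sequences such as \n, \t and \" inside quoted strings of the pipeline config are unescaped at parse time. Illustrative:

    # logstash.yml:    config.support_escapes: true
    # pipeline config: filter { mutate { gsub => ["message", "\t", " "] } }
    # with the setting on, "\t" compiles to a real tab character; with it off,
    # it stays as the two characters backslash-t.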
@@ -122,6 +125,12 @@ module LogStash; class BasePipeline
   def non_reloadable_plugins
     (inputs + filters + outputs).select { |plugin| !plugin.reloadable? }
   end
+
+  private
+
+  def default_logging_keys(other_keys = {})
+    { :pipeline_id => pipeline_id }.merge(other_keys)
+  end
 end; end
 
 module LogStash; class Pipeline < BasePipeline
@@ -313,6 +322,9 @@ module LogStash; class Pipeline < BasePipeline
|
|
313
322
|
config_metric.gauge(:batch_delay, batch_delay)
|
314
323
|
config_metric.gauge(:config_reload_automatic, @settings.get("config.reload.automatic"))
|
315
324
|
config_metric.gauge(:config_reload_interval, @settings.get("config.reload.interval"))
|
325
|
+
config_metric.gauge(:dead_letter_queue_enabled, dlq_enabled?)
|
326
|
+
config_metric.gauge(:dead_letter_queue_path, @dlq_writer.get_path.to_absolute_path.to_s) if dlq_enabled?
|
327
|
+
|
316
328
|
|
317
329
|
@logger.info("Starting pipeline",
|
318
330
|
"id" => self.pipeline_id,
|
@@ -347,6 +359,10 @@ module LogStash; class Pipeline < BasePipeline
|
|
347
359
|
end
|
348
360
|
end
|
349
361
|
|
362
|
+
def dlq_enabled?
|
363
|
+
@settings.get("dead_letter_queue.enable")
|
364
|
+
end
|
365
|
+
|
350
366
|
# Main body of what a worker thread does
|
351
367
|
# Repeatedly takes batches off the queue, filters, then outputs them
|
352
368
|
def worker_loop(batch_size, batch_delay)
|
@@ -419,7 +435,7 @@ module LogStash; class Pipeline < BasePipeline
|
|
419
435
|
output_events_map.each do |output, events|
|
420
436
|
output.multi_receive(events)
|
421
437
|
end
|
422
|
-
|
438
|
+
|
423
439
|
@filter_queue_client.add_output_metrics(batch)
|
424
440
|
end
|
425
441
|
|
@@ -599,6 +615,13 @@ module LogStash; class Pipeline < BasePipeline
|
|
599
615
|
.each {|t| t.delete("status") }
|
600
616
|
end
|
601
617
|
|
618
|
+
def collect_dlq_stats
|
619
|
+
if dlq_enabled?
|
620
|
+
dlq_metric = @metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :dlq])
|
621
|
+
dlq_metric.gauge(:queue_size_in_bytes, @dlq_writer.get_current_queue_size)
|
622
|
+
end
|
623
|
+
end
|
624
|
+
|
602
625
|
def collect_stats
|
603
626
|
pipeline_metric = @metric.namespace([:stats, :pipelines, pipeline_id.to_s.to_sym, :queue])
|
604
627
|
pipeline_metric.gauge(:type, settings.get("queue.type"))
|
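With the DLQ enabled this surfaces in the pipeline's node stats under a new dlq namespace, roughly (value illustrative):

    # stats => pipelines => <pipeline_id> => dlq => { :queue_size_in_bytes => 1337 }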
@@ -639,11 +662,20 @@ module LogStash; class Pipeline < BasePipeline
|
|
639
662
|
|
640
663
|
private
|
641
664
|
|
665
|
+
def default_logging_keys(other_keys = {})
|
666
|
+
keys = super
|
667
|
+
keys[:thread] = thread.inspect if thread
|
668
|
+
keys
|
669
|
+
end
|
670
|
+
|
642
671
|
def draining_queue?
|
643
672
|
@drain_queue ? !@filter_queue_client.empty? : false
|
644
673
|
end
|
645
674
|
|
646
675
|
def wrapped_write_client(plugin)
|
647
|
-
|
676
|
+
#need to ensure that metrics are initialized one plugin at a time, else a race condition can exist.
|
677
|
+
@mutex.synchronize do
|
678
|
+
LogStash::Instrument::WrappedWriteClient.new(@input_queue_client, self, metric, plugin)
|
679
|
+
end
|
648
680
|
end
|
649
681
|
end; end
|
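The @mutex added to BasePipeline#initialize pairs with this change: per the comment above, wrapped write clients initialize their metrics on creation, so two input plugins starting at once could otherwise race on that setup; synchronizing makes it effectively atomic:

    # inputs start concurrently, but clients are created one at a time:
    # input_a thread ─┐
    #                 ├─ @mutex.synchronize { WrappedWriteClient.new(...) }
    # input_b thread ─┘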