logstash-core 5.4.3-java → 5.5.0-java
This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash/api/commands/hot_threads_reporter.rb +2 -2
- data/lib/logstash/api/commands/node.rb +0 -1
- data/lib/logstash/api/commands/stats.rb +0 -1
- data/lib/logstash/config/mixin.rb +5 -43
- data/lib/logstash/config/modules_common.rb +71 -0
- data/lib/logstash/elasticsearch_client.rb +120 -0
- data/lib/logstash/environment.rb +14 -3
- data/lib/logstash/errors.rb +1 -0
- data/lib/logstash/execution_context.rb +11 -3
- data/lib/logstash/inputs/base.rb +2 -0
- data/lib/logstash/instrument/global_metrics.rb +13 -0
- data/lib/logstash/instrument/metric_type/mean.rb +5 -0
- data/lib/logstash/instrument/periodic_poller/jvm.rb +5 -5
- data/lib/logstash/logging/logger.rb +26 -1
- data/lib/logstash/modules/cli_parser.rb +74 -0
- data/lib/logstash/modules/elasticsearch_config.rb +22 -0
- data/lib/logstash/modules/elasticsearch_resource.rb +10 -0
- data/lib/logstash/modules/file_reader.rb +36 -0
- data/lib/logstash/modules/importer.rb +37 -0
- data/lib/logstash/modules/kibana_base_resource.rb +10 -0
- data/lib/logstash/modules/kibana_config.rb +104 -0
- data/lib/logstash/modules/kibana_resource.rb +10 -0
- data/lib/logstash/modules/logstash_config.rb +48 -0
- data/lib/logstash/modules/resource_base.rb +37 -0
- data/lib/logstash/modules/scaffold.rb +44 -0
- data/lib/logstash/namespace.rb +1 -0
- data/lib/logstash/outputs/base.rb +2 -0
- data/lib/logstash/pipeline.rb +18 -4
- data/lib/logstash/plugin.rb +1 -0
- data/lib/logstash/plugins/registry.rb +5 -0
- data/lib/logstash/runner.rb +42 -2
- data/lib/logstash/settings.rb +7 -1
- data/lib/logstash/timestamp.rb +4 -0
- data/lib/logstash/util/dead_letter_queue_manager.rb +61 -0
- data/lib/logstash/util/safe_uri.rb +130 -11
- data/lib/logstash/util/thread_dump.rb +3 -1
- data/lib/logstash/util/wrapped_acked_queue.rb +24 -6
- data/lib/logstash/util/wrapped_synchronous_queue.rb +19 -5
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +46 -0
- data/logstash-core.gemspec +7 -2
- data/spec/{api/lib/commands/stats.rb → logstash/api/commands/stats_spec.rb} +7 -2
- data/spec/{api/lib → logstash/api}/errors_spec.rb +1 -1
- data/spec/{api/lib/api → logstash/api/modules}/logging_spec.rb +1 -10
- data/spec/{api/lib/api → logstash/api/modules}/node_plugins_spec.rb +2 -3
- data/spec/{api/lib/api → logstash/api/modules}/node_spec.rb +6 -7
- data/spec/{api/lib/api → logstash/api/modules}/node_stats_spec.rb +2 -2
- data/spec/{api/lib/api → logstash/api/modules}/plugins_spec.rb +4 -3
- data/spec/{api/lib/api → logstash/api/modules}/root_spec.rb +3 -3
- data/spec/{api/lib → logstash/api}/rack_app_spec.rb +0 -0
- data/spec/logstash/config/mixin_spec.rb +2 -2
- data/spec/logstash/execution_context_spec.rb +20 -1
- data/spec/logstash/filter_delegator_spec.rb +2 -1
- data/spec/logstash/inputs/base_spec.rb +1 -1
- data/spec/logstash/output_delegator_spec.rb +2 -1
- data/spec/logstash/outputs/base_spec.rb +1 -1
- data/spec/logstash/pipeline_dlq_commit_spec.rb +107 -0
- data/spec/logstash/pipeline_pq_file_spec.rb +1 -1
- data/spec/logstash/plugin_spec.rb +1 -1
- data/spec/logstash/plugins/registry_spec.rb +22 -5
- data/spec/logstash/runner_spec.rb +122 -19
- data/spec/logstash/settings_spec.rb +91 -0
- data/spec/logstash/timestamp_spec.rb +6 -0
- data/spec/support/helpers.rb +80 -1
- data/spec/support/matchers.rb +13 -0
- data/spec/support/shared_contexts.rb +38 -0
- data/spec/support/shared_examples.rb +1 -1
- metadata +95 -40
- data/spec/api/lib/api/support/resource_dsl_methods.rb +0 -87
- data/spec/api/spec_helper.rb +0 -111
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4b98be9ab7006918b14eec0ad319a2faa11e2461
+  data.tar.gz: d94019aa11fa085d61ed700b8eab297975449648
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3fe7898ed25893c3b47c06762dbf411613a29a07a00d15eddc496f25f6be3d7fe017a60d8e1e0016814be2755a6e3a81cdbc71958c6c56ec5c149cbce5d1be59
+  data.tar.gz: 4b06ab8cfbc1bdf914977a57a9dbf99f429c1b6df6da703fd6bb9a2a51b5fb7f1fb7343c364c66577c34a234769aaad5f2ab2c92d362b1ce25b1fc2ce5f7a3a0
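For readers who want to check the published artifact themselves, the values above are digests of the `metadata.gz` and `data.tar.gz` members inside the `.gem` archive. A minimal verification sketch, assuming the 5.5.0-java gem has been fetched locally (the filename follows standard RubyGems naming and is not taken from this diff):

```ruby
# Recompute the digests recorded in checksums.yaml for a locally fetched gem.
# Assumes something like `gem fetch logstash-core -v 5.5.0` produced
# logstash-core-5.5.0-java.gem in the current directory; a .gem file is a tar
# archive containing metadata.gz, data.tar.gz and checksums.yaml.gz.
require "digest"
require "rubygems/package"

File.open("logstash-core-5.5.0-java.gem", "rb") do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    data = entry.read
    puts "#{entry.full_name} SHA1:   #{Digest::SHA1.hexdigest(data)}"
    puts "#{entry.full_name} SHA512: #{Digest::SHA512.hexdigest(data)}"
  end
end
```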
data/lib/logstash-core/logstash-core.jar CHANGED
Binary file
data/lib/logstash/api/commands/hot_threads_reporter.rb CHANGED
@@ -1,4 +1,5 @@
 # encoding: utf-8
+java_import 'org.logstash.instrument.reports.ThreadsReport'
 
 class HotThreadsReport
   STRING_SEPARATOR_LENGTH = 80.freeze
@@ -7,8 +8,7 @@ class HotThreadsReport
   def initialize(cmd, options)
     @cmd = cmd
     filter = { :stacktrace_size => options.fetch(:stacktrace_size, HOT_THREADS_STACK_TRACES_SIZE_DEFAULT) }
-    jr_dump = JRMonitor.threads.generate(filter)
-    @thread_dump = ::LogStash::Util::ThreadDump.new(options.merge(:dump => jr_dump))
+    @thread_dump = ::LogStash::Util::ThreadDump.new(options.merge(:dump => ThreadsReport.generate(filter)))
   end
 
   def to_s
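The report is now produced by a Java helper shipped in the bundled logstash-core.jar instead of the `jrmonitor` gem. A minimal JRuby sketch of the call the reporter makes (the stack-trace depth is an illustrative value):

```ruby
# Minimal JRuby sketch of the new code path; 10 is an illustrative stack-trace depth.
java_import 'org.logstash.instrument.reports.ThreadsReport'

dump = ThreadsReport.generate({:stacktrace_size => 10})
# `dump` is the structure the reporter wraps in LogStash::Util::ThreadDump above.
```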
data/lib/logstash/api/commands/node.rb CHANGED
@@ -40,7 +40,6 @@ module LogStash
         {
           :pid => ManagementFactory.getRuntimeMXBean().getName().split("@").first.to_i,
           :version => java.lang.System.getProperty("java.version"),
-          :vm_name => java.lang.System.getProperty("java.vm.name"),
           :vm_version => java.lang.System.getProperty("java.version"),
           :vm_vendor => java.lang.System.getProperty("java.vendor"),
           :vm_name => java.lang.System.getProperty("java.vm.name"),
data/lib/logstash/api/commands/stats.rb CHANGED
@@ -54,7 +54,6 @@ module LogStash
       def memory
         memory = service.get_shallow(:jvm, :memory)
         {
-          :heap_used_in_bytes => memory[:heap][:used_in_bytes],
           :heap_used_percent => memory[:heap][:used_percent],
           :heap_committed_in_bytes => memory[:heap][:committed_in_bytes],
           :heap_max_in_bytes => memory[:heap][:max_in_bytes],
data/lib/logstash/config/mixin.rb CHANGED
@@ -6,7 +6,6 @@ require "logstash/util/password"
 require "logstash/util/safe_uri"
 require "logstash/version"
 require "logstash/environment"
-require "logstash/util/environment_variables"
 require "logstash/util/plugin_version"
 require "filesize"
 
@@ -34,37 +33,21 @@ LogStash::Environment.load_locale!
 # }
 #
 module LogStash::Config::Mixin
+
+  include LogStash::Util::EnvironmentVariables
+
   attr_accessor :config
   attr_accessor :original_params
 
   PLUGIN_VERSION_1_0_0 = LogStash::Util::PluginVersion.new(1, 0, 0)
   PLUGIN_VERSION_0_9_0 = LogStash::Util::PluginVersion.new(0, 9, 0)
-
-  ENV_PLACEHOLDER_REGEX = /\$\{(?<name>\w+)(\:(?<default>[^}]*))?\}/
-
+
   # This method is called when someone does 'include LogStash::Config'
   def self.included(base)
     # Add the DSL methods to the 'base' given.
    base.extend(LogStash::Config::Mixin::DSL)
  end
-
-  # Recursive method to replace environment variable references in parameters
-  def deep_replace(value)
-    if (value.is_a?(Hash))
-      value.each do |valueHashKey, valueHashValue|
-        value[valueHashKey.to_s] = deep_replace(valueHashValue)
-      end
-    else
-      if (value.is_a?(Array))
-        value.each_index do | valueArrayIndex|
-          value[valueArrayIndex] = deep_replace(value[valueArrayIndex])
-        end
-      else
-        return replace_env_placeholders(value)
-      end
-    end
-  end
-
+
  def config_init(params)
    # Validation will modify the values inside params if necessary.
    # For example: converting a string to a number, etc.
@@ -159,27 +142,6 @@ module LogStash::Config::Mixin
     @config = params
   end # def config_init
 
-  # Replace all environment variable references in 'value' param by environment variable value and return updated value
-  # Process following patterns : $VAR, ${VAR}, ${VAR:defaultValue}
-  def replace_env_placeholders(value)
-    return value unless value.is_a?(String)
-
-    value.gsub(ENV_PLACEHOLDER_REGEX) do |placeholder|
-      # Note: Ruby docs claim[1] Regexp.last_match is thread-local and scoped to
-      # the call, so this should be thread-safe.
-      #
-      # [1] http://ruby-doc.org/core-2.1.1/Regexp.html#method-c-last_match
-      name = Regexp.last_match(:name)
-      default = Regexp.last_match(:default)
-
-      replacement = ENV.fetch(name, default)
-      if replacement.nil?
-        raise LogStash::ConfigurationError, "Cannot evaluate `#{placeholder}`. Environment variable `#{name}` is not set and there is no default value given."
-      end
-      replacement
-    end
-  end # def replace_env_placeholders
-
   module DSL
 
     include LogStash::Util::EnvironmentVariables
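Note that `${VAR}` / `${VAR:default}` substitution is not dropped in 5.5.0; the mixin now pulls it in via `include LogStash::Util::EnvironmentVariables` instead of defining it inline. A standalone sketch of the behaviour the deleted methods implemented, reusing the `ENV_PLACEHOLDER_REGEX` removed above (the real code raises `LogStash::ConfigurationError` rather than a plain RuntimeError):

```ruby
# Standalone sketch of the placeholder substitution that moved into
# LogStash::Util::EnvironmentVariables; the regex is the one deleted above.
ENV_PLACEHOLDER_REGEX = /\$\{(?<name>\w+)(\:(?<default>[^}]*))?\}/

def replace_env_placeholders(value)
  return value unless value.is_a?(String)
  value.gsub(ENV_PLACEHOLDER_REGEX) do |placeholder|
    name        = Regexp.last_match(:name)
    default     = Regexp.last_match(:default)
    replacement = ENV.fetch(name, default)
    # Mirrors the removed check; Logstash itself raises LogStash::ConfigurationError here.
    raise "Cannot evaluate `#{placeholder}`: `#{name}` is not set and has no default" if replacement.nil?
    replacement
  end
end

ENV["ES_HOST"] = "es1.example.com"
replace_env_placeholders("hosts => ['${ES_HOST:localhost}:${ES_PORT:9200}']")
# => "hosts => ['es1.example.com:9200']"
```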
data/lib/logstash/config/modules_common.rb ADDED
@@ -0,0 +1,71 @@
+# encoding: utf-8
+require "logstash/util/loggable"
+require "logstash/elasticsearch_client"
+require "logstash/modules/importer"
+require "logstash/errors"
+
+module LogStash module Config
+  class ModulesCommon # extracted here for bwc with 5.x
+    include LogStash::Util::Loggable
+
+    def self.pipeline_configs(settings)
+      pipelines = []
+      plugin_modules = LogStash::PLUGIN_REGISTRY.plugins_with_type(:modules)
+
+      modules_array = settings.get("modules.cli").empty? ? settings.get("modules") : settings.get("modules.cli")
+      if modules_array.empty?
+        # no specifed modules
+        return pipelines
+      end
+      logger.debug("Specified modules", :modules_array => modules_array.to_s)
+
+      module_names = modules_array.collect {|module_hash| module_hash["name"]}
+      if module_names.length > module_names.uniq.length
+        duplicate_modules = module_names.group_by(&:to_s).select { |_,v| v.size > 1 }.keys
+        raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-must-be-unique", :duplicate_modules => duplicate_modules)
+      end
+
+      available_module_names = plugin_modules.map(&:module_name)
+      specified_and_available_names = module_names & available_module_names
+
+      if (specified_and_available_names).empty?
+        i18n_opts = {:specified_modules => module_names, :available_modules => available_module_names}
+        raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-unavailable", i18n_opts)
+      end
+
+      specified_and_available_names.each do |module_name|
+        connect_fail_args = {}
+        begin
+          module_hash = modules_array.find {|m| m["name"] == module_name}
+          current_module = plugin_modules.find { |allmodules| allmodules.module_name == module_name }
+
+          alt_name = "module-#{module_name}"
+          pipeline_id = alt_name
+
+          current_module.with_settings(module_hash)
+          esclient = LogStash::ElasticsearchClient.build(module_hash)
+          config_test = settings.get("config.test_and_exit")
+          if esclient.can_connect? || config_test
+            if !config_test
+              current_module.import(LogStash::Modules::Importer.new(esclient))
+            end
+
+            config_string = current_module.config_string
+
+            pipelines << {"pipeline_id" => pipeline_id, "alt_name" => alt_name, "config_string" => config_string, "settings" => settings}
+          else
+            connect_fail_args[:module_name] = module_name
+            connect_fail_args[:hosts] = esclient.host_settings
+          end
+        rescue => e
+          raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.parse-failed", :error => e.message)
+        end
+
+        if !connect_fail_args.empty?
+          raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.elasticsearch_connection_failed", connect_fail_args)
+        end
+      end
+      pipelines
+    end
+  end
+end end
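For context, `pipeline_configs` reads the new `modules` / `modules.cli` settings (registered in environment.rb below) as an array of hashes. Only the `"name"` key is used directly here; the whole hash is handed to the module's `with_settings` and to `ElasticsearchClient.build`. A hypothetical value, with an illustrative module name and variable keys:

```ruby
# Hypothetical shape of the "modules" setting that pipeline_configs consumes.
# The module name and the var.* keys are illustrative; only "name" is required by
# the code above, and "var.output.elasticsearch.hosts" is what
# ElasticsearchClient.build reads (see the next file).
modules_setting = [
  {
    "name" => "netflow",
    "var.input.udp.port" => "2055",
    "var.output.elasticsearch.hosts" => "es1.example.com:9200,es2.example.com:9200"
  }
]
```

With a value of that shape in settings, `LogStash::Config::ModulesCommon.pipeline_configs(settings)` returns one pipeline hash per module it can resolve and reach.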
data/lib/logstash/elasticsearch_client.rb ADDED
@@ -0,0 +1,120 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require "elasticsearch"
+require "elasticsearch/transport/transport/http/manticore"
+
+module LogStash class ElasticsearchClient
+  include LogStash::Util::Loggable
+
+  class Response
+    # duplicated here from Elasticsearch::Transport::Transport::Response
+    # to create a normalised response across different client IMPL
+    attr_reader :status, :body, :headers
+    def initialize(status, body, headers={})
+      @status, @body, @headers = status, body, headers
+      @body = body.force_encoding('UTF-8') if body.respond_to?(:force_encoding)
+    end
+  end
+
+  def self.build(settings)
+    new(RubyClient.new(settings, logger))
+  end
+
+  class RubyClient
+    def initialize(settings, logger)
+      @settings = settings
+      @logger = logger
+      @client_args = client_args
+      @client = Elasticsearch::Client.new(@client_args)
+    end
+
+    def can_connect?
+      begin
+        head(SecureRandom.hex(32).prepend('_'))
+      rescue Elasticsearch::Transport::Transport::Errors::BadRequest
+        true
+      rescue Manticore::SocketException
+        false
+      end
+    end
+
+    def host_settings
+      @client_args[:hosts]
+    end
+
+    def delete(path)
+      begin
+        normalize_response(@client.perform_request('DELETE', path, {}, nil))
+      rescue Exception => e
+        if is_404_error?(e)
+          Response.new(404, "", {})
+        else
+          raise e
+        end
+      end
+    end
+
+    def put(path, content)
+      normalize_response(@client.perform_request('PUT', path, {}, content))
+    end
+
+    def head(path)
+      begin
+        normalize_response(@client.perform_request('HEAD', path, {}, nil))
+      rescue Exception => e
+        if is_404_error?(e)
+          Response.new(404, "", {})
+        else
+          raise e
+        end
+      end
+    end
+
+    private
+
+    def is_404_error?(error)
+      error.class.to_s =~ /NotFound/ || error.message =~ /Not\s*Found|404/i
+    end
+
+    def normalize_response(response)
+      Response.new(response.status, response.body, response.headers)
+    end
+
+    def client_args
+      {
+        :transport_class => Elasticsearch::Transport::Transport::HTTP::Manticore,
+        :hosts => [*unpack_hosts],
+        # :logger => @logger, # silence the client logging
+      }
+    end
+
+    def unpack_hosts
+      @settings.fetch("var.output.elasticsearch.hosts", "localhost:9200").split(',').map(&:strip)
+    end
+  end
+
+  def initialize(client)
+    @client = client
+  end
+
+  def delete(path)
+    @client.delete(path)
+  end
+
+  def put(path, content)
+    @client.put(path, content)
+  end
+
+  def head(path)
+    @client.head(path)
+  end
+
+  def can_connect?
+    @client.can_connect?
+  end
+
+  def host_settings
+    @client.host_settings
+  end
+end end # class LogStash::ModulesImporter
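A usage sketch based only on the code above: `build` takes the module settings hash, reads `var.output.elasticsearch.hosts` (defaulting to `localhost:9200`), and the wrapper exposes just `can_connect?`, `head`, `put`, `delete` and `host_settings`. The paths and document body here are illustrative:

```ruby
# Usage sketch for the client added above; the template path and body are illustrative.
client = LogStash::ElasticsearchClient.build(
  "var.output.elasticsearch.hosts" => "localhost:9200,localhost:9201"
)

if client.can_connect?
  if client.head("_template/example-module").status == 404
    client.put("_template/example-module", '{"template":"example-*","settings":{}}')
  end
else
  puts "Elasticsearch is unreachable at #{client.host_settings.inspect}"
end
```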
data/lib/logstash/environment.rb CHANGED
@@ -20,6 +20,8 @@ module LogStash
       Setting::NullableString.new("path.config", nil, false),
       Setting::WritableDirectory.new("path.data", ::File.join(LogStash::Environment::LOGSTASH_HOME, "data")),
       Setting::NullableString.new("config.string", nil, false),
+      Setting.new("modules.cli", Array, []),
+      Setting.new("modules", Array, []),
       Setting::Boolean.new("config.test_and_exit", false),
       Setting::Boolean.new("config.reload.automatic", false),
       Setting::Numeric.new("config.reload.interval", 3), # in seconds
@@ -49,6 +51,7 @@ module LogStash
       Setting::Numeric.new("queue.checkpoint.acks", 1024), # 0 is unlimited
       Setting::Numeric.new("queue.checkpoint.writes", 1024), # 0 is unlimited
       Setting::Numeric.new("queue.checkpoint.interval", 1000), # 0 is no time-based checkpointing
+      Setting::Boolean.new("dead_letter_queue.enable", false),
       Setting::TimeValue.new("slowlog.threshold.warn", "-1"),
       Setting::TimeValue.new("slowlog.threshold.info", "-1"),
       Setting::TimeValue.new("slowlog.threshold.debug", "-1"),
@@ -58,13 +61,21 @@ module LogStash
   # Compute the default queue path based on `path.data`
   default_queue_file_path = ::File.join(SETTINGS.get("path.data"), "queue")
   SETTINGS.register Setting::WritableDirectory.new("path.queue", default_queue_file_path)
-
+  # Compute the default dead_letter_queue path based on `path.data`
+  default_dlq_file_path = ::File.join(SETTINGS.get("path.data"), "dead_letter_queue")
+  SETTINGS.register Setting::WritableDirectory.new("path.dead_letter_queue", default_dlq_file_path)
+
   SETTINGS.on_post_process do |settings|
     # If the data path is overridden but the queue path isn't recompute the queue path
     # We need to do this at this stage because of the weird execution order
     # our monkey-patched Clamp follows
-    if settings.set?("path.data") && !settings.set?("path.queue")
-      settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
+    if settings.set?("path.data")
+      if !settings.set?("path.queue")
+        settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
+      end
+      if !settings.set?("path.dead_letter_queue")
+        settings.set_value("path.dead_letter_queue", ::File.join(settings.get("path.data"), "dead_letter_queue"))
+      end
     end
   end
 
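In practice the additions above make four new settings readable through `LogStash::SETTINGS`; a small sketch, with the registered defaults shown as comments:

```ruby
# Reading the newly registered settings; the comments show the registered defaults.
LogStash::SETTINGS.get("modules")                   # => []
LogStash::SETTINGS.get("modules.cli")               # => []
LogStash::SETTINGS.get("dead_letter_queue.enable")  # => false
LogStash::SETTINGS.get("path.dead_letter_queue")    # => "<path.data>/dead_letter_queue"
```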
data/lib/logstash/errors.rb CHANGED
data/lib/logstash/execution_context.rb CHANGED
@@ -1,10 +1,18 @@
 # encoding: utf-8
+require "logstash/util/dead_letter_queue_manager"
 module LogStash
   class ExecutionContext
-    attr_reader :pipeline
+    attr_reader :pipeline, :dlq_writer
 
-    def initialize(pipeline)
-      @pipeline = pipeline
+    def initialize(pipeline, plugin_id, plugin_type, dlq_writer)
+      @pipeline = pipeline
+      @plugin_id = plugin_id
+      @plugin_type = plugin_type
+      @dlq_writer = LogStash::Util::PluginDeadLetterQueueWriter.new(dlq_writer, @plugin_id, @plugin_type)
+    end
+
+    def pipeline_id
+      @pipeline.pipeline_id
     end
   end
 end
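A construction sketch for the new four-argument form; `pipeline` and `pipeline_dlq_writer` stand in for objects the (not shown) pipeline code normally supplies, and the id/type strings are illustrative:

```ruby
# Construction sketch for the new ExecutionContext; the pipeline object, the
# pipeline-level DLQ writer and the id/type strings are stand-ins.
context = LogStash::ExecutionContext.new(pipeline, "my-filter-id", "filter", pipeline_dlq_writer)

context.pipeline_id  # delegates to pipeline.pipeline_id
context.dlq_writer   # a LogStash::Util::PluginDeadLetterQueueWriter scoped to this plugin
```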
data/lib/logstash/inputs/base.rb CHANGED
@@ -105,6 +105,8 @@ class LogStash::Inputs::Base < LogStash::Plugin
     super
     # There is no easy way to propage an instance variable into the codec, because the codec
     # are created at the class level
+    # TODO(talevy): Codecs should have their own execution_context, for now they will inherit their
+    # parent plugin's
     @codec.execution_context = context
     context
   end
data/lib/logstash/instrument/metric_type/mean.rb CHANGED
@@ -4,7 +4,12 @@ require "concurrent"
 
 module LogStash module Instrument module MetricType
   class Mean < Base
+    include ::LogStash::Util::Loggable
+
     def initialize(namespaces, key)
+      logger.warn("Deprecated 'mean' metric type used! This will be removed in Logstash 6.0!",
+                  :namespaces => namespaces,
+                  :key => key)
       super(namespaces, key)
 
       @counter = Concurrent::AtomicFixnum.new
data/lib/logstash/instrument/periodic_poller/jvm.rb CHANGED
@@ -2,18 +2,19 @@
 require "logstash/instrument/periodic_poller/base"
 require "logstash/instrument/periodic_poller/load_average"
 require "logstash/environment"
-require "jrmonitor"
 require "set"
 
+java_import 'com.sun.management.UnixOperatingSystemMXBean'
 java_import 'java.lang.management.ManagementFactory'
 java_import 'java.lang.management.OperatingSystemMXBean'
 java_import 'java.lang.management.GarbageCollectorMXBean'
 java_import 'java.lang.management.RuntimeMXBean'
-java_import 'com.sun.management.UnixOperatingSystemMXBean'
 java_import 'javax.management.MBeanServer'
 java_import 'javax.management.ObjectName'
 java_import 'javax.management.AttributeList'
 java_import 'javax.naming.directory.Attribute'
+java_import 'org.logstash.instrument.reports.MemoryReport'
+java_import 'org.logstash.instrument.reports.ProcessReport'
 
 
 module LogStash module Instrument module PeriodicPoller
@@ -50,7 +51,7 @@ module LogStash module Instrument module PeriodicPoller
     end
 
     def collect
-      raw = JRMonitor.memory.generate
+      raw = MemoryReport.generate
       collect_jvm_metrics(raw)
       collect_pools_metrics(raw)
       collect_threads_metrics
@@ -81,11 +82,10 @@ module LogStash module Instrument module PeriodicPoller
     end
 
     def collect_process_metrics
-      process_metrics = JRMonitor.process.generate
+      process_metrics = ProcessReport.generate
 
       path = [:jvm, :process]
 
-
       open_fds = process_metrics["open_file_descriptors"]
       if @peak_open_fds.nil? || open_fds > @peak_open_fds
         @peak_open_fds = open_fds