logstash-core 7.1.1-java → 7.2.0-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 64360418777f1955caf42e8b14ac7c9201a641bd668612c55f9382cdf0f94b9e
-  data.tar.gz: 96661365e73cb4eeaaf943a7cc25e87f9e5639f81a39657f77578ca9c2097c2e
+  metadata.gz: f34b96ed7e4139bc1eb3d820d6f325e3bd579be9381a369be73aa08dc231d4e1
+  data.tar.gz: e8d3c2aa6e2d59b44e7f99f27af966ed962608f90e7c9af0f68b5a459d4804cf
 SHA512:
-  metadata.gz: 9343c65ed931f58f9350972d9fffccbab77674cce2c7f4001a9d7a9b5b37f30461a5e5b687c61ec6efca6093517d1ce8a67ac34bce0d38d95e9f8493a19ab841
-  data.tar.gz: 73c06d0f1400e194db5373300cb0464dec9094dfa822147e0f3b911571aeab464d3bebded47435318b0360aa38753fd39010fd8a2dacdc1db67e883829f14de0
+  metadata.gz: 0c13a53ea439c0d4293ab3e6bcb4f91bd93e98c98184d2cadc1c1466372bd2af7ad469522802f4e82fa5d66cae9b486c8b3b51111ebdd683c50e5203b4975cae
+  data.tar.gz: c7dbd2b483cc41c47fd3036dc4bf7223578de3cdb4ef7ea5d75cfa3c4bf7572808756bfb5dc85ac394a9e74cb238b2c19707ca6361edb871de8c2dd403f0aa15
@@ -7,8 +7,15 @@ module LogStash
     module Commands
       class DefaultMetadata < Commands::Base
         def all
-          {:host => host, :version => version, :http_address => http_address,
-           :id => service.agent.id, :name => service.agent.name}
+          {:host => host,
+           :version => version,
+           :http_address => http_address,
+           :id => service.agent.id,
+           :name => service.agent.name,
+           :ephemeral_id => service.agent.ephemeral_id,
+           :status => "green", # This is hard-coded to mirror x-pack behavior
+           :snapshot => ::BUILD_INFO["build_snapshot"],
+          }
         end

         def host
@@ -3,11 +3,11 @@ java_import 'org.logstash.instrument.reports.ThreadsReport'

 class HotThreadsReport
   STRING_SEPARATOR_LENGTH = 80.freeze
-  HOT_THREADS_STACK_TRACES_SIZE_DEFAULT = 10.freeze
+  HOT_THREADS_STACK_TRACES_SIZE_DEFAULT = 50.freeze

   def initialize(cmd, options)
     @cmd = cmd
-    filter = { :stacktrace_size => options.fetch(:stacktrace_size, HOT_THREADS_STACK_TRACES_SIZE_DEFAULT) }
+    filter = { 'stacktrace_size' => "#{options.fetch(:stacktrace_size, HOT_THREADS_STACK_TRACES_SIZE_DEFAULT)}" }
     @thread_dump = ::LogStash::Util::ThreadDump.new(options.merge(:dump => ThreadsReport.generate(filter)))
   end

@@ -17,18 +17,31 @@ module LogStash
           payload
         end

-        def pipelines
+        def pipelines(options={})
           pipeline_ids = service.get_shallow(:stats, :pipelines).keys
           pipeline_ids.each_with_object({}) do |pipeline_id, result|
-            result[pipeline_id] = pipeline(pipeline_id)
+            result[pipeline_id] = pipeline(pipeline_id, options)
           end
         end

-        def pipeline(pipeline_id)
-          extract_metrics(
+        def pipeline(pipeline_id, options={})
+          metrics = extract_metrics(
             [:stats, :pipelines, pipeline_id.to_sym, :config],
-            :workers, :batch_size, :batch_delay, :config_reload_automatic, :config_reload_interval, :dead_letter_queue_enabled, :dead_letter_queue_path
+            :ephemeral_id,
+            :hash,
+            :workers,
+            :batch_size,
+            :batch_delay,
+            :config_reload_automatic,
+            :config_reload_interval,
+            :dead_letter_queue_enabled,
+            :dead_letter_queue_path,
+            :cluster_uuids
           ).reject{|_, v|v.nil?}
+          if options.fetch(:graph, false)
+            metrics.merge!(extract_metrics([:stats, :pipelines, pipeline_id.to_sym, :config], :graph))
+          end
+          metrics
         rescue
           {}
         end
@@ -115,6 +115,7 @@ module LogStash
           :events => stats[:events],
           :plugins => {
             :inputs => plugin_stats(stats, :inputs),
+            :codecs => plugin_stats(stats, :codecs),
             :filters => plugin_stats(stats, :filters),
             :outputs => plugin_stats(stats, :outputs)
           },
@@ -13,9 +13,10 @@ module LogStash
       get "/hot_threads" do
         begin
           ignore_idle_threads = params["ignore_idle_threads"] || true
-
           options = {:ignore_idle_threads => as_boolean(ignore_idle_threads)}
           options[:threads] = params["threads"].to_i if params.has_key?("threads")
+          options[:ordered_by] = params["ordered_by"] if params.has_key?("ordered_by")
+          options[:stacktrace_size] = params["stacktrace_size"] if params.has_key?("stacktrace_size")

           as = human? ? :string : :json
           respond_with(node.hot_threads(options), {:as => as})
@@ -28,11 +29,19 @@ module LogStash

       get "/pipelines/:id" do
         pipeline_id = params["id"]
-        payload = node.pipeline(pipeline_id)
+        opts = {:graph => as_boolean(params.fetch("graph", false))}
+        payload = node.pipeline(pipeline_id, opts)
         halt(404) if payload.empty?
         respond_with(:pipelines => { pipeline_id => payload } )
       end

+      get "/pipelines" do
+        opts = {:graph => as_boolean(params.fetch("graph", false))}
+        payload = node.pipelines(opts)
+        halt(404) if payload.empty?
+        respond_with(:pipelines => payload )
+      end
+
       get "/?:filter?" do
         selected_fields = extract_fields(params["filter"].to_s.strip)
         values = node.all(selected_fields)
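
The two Node API hunks above add `ordered_by`/`stacktrace_size` pass-through on `/hot_threads`, a `graph` flag on `/pipelines/:id`, and a new `GET /pipelines` route that returns stats for every pipeline at once. A minimal sketch of exercising them, assuming the monitoring API listens on its default `localhost:9600` and these routes are mounted under `/_node` as in earlier releases:

```ruby
require 'net/http'
require 'json'

base = URI("http://localhost:9600/_node/")

# New in 7.2.0: one call for every pipeline; graph=true additionally
# returns the serialized LIR graph (see the LIRSerializer below).
pipelines = JSON.parse(Net::HTTP.get(URI.join(base, "pipelines?graph=true")))
puts pipelines["pipelines"].keys

# hot_threads now forwards ordered_by and stacktrace_size to the report.
puts Net::HTTP.get(URI.join(base, "hot_threads?threads=3&stacktrace_size=25&human=true"))
```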
@@ -0,0 +1,52 @@
+module LogStash::Codecs
+  class Delegator < SimpleDelegator
+    def initialize(obj)
+      super(obj)
+      @encode_metric = LogStash::Instrument::NamespacedNullMetric.new
+      @decode_metric = LogStash::Instrument::NamespacedNullMetric.new
+    end
+
+    def class
+      __getobj__.class
+    end
+
+    def metric=(metric)
+      __getobj__.metric = metric
+
+      __getobj__.metric.gauge(:name, __getobj__.class.config_name)
+
+      @encode_metric = __getobj__.metric.namespace(:encode)
+      @encode_metric.counter(:writes_in)
+      @encode_metric.report_time(:duration_in_millis, 0)
+
+      @decode_metric = __getobj__.metric.namespace(:decode)
+      @decode_metric.counter(:writes_in)
+      @decode_metric.counter(:out)
+      @decode_metric.report_time(:duration_in_millis, 0)
+    end
+
+    def encode(event)
+      @encode_metric.increment(:writes_in)
+      @encode_metric.time(:duration_in_millis) do
+        __getobj__.encode(event)
+      end
+    end
+
+    def multi_encode(events)
+      @encode_metric.increment(:writes_in, events.length)
+      @encode_metric.time(:duration_in_millis) do
+        __getobj__.multi_encode(events)
+      end
+    end
+
+    def decode(data)
+      @decode_metric.increment(:writes_in)
+      @decode_metric.time(:duration_in_millis) do
+        __getobj__.decode(data) do |event|
+          @decode_metric.increment(:out)
+          yield event
+        end
+      end
+    end
+  end
+end
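
Illustration of what the new delegator does: it wraps a concrete codec and transparently counts and times every `encode`/`decode` call under that codec's metric namespace. This is a sketch only; in practice the wrapping happens automatically in the `:codec` validator of `LogStash::Config::Mixin` (see the mixin hunk below), not in user code:

```ruby
codec = LogStash::Codecs::Delegator.new(LogStash::Plugin.lookup("codec", "json").new)

codec.decode('{"message":"hi"}') do |event|
  # Each yielded event increments the decode namespace's :out counter;
  # the whole call is timed into :duration_in_millis.
end
```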
@@ -3,6 +3,7 @@ require "treetop"
 require "logstash/compiler/treetop_monkeypatches"
 require "logstash/compiler/lscl/helpers"
 require "logstash/config/string_escape"
+require "logstash/util"

 java_import org.logstash.config.ir.DSL
 java_import org.logstash.common.SourceWithMetadata
@@ -111,7 +112,7 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
           # hash value; e.g., `{"match" => {"baz" => "bar"}, "match" => {"foo" => "bulb"}}` is
           # interpreted as `{"match" => {"baz" => "bar", "foo" => "blub"}}`.
           # (NOTE: this bypasses `AST::Hash`'s ability to detect duplicate keys)
-          hash[k] = existing.merge(v)
+          hash[k] = ::LogStash::Util.hash_merge_many(existing, v)
         elsif existing.kind_of?(::Array)
           hash[k] = existing.push(*v)
         else
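
The switch from `Hash#merge` to `::LogStash::Util.hash_merge_many` makes the duplicate-key merge deep rather than shallow, so nested settings from repeated keys are combined instead of the last occurrence winning. A rough illustration of the difference (behavior of `hash_merge_many` as understood here; it also coerces duplicate scalar values into arrays):

```ruby
existing = {"baz" => {"a" => 1}}
v        = {"baz" => {"b" => 2}}

existing.merge(v)                            #=> {"baz"=>{"b"=>2}}          shallow: nested "a" is lost
LogStash::Util.hash_merge_many(existing, v)  #=> {"baz"=>{"a"=>1, "b"=>2}}  deep: both nested keys survive
```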
@@ -164,8 +165,8 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
     class Number < Value
       def expr
         jdsl.eValue(source_meta, text_value.include?(".") ?
-                      text_value.to_f :
-                      text_value.to_i)
+                      Float(text_value) :
+                      Integer(text_value))
       end
     end

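Replacing `to_f`/`to_i` with the `Kernel#Float`/`Kernel#Integer` conversion functions makes malformed numeric literals raise instead of silently truncating. In plain Ruby:

```ruby
"3abc".to_i      #=> 3, silently stops at the first non-digit
Integer("3abc")  #=> raises ArgumentError (invalid value for Integer())
```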
@@ -0,0 +1,126 @@
+# encoding: utf-8
+require 'logstash-core'
+require 'logstash/compiler'
+
+module LogStash;
+  module Config;
+    class LIRSerializer
+      attr_reader :lir_pipeline
+
+      def self.serialize(lir_pipeline)
+        self.new(lir_pipeline).serialize
+      end
+
+      def initialize(lir_pipeline)
+        @lir_pipeline = lir_pipeline
+      end
+
+      def serialize
+        {
+          "hash" => lir_pipeline.unique_hash,
+          "type" => "lir",
+          "version" => "0.0.0",
+          "graph" => {
+            "vertices" => vertices,
+            "edges" => edges
+          }
+        }
+      end
+
+      def vertices
+        graph.getVertices.map {|v| vertex(v) }
+      end
+
+      def edges
+        graph.getEdges.map {|e| edge(e) }
+      end
+
+      def graph
+        lir_pipeline.graph
+      end
+
+      def vertex(v)
+        hashified_vertex = case vertex_type(v)
+        when :plugin
+          plugin_vertex(v)
+        when :if
+          if_vertex(v)
+        when :queue
+          queue_vertex(v)
+        end
+
+        decorate_vertex(v, hashified_vertex)
+      end
+
+      def vertex_type(v)
+        if v.java_kind_of?(org.logstash.config.ir.graph.PluginVertex)
+          :plugin
+        elsif v.java_kind_of?(org.logstash.config.ir.graph.IfVertex)
+          :if
+        elsif v.java_kind_of?(org.logstash.config.ir.graph.QueueVertex)
+          :queue
+        else
+          raise "Unexpected vertex type! #{v}"
+        end
+      end
+
+      def decorate_vertex(v, v_json)
+        v_json["meta"] = format_swm(v.source_with_metadata)
+        v_json["id"] = v.id
+        v_json["explicit_id"] = !!v.explicit_id
+        v_json["type"] = vertex_type(v).to_s
+        v_json
+      end
+
+      def plugin_vertex(v)
+        pd = v.plugin_definition
+        {
+          "config_name" => pd.name,
+          "plugin_type" => pd.getType.to_s.downcase
+        }
+      end
+
+      def if_vertex(v)
+        {
+          "condition" => v.humanReadableExpression
+        }
+      end
+
+      def queue_vertex(v)
+        {}
+      end
+
+      def edge(e)
+        e_json = {
+          "from" => e.from.id,
+          "to" => e.to.id,
+          "id" => e.id
+        }
+
+        if e.java_kind_of?(org.logstash.config.ir.graph.BooleanEdge)
+          e_json["when"] = e.edge_type
+          e_json["type"] = "boolean"
+        else
+          e_json["type"] = "plain"
+        end
+
+        e_json
+      end
+
+      def format_swm(source_with_metadata)
+        return nil unless source_with_metadata
+        {
+          "source" => {
+            "protocol" => source_with_metadata.protocol,
+            "id" => source_with_metadata.id,
+            "line" => source_with_metadata.line,
+            "column" => source_with_metadata.column
+            # We omit the text of the source code for security reasons
+            # raw text may contain passwords
+          }
+        }
+      end
+
+    end
+  end
+end
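
Given the serializer above, the payload returned under a pipeline's `graph` key has roughly this shape (all field values below are invented for illustration):

```ruby
{
  "hash"    => "d8bd579d88...",   # lir_pipeline.unique_hash
  "type"    => "lir",
  "version" => "0.0.0",
  "graph"   => {
    "vertices" => [
      {"config_name" => "stdin", "plugin_type" => "input",
       "meta" => {"source" => {"protocol" => "file", "id" => "/etc/logstash/p.conf", "line" => 1, "column" => 1}},
       "id" => "abc123", "explicit_id" => false, "type" => "plugin"},
      {"meta" => nil, "id" => "__QUEUE__", "explicit_id" => false, "type" => "queue"}
    ],
    "edges" => [
      {"from" => "abc123", "to" => "__QUEUE__", "id" => "e1", "type" => "plain"}
    ]
  }
}
```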
@@ -4,6 +4,7 @@ require "logstash/util/safe_uri"
 require "logstash/version"
 require "logstash/environment"
 require "logstash/util/plugin_version"
+require "logstash/codecs/delegator"
 require "filesize"

 LogStash::Environment.load_locale!
@@ -410,7 +411,7 @@ module LogStash::Config::Mixin
       case validator
       when :codec
         if value.first.is_a?(String)
-          value = LogStash::Plugin.lookup("codec", value.first).new
+          value = LogStash::Codecs::Delegator.new LogStash::Plugin.lookup("codec", value.first).new
           return true, value
         else
           value = value.first
@@ -43,6 +43,7 @@ module LogStash
            Setting::Boolean.new("pipeline.unsafe_shutdown", false),
            Setting::Boolean.new("pipeline.java_execution", true),
            Setting::Boolean.new("pipeline.reloadable", true),
+           Setting::Boolean.new("pipeline.plugin_classloaders", false),
            Setting.new("path.plugins", Array, []),
            Setting::NullableString.new("interactive", nil, false),
            Setting::Boolean.new("config.debug", false),
@@ -163,11 +164,15 @@ module LogStash
   end

   def windows?
-    RbConfig::CONFIG['host_os'] =~ WINDOW_OS_RE
+    host_os =~ WINDOW_OS_RE
   end

   def linux?
-    RbConfig::CONFIG['host_os'] =~ LINUX_OS_RE
+    host_os =~ LINUX_OS_RE
+  end
+
+  def host_os
+    RbConfig::CONFIG['host_os']
   end

   def locales_path(path)
@@ -99,6 +99,13 @@ class LogStash::Inputs::Base < LogStash::Plugin
     cloned
   end

+  def metric=(metric)
+    super
+    # Hack to create a new metric namespace using 'plugins' as the root
+    @codec.metric = metric.root.namespace(metric.namespace_name[0...-2].push(:codecs, codec.id))
+    metric
+  end
+
   def execution_context=(context)
     super
     # There is no easy way to propage an instance variable into the codec, because the codec
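
The namespace arithmetic in the new `metric=` above re-roots the codec's metrics beside the input's own, under the shared `:plugins` node. With hypothetical values:

```ruby
# Suppose the input plugin's metric namespace is:
metric.namespace_name   #=> [:stats, :pipelines, :main, :plugins, :inputs, "abc123"]
# [0...-2] drops [:inputs, "abc123"]; push(:codecs, codec.id) re-roots it:
#                             [:stats, :pipelines, :main, :plugins, :codecs, "json_7f2"]
```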
@@ -153,12 +153,15 @@ module LogStash module Instrument module PeriodicPoller
         old = {}
         old = old.merge!(heap["CMS Old Gen"]) if heap.has_key?("CMS Old Gen")
         old = old.merge!(heap["PS Old Gen"]) if heap.has_key?("PS Old Gen")
+        old = old.merge!(heap["G1 Old Gen"]) if heap.has_key?("G1 Old Gen")
         young = {}
         young = young.merge!(heap["Par Eden Space"]) if heap.has_key?("Par Eden Space")
         young = young.merge!(heap["PS Eden Space"]) if heap.has_key?("PS Eden Space")
+        young = young.merge!(heap["G1 Eden Space"]) if heap.has_key?("G1 Eden Space")
         survivor = {}
         survivor = survivor.merge!(heap["Par Survivor Space"]) if heap.has_key?("Par Survivor Space")
         survivor = survivor.merge!(heap["PS Survivor Space"]) if heap.has_key?("PS Survivor Space")
+        survivor = survivor.merge!(heap["G1 Survivor Space"]) if heap.has_key?("G1 Survivor Space")
         {
           "young" => aggregate_information_for(young),
           "old" => aggregate_information_for(old),
@@ -6,6 +6,7 @@ require "logstash/inputs/base"
 require "logstash/outputs/base"
 require "logstash/instrument/collector"
 require "logstash/compiler"
+require "logstash/config/lir_serializer"

 module LogStash; class JavaPipeline < JavaBasePipeline
   include LogStash::Util::Loggable
@@ -216,7 +217,10 @@ module LogStash; class JavaPipeline < JavaBasePipeline
     config_metric.gauge(:config_reload_interval, settings.get("config.reload.interval"))
     config_metric.gauge(:dead_letter_queue_enabled, dlq_enabled?)
     config_metric.gauge(:dead_letter_queue_path, dlq_writer.get_path.to_absolute_path.to_s) if dlq_enabled?
-
+    config_metric.gauge(:ephemeral_id, ephemeral_id)
+    config_metric.gauge(:hash, lir.unique_hash)
+    config_metric.gauge(:graph, ::LogStash::Config::LIRSerializer.serialize(lir))
+    config_metric.gauge(:cluster_uuids, resolve_cluster_uuids)

     @logger.info("Starting pipeline", default_logging_keys(
       "pipeline.workers" => pipeline_workers,
@@ -255,6 +259,14 @@ module LogStash; class JavaPipeline < JavaBasePipeline
       end
     end

+    def resolve_cluster_uuids
+      outputs.each_with_object(Set.new) do |output, cluster_uuids|
+        if LogStash::PluginMetadata.exists?(output.id)
+          cluster_uuids << LogStash::PluginMetadata.for_plugin(output.id).get(:cluster_uuid)
+        end
+      end.to_a.compact
+    end
+
     def wait_inputs
       @input_threads.each do |thread|
         if thread.class == Java::JavaObject
@@ -313,9 +325,14 @@ module LogStash; class JavaPipeline < JavaBasePipeline
           # Assuming the failure that caused this exception is transient,
           # let's sleep for a bit and execute #run again
           sleep(1)
+          begin
+            plugin.do_close
+          rescue => close_exception
+            @logger.debug("Input plugin raised exception while closing, ignoring",
+                          default_logging_keys(:plugin => plugin.class.config_name, :exception => close_exception.message,
+                                               :backtrace => close_exception.backtrace))
+          end
           retry
-        ensure
-          plugin.do_close
         end
       end # def inputworker
