logstash-core 7.1.1-java → 7.2.0-java

@@ -102,6 +102,13 @@ class LogStash::Outputs::Base < LogStash::Plugin
     self.class.concurrency
   end
 
+  def metric=(metric)
+    super
+    # Hack to create a new metric namespace using 'plugins' as the root
+    @codec.metric = metric.root.namespace(metric.namespace_name[0...-2].push(:codecs, codec.id))
+    metric
+  end
+
   def execution_context=(context)
     super
     # There is no easy way to propage an instance variable into the codec, because the codec
@@ -394,6 +394,14 @@ module LogStash; class Pipeline < BasePipeline
     filter_queue_client.add_output_metrics(batch.filtered_size)
   end
 
+  def resolve_cluster_uuids
+    outputs.each_with_object(Set.new) do |output, cluster_uuids|
+      if LogStash::PluginMetadata.exists?(output.id)
+        cluster_uuids << LogStash::PluginMetadata.for_plugin(output.id).get(:cluster_uuid)
+      end
+    end.to_a.compact
+  end
+
   def wait_inputs
     @input_threads.each(&:join)
   end
@@ -442,9 +450,14 @@ module LogStash; class Pipeline < BasePipeline
       # Assuming the failure that caused this exception is transient,
       # let's sleep for a bit and execute #run again
       sleep(1)
+      begin
+        plugin.do_close
+      rescue => close_exception
+        @logger.debug("Input plugin raised exception while closing, ignoring",
+                      default_logging_keys(:plugin => plugin.class.config_name, :exception => close_exception.message,
+                                           :backtrace => close_exception.backtrace))
+      end
       retry
-    ensure
-      plugin.do_close
     end
   end # def inputworker
 
@@ -15,6 +15,7 @@ module LogStash
     "dead_letter_queue.max_bytes",
     "metric.collect",
     "pipeline.java_execution",
+    "pipeline.plugin_classloaders",
     "path.config",
     "path.dead_letter_queue",
     "path.queue",
@@ -3,6 +3,8 @@ require "logstash/config/mixin"
 require "concurrent"
 require "securerandom"
 
+require_relative 'plugin_metadata'
+
 class LogStash::Plugin
   include LogStash::Util::Loggable
 
@@ -70,7 +72,11 @@ class LogStash::Plugin
   # main task terminates
   def do_close
     @logger.debug("Closing", :plugin => self.class.name)
-    close
+    begin
+      close
+    ensure
+      LogStash::PluginMetadata.delete_for_plugin(self.id)
+    end
   end
 
   # Subclasses should implement this close method if you need to perform any
@@ -136,4 +142,22 @@ class LogStash::Plugin
     require "logstash/plugins/registry"
     LogStash::PLUGIN_REGISTRY.lookup_pipeline_plugin(type, name)
   end
+
+  ##
+  # Returns this plugin's metadata key/value store.
+  #
+  # @see LogStash::PluginMetadata for restrictions and caveats.
+  # @since 7.1
+  #
+  # @usage:
+  # ~~~
+  # if defined?(plugin_metadata)
+  #   plugin_metadata.set(:foo, 'value')
+  # end
+  # ~~~
+  #
+  # @return [LogStash::PluginMetadata]
+  def plugin_metadata
+    LogStash::PluginMetadata.for_plugin(self.id)
+  end
 end # class LogStash::Plugin
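
For downstream plugin authors, a minimal sketch (not part of this diff) of the guarded usage the comment above describes, here storing the `:cluster_uuid` key that `Pipeline#resolve_cluster_uuids` reads back; `fetch_cluster_uuid` is a hypothetical helper:

~~~
# Sketch: inside a hypothetical output plugin's register method.
def register
  cluster_uuid = fetch_cluster_uuid # hypothetical call returning the connected cluster's UUID
  # Guarded so the plugin still loads on Logstash versions without plugin_metadata.
  plugin_metadata.set(:cluster_uuid, cluster_uuid) if defined?(plugin_metadata)
end
~~~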
@@ -0,0 +1,139 @@
+# encoding: utf-8
+
+require 'thread_safe/cache'
+
+module LogStash
+  ##
+  # `PluginMetadata` provides a space to store key/value metadata about a plugin, typically metadata about
+  # external resources that can be gleaned during plugin registration.
+  #
+  # Data should not be persisted across pipeline reloads, and should be cleaned up after a pipeline reload
+  #
+  #  - It MUST NOT be used to store processing state
+  #  - It SHOULD NOT be updated frequently.
+  #  - Individual metadata keys MUST be Symbols and SHOULD NOT be dynamically generated
+  #
+  # USAGE FROM PLUGINS
+  # ------------------
+  #
+  # Since we allow plugins to be updated, it is important to introduce bindings to new Logstash features in a way
+  # that doesn't break when installed onto a Logstash that doesn't have those features, e.g.:
+  #
+  # ~~~
+  #
+  # plugin_metadata.set(:foo, bar) if defined?(plugin_metadata?)
+  #
+  # ~~~
+  #
+  # @since 7.1
+  class PluginMetadata
+    include LogStash::Util::Loggable
+
+    Thread.exclusive do
+      @registry = ThreadSafe::Cache.new unless defined?(@registry)
+    end
+
+    class << self
+      ##
+      # Get the PluginMetadata object corresponding to the given plugin id
+      #
+      # @param plugin_id [String]
+      #
+      # @return [PluginMetadata]: the metadata object for the provided `plugin_id`; if no
+      #                           metadata object exists, it will be created.
+      def for_plugin(plugin_id)
+        @registry.compute_if_absent(plugin_id) { PluginMetadata.new }
+      end
+
+      ##
+      # Determine if we have an existing PluginMetadata object for the given plugin id
+      # This allows us to avoid creating a metadata object if we don't already have one.
+      #
+      # @param plugin_id [String]
+      #
+      # @return [Boolean]
+      def exists?(plugin_id)
+        @registry.key?(plugin_id)
+      end
+
+      ##
+      # Deletes, and then clears the contents of an existing PluginMetadata object for the given plugin id if one exists
+      #
+      # @param plugin_id [String]
+      #
+      # @return [Boolean]
+      def delete_for_plugin(plugin_id)
+        logger.debug("Removing metadata for plugin #{plugin_id}")
+        old_registry = @registry.delete(plugin_id)
+        old_registry.clear unless old_registry.nil?
+      end
+
+      ##
+      # @api private
+      def reset!
+        @registry.clear
+      end
+    end
+
+    ##
+    # @see [LogStash::PluginMetadata#for_plugin(String)]
+    # @api private
+    def initialize
+      @datastore = ThreadSafe::Cache.new
+    end
+
+    ##
+    # Set the metadata key for this plugin, returning the previous value (if any)
+    #
+    # @param key [Symbol]
+    # @param value [Object]
+    #
+    # @return [Object]
+    def set(key, value)
+      if value.nil?
+        @datastore.delete(key)
+      else
+        @datastore.get_and_set(key, value)
+      end
+    end
+
+    ##
+    # Get the metadata value for the given key on this plugin
+    #
+    # @param key [Symbol]
+    #
+    # @return [Object]: the value object associated with the given key on this
+    #                   plugin, or nil if no value is associated
+    def get(key)
+      @datastore.get(key)
+    end
+
+    ##
+    # Determine whether specific key/value metadata exists for this plugin
+    #
+    # @param key [Symbol]: the key
+    #
+    # @return [Boolean]: true if the plugin includes metadata for the key
+    def set?(key)
+      @datastore.key?(key)
+    end
+
+    ##
+    # Delete the metadata key for this plugin, returning the previous value (if any)
+    #
+    # @param key [Symbol]
+    #
+    # @return [Object]
+    def delete(key)
+      @datastore.delete(key)
+    end
+
+    ##
+    # Clear all metadata keys for this plugin
+    #
+    # @return [Object]
+    def clear
+      @datastore.clear
+    end
+  end
+end
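
A quick sketch (not part of the diff) exercising the API this new file exposes, using a made-up plugin id:

~~~
metadata = LogStash::PluginMetadata.for_plugin("my-output-id")  # created on first access
metadata.set(:cluster_uuid, "abc123")                           # returns the previous value, if any
metadata.get(:cluster_uuid)                                     # => "abc123"
metadata.set?(:cluster_uuid)                                    # => true
LogStash::PluginMetadata.exists?("my-output-id")                # => true
LogStash::PluginMetadata.delete_for_plugin("my-output-id")      # removes and clears the store
~~~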
@@ -105,6 +105,7 @@ module LogStash module Plugins
       # but it is the case that we can call lookups from multiple threads,
       # when multiple pipelines are in play, and that a lookup may modify the registry.
       @registry = java.util.concurrent.ConcurrentHashMap.new
+      @java_plugins = java.util.concurrent.ConcurrentHashMap.new
       @hooks = HooksRegistry.new
     end
 
@@ -133,6 +134,15 @@ module LogStash module Plugins
       require "logstash/plugins/builtin"
 
       GemRegistry.logstash_plugins.each do |plugin_context|
+        if plugin_context.spec.metadata.key?('java_plugin')
+          jar_files = plugin_context.spec.files.select {|f| f =~ /.*\.jar/}
+          expected_jar_name = plugin_context.spec.name + "-" + plugin_context.spec.version.to_s + ".jar"
+          if (jar_files.length != 1 || !jar_files[0].end_with?(expected_jar_name))
+            raise LoadError, "Java plugin '#{plugin_context.spec.name}' does not contain a single jar file with the plugin's name and version"
+          end
+          @java_plugins[plugin_context.spec.name] = [plugin_context.spec.loaded_from, jar_files[0]]
+        end
+
         # When a plugin has a HOOK_FILE defined, its the responsibility of the plugin
         # to register itself to the registry of available plugins.
         #
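
For context (not part of the diff), a hypothetical gemspec fragment that would satisfy the new check: the gem must advertise the `java_plugin` metadata key and ship exactly one jar named `<gem name>-<version>.jar`. The plugin name, version, and file paths below are invented:

~~~
# Hypothetical Java plugin gemspec satisfying the check above.
Gem::Specification.new do |s|
  s.name     = 'logstash-filter-java_example'
  s.version  = '0.1.0'
  s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'filter', 'java_plugin' => 'true' }
  # Exactly one .jar in the file list, ending in "logstash-filter-java_example-0.1.0.jar",
  # otherwise registration raises the LoadError above.
  s.files    = Dir['lib/**/*.rb'] + ['lib/logstash-filter-java_example-0.1.0.jar']
end
~~~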
@@ -262,13 +272,31 @@ module LogStash module Plugins
     # @param name [String] plugin name
     # @return [Boolean] true if klass is a valid plugin for name
     def is_a_plugin?(klass, name)
+      (klass.class == Java::JavaLang::Class && klass.simple_name.downcase == name.gsub('_','')) ||
       (klass.class == Java::JavaClass && klass.simple_name.downcase == name.gsub('_','')) ||
         (klass.ancestors.include?(LogStash::Plugin) && klass.respond_to?(:config_name) && klass.config_name == name)
     end
 
     def add_plugin(type, name, klass)
       if klass.respond_to?("javaClass", true)
-        @registry[key_for(type, name)] = PluginSpecification.new(type, name, klass.javaClass)
+        if LogStash::SETTINGS.get_value('pipeline.plugin_classloaders')
+          full_name = 'logstash-' + key_for(type, name)
+          if @java_plugins.key?(full_name)
+            plugin_paths = @java_plugins[full_name]
+          else
+            raise LoadError, "Could not find metadata for Java plugin: #{full_name}"
+          end
+
+          java_import org.logstash.plugins.PluginClassLoader
+          java_import org.logstash.Logstash
+
+          classloader = PluginClassLoader.create(plugin_paths[0], plugin_paths[1], Logstash.java_class.class_loader)
+          klazz = classloader.load_class(klass.javaClass.name)
+
+          @registry[key_for(type, name)] = PluginSpecification.new(type, name, klazz.ruby_class.java_class)
+        else
+          @registry[key_for(type, name)] = PluginSpecification.new(type, name, klass.javaClass)
+        end
       elsif !exists?(type, name)
         specification_klass = type == :universal ? UniversalPluginSpecification : PluginSpecification
         @registry[key_for(type, name)] = specification_klass.new(type, name, klass)
@@ -113,6 +113,11 @@ class LogStash::Runner < Clamp::StrictCommand
          :attribute_name => "pipeline.java_execution",
          :default => LogStash::SETTINGS.get_default("pipeline.java_execution")
 
+  option ["--plugin-classloaders"], :flag,
+         I18n.t("logstash.runner.flag.plugin-classloaders"),
+         :attribute_name => "pipeline.plugin_classloaders",
+         :default => LogStash::SETTINGS.get_default("pipeline.plugin_classloaders")
+
   option ["-b", "--pipeline.batch.size"], "SIZE",
          I18n.t("logstash.runner.flag.pipeline-batch-size"),
          :attribute_name => "pipeline.batch.size",
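
The new flag maps onto the `pipeline.plugin_classloaders` boolean setting; a sketch (mirroring the registry hunk above) of how that value is consulted at plugin-registration time:

~~~
# Sketch: the registry consults the setting before deciding how to load a Java plugin.
if LogStash::SETTINGS.get_value('pipeline.plugin_classloaders')
  # load the plugin class through an isolated org.logstash.plugins.PluginClassLoader
else
  # fall back to the default shared-classloader behaviour
end
~~~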
@@ -290,6 +290,8 @@ en:
         Sets the number of pipeline workers to run.
       java-execution: |+
         Use Java execution engine.
+      plugin-classloaders: |+
+        (Beta) Load Java plugins in independent classloaders to isolate their dependencies.
       pipeline-batch-size: |+
         Size of batches the pipeline is to work in.
       pipeline-batch-delay: |+
@@ -57,7 +57,6 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency "sinatra", '~> 1', '>= 1.4.6'
   gem.add_runtime_dependency 'puma', '~> 2'
   gem.add_runtime_dependency "jruby-openssl", "~> 0.10" # >= 0.9.13 Required to support TLSv1.2
-  gem.add_runtime_dependency "faraday", "~> 0.9.0" # because of conflicting dependencies between twitter input and octokit
   gem.add_runtime_dependency "chronic_duration", "~> 0.10"
 
   gem.add_runtime_dependency "treetop", "~> 1" #(MIT license)
@@ -73,4 +72,5 @@ Gem::Specification.new do |gem|
 
   gem.add_runtime_dependency "elasticsearch", "~> 5"
   gem.add_runtime_dependency "manticore", '~> 0.6'
+  gem.add_runtime_dependency "faraday", '~> 0.9.0'
 end
@@ -0,0 +1,85 @@
+# encoding: utf-8
+require "spec_helper"
+
+class LogStash::Codecs::MockCodec < LogStash::Codecs::Base
+  config_name "my_name"
+
+  def multi_encode(e)
+  end
+
+  def encode(e)
+  end
+
+  def decode(e)
+    for i in e.split('|')
+      yield i
+    end
+  end
+end
+
+describe LogStash::Codecs::Delegator do
+  let(:collector) { LogStash::Instrument::Collector.new }
+  let(:metric) { LogStash::Instrument::Metric.new(collector) }
+  let(:codec) { LogStash::Codecs::MockCodec.new }
+
+  subject do
+    delegator = described_class.new(codec)
+    delegator.metric = metric.namespace([:stats, :pipelines, :main, :plugins, :codecs, :my_id])
+    delegator
+  end
+
+  let(:snapshot_store) { collector.snapshot_metric.metric_store }
+
+  let(:snapshot_metric) { snapshot_store.get_shallow(:stats) }
+
+  describe "#encode" do
+    it "should delegate call to codec" do
+      expect(codec).to receive(:encode).with("abcdef")
+      subject.encode("abcdef")
+    end
+
+    it "should increment metrics" do
+      subject.encode("test")
+      expect(snapshot_metric[:pipelines][:main][:plugins][:codecs][:my_id][:encode][:writes_in].value).to eq(1)
+    end
+  end
+
+  describe "#multi_encode" do
+    it "should delegate call to codec" do
+      expect(codec).to receive(:multi_encode).with(%w(ay laa))
+      subject.multi_encode(%w(ay laa))
+    end
+
+    it "should increment metrics" do
+      subject.multi_encode(%w(ay test))
+      expect(snapshot_metric[:pipelines][:main][:plugins][:codecs][:my_id][:encode][:writes_in].value).to eq(2)
+    end
+  end
+
+  describe "#decode" do
+    it "should delegate call to codec" do
+      expect(codec).to receive(:decode).with("ayooooo")
+      subject.decode("ayooooo")
+    end
+
+    it "should increment metrics" do
+      subject.decode("bird|law") {}
+      expect(snapshot_metric[:pipelines][:main][:plugins][:codecs][:my_id][:decode][:writes_in].value).to eq(1)
+      expect(snapshot_metric[:pipelines][:main][:plugins][:codecs][:my_id][:decode][:out].value).to eq(2)
+    end
+  end
+
+  describe "#close" do
+    it "should delegate call to codec" do
+      expect(codec).to receive(:close)
+      subject.close
+    end
+  end
+
+  describe "#plugin_type" do
+    it "should delegate call to codec" do
+      expect(codec).to receive(:plugin_type)
+      subject.plugin_type
+    end
+  end
+end
@@ -252,6 +252,34 @@ describe LogStash::Compiler do
       expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
     end
 
+    describe "a filter plugin with a repeated hash directive with duplicated keys" do
+      let(:source) { "input { } filter { #{plugin_source} } output { } " }
+      let(:plugin_source) do
+        %q[
+          grok {
+            match => { "message" => "foo" }
+            match => { "message" => "bar" }
+            break_on_match => false
+          }
+        ]
+      end
+      subject(:c_plugin) { compiled[:filter] }
+
+      let(:expected_plugin_args) do
+        {
+          "match" => {
+            "message" => ["foo", "bar"]
+          },
+          "break_on_match" => "false"
+        }
+      end
+
+      it "should merge the values of the duplicate keys into an array" do
+        expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
+      end
+
+    end
+
     describe "a filter plugin that has nested Hash directives" do
       let(:source) { "input { } filter { #{plugin_source} } output { } " }
       let(:plugin_source) do