logstash-core 2.1.3-java → 2.2.0-java


Potentially problematic release.



Files changed (71)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core.rb +1 -3
  3. data/lib/logstash-core/logstash-core.rb +3 -0
  4. data/lib/logstash-core/version.rb +8 -0
  5. data/lib/logstash/agent.rb +48 -20
  6. data/lib/logstash/codecs/base.rb +2 -2
  7. data/lib/logstash/config/config_ast.rb +8 -3
  8. data/lib/logstash/environment.rb +0 -16
  9. data/lib/logstash/filters/base.rb +9 -5
  10. data/lib/logstash/inputs/base.rb +1 -1
  11. data/lib/logstash/output_delegator.rb +150 -0
  12. data/lib/logstash/outputs/base.rb +37 -40
  13. data/lib/logstash/pipeline.rb +259 -178
  14. data/lib/logstash/pipeline_reporter.rb +114 -0
  15. data/lib/logstash/plugin.rb +1 -1
  16. data/lib/logstash/{shutdown_controller.rb → shutdown_watcher.rb} +10 -37
  17. data/lib/logstash/util.rb +17 -0
  18. data/lib/logstash/util/decorators.rb +14 -7
  19. data/lib/logstash/util/worker_threads_default_printer.rb +4 -4
  20. data/lib/logstash/util/wrapped_synchronous_queue.rb +41 -0
  21. data/lib/logstash/version.rb +10 -2
  22. data/locales/en.yml +8 -3
  23. data/logstash-core.gemspec +5 -3
  24. data/spec/{core/conditionals_spec.rb → conditionals_spec.rb} +0 -0
  25. data/spec/{core/config_spec.rb → logstash/config/config_ast_spec.rb} +0 -0
  26. data/spec/{core/config_cpu_core_strategy_spec.rb → logstash/config/cpu_core_strategy_spec.rb} +0 -0
  27. data/spec/{core/config_defaults_spec.rb → logstash/config/defaults_spec.rb} +0 -0
  28. data/spec/{core/config_mixin_spec.rb → logstash/config/mixin_spec.rb} +0 -0
  29. data/spec/{core → logstash}/environment_spec.rb +0 -0
  30. data/spec/{filters → logstash/filters}/base_spec.rb +0 -0
  31. data/spec/{inputs → logstash/inputs}/base_spec.rb +0 -0
  32. data/spec/{lib/logstash → logstash}/java_integration_spec.rb +0 -0
  33. data/spec/{util → logstash}/json_spec.rb +0 -0
  34. data/spec/logstash/output_delegator_spec.rb +126 -0
  35. data/spec/logstash/outputs/base_spec.rb +40 -0
  36. data/spec/logstash/pipeline_reporter_spec.rb +85 -0
  37. data/spec/{core → logstash}/pipeline_spec.rb +128 -16
  38. data/spec/{core → logstash}/plugin_spec.rb +47 -1
  39. data/spec/logstash/runner_spec.rb +68 -0
  40. data/spec/{core/shutdown_controller_spec.rb → logstash/shutdown_watcher_spec.rb} +17 -11
  41. data/spec/{util → logstash/util}/buftok_spec.rb +0 -0
  42. data/spec/{util → logstash/util}/charset_spec.rb +0 -0
  43. data/spec/{util → logstash/util}/defaults_printer_spec.rb +4 -4
  44. data/spec/{util → logstash/util}/java_version_spec.rb +0 -0
  45. data/spec/{util → logstash/util}/plugin_version_spec.rb +0 -0
  46. data/spec/{util → logstash/util}/unicode_trimmer_spec.rb +0 -0
  47. data/spec/{util → logstash/util}/worker_threads_default_printer_spec.rb +8 -8
  48. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +28 -0
  49. data/spec/{util_spec.rb → logstash/util_spec.rb} +0 -0
  50. metadata +74 -81
  51. data/lib/logstash/event.rb +0 -275
  52. data/lib/logstash/patches/bundler.rb +0 -36
  53. data/lib/logstash/sized_queue.rb +0 -8
  54. data/lib/logstash/string_interpolation.rb +0 -140
  55. data/lib/logstash/timestamp.rb +0 -97
  56. data/lib/logstash/util/accessors.rb +0 -123
  57. data/spec/core/event_spec.rb +0 -518
  58. data/spec/core/runner_spec.rb +0 -40
  59. data/spec/core/timestamp_spec.rb +0 -84
  60. data/spec/coverage_helper.rb +0 -24
  61. data/spec/lib/logstash/bundler_spec.rb +0 -121
  62. data/spec/license_spec.rb +0 -67
  63. data/spec/outputs/base_spec.rb +0 -26
  64. data/spec/plugin_manager/install_spec.rb +0 -28
  65. data/spec/plugin_manager/update_spec.rb +0 -39
  66. data/spec/plugin_manager/util_spec.rb +0 -71
  67. data/spec/spec_helper.rb +0 -11
  68. data/spec/util/accessors_spec.rb +0 -170
  69. data/spec/util/compress_spec.rb +0 -121
  70. data/spec/util/gemfile_spec.rb +0 -212
  71. data/spec/util/retryable_spec.rb +0 -139
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 30d5b09c260219583243c4a9b051ba90f711f8f8
- data.tar.gz: f1f41b55b7cc31ddcb5719af686d254521303198
+ metadata.gz: 7797b3d45a9c4f51bb59252dba06af6076ade965
+ data.tar.gz: faf5ef78e9b530598bddb9778d99244e73cb0125
  SHA512:
- metadata.gz: d1a4112b84784a6203cfd81cfa39604dd6541f50cb903796c06922b017e0c1da4c87632cb1757e2e0b922636bbe0095fb374ad704027ec9efeff3298f9692650
- data.tar.gz: 7c62b5a5f63b8b702af1b7e2dd111e4acd6c31a70f9313ef517adc5215995795bde732d695012dfdfc58d4dd8b3d806c470dde9cc944fd2880581a044bd58c5a
+ metadata.gz: 8021fa77ce2b27961dfa76fd42a65993f14b9273df0ecf013b77098feaf68779909469c6bd69535d52f800619f43d2d31984320de626e7d725e218a5cdced39b
+ data.tar.gz: 97992197bce414ee23b8f62c703334296274b943a414a5b2abf318eb9843c15964e029eb626f6d4458f7209053d1958fd3f25f2439518bb74530cc1866ef2a0d

data/lib/logstash-core.rb CHANGED
@@ -1,3 +1 @@
- # encoding: utf-8
- module LogStash
- end
+ require "logstash-core/logstash-core"

data/lib/logstash-core/logstash-core.rb ADDED
@@ -0,0 +1,3 @@
+ # encoding: utf-8
+ module LogStash
+ end

data/lib/logstash-core/version.rb ADDED
@@ -0,0 +1,8 @@
+ # encoding: utf-8
+
+ # The version of logstash core gem.
+ #
+ # Note to authors: this should not include dashes because 'gem' barfs if
+ # you include a dash in the version string.
+
+ LOGSTASH_CORE_VERSION = "2.2.0"

data/lib/logstash/agent.rb CHANGED
@@ -5,6 +5,7 @@ require "logstash/errors"
  require "logstash/config/cpu_core_strategy"
  require "uri"
  require "net/http"
+ require "logstash/pipeline"
  LogStash::Environment.load_locale!

  class LogStash::Agent < Clamp::Command
@@ -20,10 +21,21 @@ class LogStash::Agent < Clamp::Command
  :default_input => DEFAULT_INPUT, :default_output => DEFAULT_OUTPUT),
  :default => "", :attribute_name => :config_string

- option ["-w", "--filterworkers"], "COUNT",
- I18n.t("logstash.agent.flag.filterworkers"),
- :attribute_name => :filter_workers,
- :default => 0, &:to_i
+ option ["-w", "--pipeline-workers"], "COUNT",
+ I18n.t("logstash.runner.flag.pipeline-workers"),
+ :attribute_name => :pipeline_workers,
+ :default => LogStash::Pipeline::DEFAULT_SETTINGS[:default_pipeline_workers]
+
+
+ option ["-b", "--pipeline-batch-size"], "SIZE",
+ I18n.t("logstash.runner.flag.pipeline-batch-size"),
+ :attribute_name => :pipeline_batch_size,
+ :default => LogStash::Pipeline::DEFAULT_SETTINGS[:pipeline_batch_size]
+
+ option ["-u", "--pipeline-batch-delay"], "DELAY_IN_MS",
+ I18n.t("logstash.runner.flag.pipeline-batch-delay"),
+ :attribute_name => :pipeline_batch_delay,
+ :default => LogStash::Pipeline::DEFAULT_SETTINGS[:pipeline_batch_delay]

  option ["-l", "--log"], "FILE",
  I18n.t("logstash.agent.flag.log"),
@@ -55,6 +67,32 @@ class LogStash::Agent < Clamp::Command
  :attribute_name => :unsafe_shutdown,
  :default => false

+ def initialize(*args)
+ super(*args)
+ @pipeline_settings ||= { :pipeline_id => "base" }
+ end
+
+ def pipeline_workers=(pipeline_workers_value)
+ @pipeline_settings[:pipeline_workers] = validate_positive_integer(pipeline_workers_value)
+ end
+
+ def pipeline_batch_size=(pipeline_batch_size_value)
+ @pipeline_settings[:pipeline_batch_size] = validate_positive_integer(pipeline_batch_size_value)
+ end
+
+ def pipeline_batch_delay=(pipeline_batch_delay_value)
+ @pipeline_settings[:pipeline_batch_delay] = validate_positive_integer(pipeline_batch_delay_value)
+ end
+
+ def validate_positive_integer(str_arg)
+ int_arg = str_arg.to_i
+ if str_arg !~ /^\d+$/ || int_arg < 1
+ raise ArgumentError, "Expected a positive integer, got '#{str_arg}'"
+ end
+
+ int_arg
+ end
+
  # Emit a warning message.
  def warn(message)
  # For now, all warnings are fatal.
@@ -80,8 +118,8 @@ class LogStash::Agent < Clamp::Command
  require "logstash/plugin"
  @logger = Cabin::Channel.get(LogStash)

- LogStash::ShutdownController.unsafe_shutdown = unsafe_shutdown?
- LogStash::ShutdownController.logger = @logger
+ LogStash::ShutdownWatcher.unsafe_shutdown = unsafe_shutdown?
+ LogStash::ShutdownWatcher.logger = @logger

  if version?
  show_version
@@ -121,7 +159,7 @@ class LogStash::Agent < Clamp::Command
  end

  begin
- pipeline = LogStash::Pipeline.new(@config_string)
+ pipeline = LogStash::Pipeline.new(@config_string, @pipeline_settings)
  rescue LoadError => e
  fail("Configuration problem.")
  end
@@ -151,8 +189,6 @@ class LogStash::Agent < Clamp::Command
  configure_logging(log_file)
  end

- pipeline.configure("filter-workers", filter_workers) if filter_workers > 0
-
  # Stop now if we are only asking for a config test.
  if config_test?
  report "Configuration OK"
@@ -184,7 +220,7 @@ class LogStash::Agent < Clamp::Command

  def shutdown(pipeline)
  pipeline.shutdown do
- ::LogStash::ShutdownController.start(pipeline)
+ ::LogStash::ShutdownWatcher.start(pipeline)
  end
  end

@@ -319,27 +355,19 @@ class LogStash::Agent < Clamp::Command
  Dir.glob(path).sort.each do |file|
  next unless File.file?(file)
  if file.match(/~$/)
- @logger.debug("NOT reading config file because it is a temp file", :config_file => file)
+ @logger.debug("NOT reading config file because it is a temp file", :file => file)
  next
  end
- @logger.debug("Reading config file", :config_file => file)
+ @logger.debug("Reading config file", :file => file)
  cfg = File.read(file)
  if !cfg.ascii_only? && !cfg.valid_encoding?
  encoding_issue_files << file
  end
  config << cfg + "\n"
- if config_test?
- @logger.debug? && @logger.debug("\nThe following is the content of a file", :config_file => file.to_s)
- @logger.debug? && @logger.debug("\n" + cfg + "\n\n")
- end
  end
  if (encoding_issue_files.any?)
  fail("The following config files contains non-ascii characters but are not UTF-8 encoded #{encoding_issue_files}")
  end
- if config_test?
- @logger.debug? && @logger.debug("\nThe following is the merged configuration")
- @logger.debug? && @logger.debug("\n" + config + "\n\n")
- end
  return config
  end # def load_config

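Note (not part of the diff): the three new flags feed the @pipeline_settings hash that now reaches LogStash::Pipeline.new, per the hunks above. A minimal sketch; the setting keys come from the diff, while config_string and the numeric values are placeholders:

    # Illustrative only; the numbers stand in for DEFAULT_SETTINGS values.
    settings = {
      :pipeline_id          => "base",
      :pipeline_workers     => 4,    # -w / --pipeline-workers
      :pipeline_batch_size  => 125,  # -b / --pipeline-batch-size
      :pipeline_batch_delay => 5     # -u / --pipeline-batch-delay (milliseconds)
    }
    pipeline = LogStash::Pipeline.new(config_string, settings)
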
data/lib/logstash/codecs/base.rb CHANGED
@@ -11,7 +11,7 @@ module LogStash::Codecs; class Base < LogStash::Plugin

  def initialize(params={})
  super
- config_init(params)
+ config_init(@params)
  register if respond_to?(:register)
  end

@@ -27,7 +27,7 @@ module LogStash::Codecs; class Base < LogStash::Plugin
  raise "#{self.class}#encode must be overidden"
  end # def encode

- public
+ public
  def close; end;

  # @param block [Proc(event, data)] the callback proc passing the original event and the encoded event

data/lib/logstash/config/config_ast.rb CHANGED
@@ -107,7 +107,11 @@ module LogStash; module Config; module AST
  ["filter", "output"].each do |type|
  # defines @filter_func and @output_func

- definitions << "def #{type}_func(event)"
+ # This need to be defined as a singleton method
+ # so each instance of the pipeline has his own implementation
+ # of the output/filter function
+ definitions << "define_singleton_method :#{type}_func do |event|"
+ definitions << " targeted_outputs = []" if type == "output"
  definitions << " events = [event]" if type == "filter"
  definitions << " @logger.debug? && @logger.debug(\"#{type} received\", :event => event.to_hash)"

@@ -116,6 +120,7 @@ module LogStash; module Config; module AST
  end

  definitions << " events" if type == "filter"
+ definitions << " targeted_outputs" if type == "output"
  definitions << "end"
  end

@@ -237,7 +242,7 @@ module LogStash; module Config; module AST
  events = #{variable_name}.multi_filter(events)
  CODE
  when "output"
- return "#{variable_name}.handle(event)\n"
+ return "targeted_outputs << #{variable_name}\n"
  when "codec"
  settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?)
  attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})"
@@ -402,7 +407,7 @@ module LogStash; module Config; module AST
  <<-CODE
  events = cond_func_#{i}(events)
  CODE
- else
+ else # Output
  <<-CODE
  #{super}
  end

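Note (not part of the diff): the switch from def to define_singleton_method means the compiled filter_func/output_func live on each pipeline object rather than being shared. A minimal Ruby sketch of the difference, using a made-up FakePipeline class:

    # Hypothetical illustration: each instance keeps its own compiled method,
    # so two pipelines built from different configs cannot clobber each other.
    class FakePipeline
      def compile(label)
        define_singleton_method(:output_func) do |event|
          targeted_outputs = []
          targeted_outputs << "#{label} output for #{event}"
          targeted_outputs   # mirrors the generated code returning targeted_outputs
        end
      end
    end

    a = FakePipeline.new.tap { |p| p.compile("A") }
    b = FakePipeline.new.tap { |p| p.compile("B") }
    a.output_func("e1")  # => ["A output for e1"]
    b.output_func("e1")  # => ["B output for e1"]
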
data/lib/logstash/environment.rb CHANGED
@@ -1,18 +1,10 @@
  # encoding: utf-8
  require "logstash/errors"
- require "logstash/version"

  module LogStash
  module Environment
  extend self

- # rehydrate the bootstrap environment if the startup was not done by executing bootstrap.rb
- # and we are in the context of the logstash package
- if !LogStash::Environment.const_defined?("LOGSTASH_HOME") && !ENV["LOGSTASH_HOME"].to_s.empty?
- $LOAD_PATH << ::File.join(ENV["LOGSTASH_HOME"], "lib")
- require "bootstrap/environment"
- end
-
  LOGSTASH_CORE = ::File.expand_path(::File.join(::File.dirname(__FILE__), "..", ".."))
  LOGSTASH_ENV = (ENV["LS_ENV"] || 'production').to_s.freeze

@@ -81,14 +73,6 @@ module LogStash
  ::Gem.win_platform?
  end

- def vendor_path(path)
- return ::File.join(LOGSTASH_HOME, "vendor", path)
- end
-
- def pattern_path(path)
- return ::File.join(LOGSTASH_HOME, "patterns", path)
- end
-
  def locales_path(path)
  return ::File.join(LOGSTASH_CORE, "locales", path)
  end

data/lib/logstash/filters/base.rb CHANGED
@@ -120,7 +120,7 @@ class LogStash::Filters::Base < LogStash::Plugin
  public
  def initialize(params)
  super
- config_init(params)
+ config_init(@params)
  @threadsafe = true
  end # def initialize

@@ -179,12 +179,16 @@ class LogStash::Filters::Base < LogStash::Plugin

  LogStash::Util::Decorators.add_tags(@add_tag,event,"filters/#{self.class.name}")

+ # note below that the tags array field needs to be updated then reassigned to the event.
+ # this is important because a construct like event["tags"].delete(tag) will not work
+ # in the current Java event implementation. see https://github.com/elastic/logstash/issues/4140
  @remove_tag.each do |tag|
- break if event["tags"].nil?
+ tags = event["tags"]
+ break if tags.nil? || tags.empty?
  tag = event.sprintf(tag)
- @logger.debug? and @logger.debug("filters/#{self.class.name}: removing tag",
- :tag => tag)
- event["tags"].delete(tag)
+ @logger.debug? and @logger.debug("filters/#{self.class.name}: removing tag", :tag => tag)
+ tags.delete(tag)
+ event["tags"] = tags
  end
  end # def filter_matched

data/lib/logstash/inputs/base.rb CHANGED
@@ -53,7 +53,7 @@ class LogStash::Inputs::Base < LogStash::Plugin
  super
  @threadable = false
  @stop_called = Concurrent::AtomicBoolean.new(false)
- config_init(params)
+ config_init(@params)
  @tags ||= []
  end # def initialize

data/lib/logstash/output_delegator.rb ADDED
@@ -0,0 +1,150 @@
+ # encoding: utf-8
+ require "concurrent/atomic/atomic_fixnum"
+
+ # This class goes hand in hand with the pipeline to provide a pool of
+ # free workers to be used by pipeline worker threads. The pool is
+ # internally represented with a SizedQueue set the the size of the number
+ # of 'workers' the output plugin is configured with.
+ #
+ # This plugin also records some basic statistics
+ module LogStash; class OutputDelegator
+ attr_reader :workers, :config, :worker_count, :threadsafe
+
+ # The *args this takes are the same format that a Outputs::Base takes. A list of hashes with parameters in them
+ # Internally these just get merged together into a single hash
+ def initialize(logger, klass, default_worker_count, *args)
+ @logger = logger
+ @threadsafe = klass.threadsafe?
+ @config = args.reduce({}, :merge)
+ @klass = klass
+
+ # We define this as an array regardless of threadsafety
+ # to make reporting simpler, even though a threadsafe plugin will just have
+ # a single instance
+ #
+ # Older plugins invoke the instance method Outputs::Base#workers_not_supported
+ # To detect these we need an instance to be created first :()
+ # TODO: In the next major version after 2.x remove support for this
+ @workers = [@klass.new(*args)]
+ @workers.first.register # Needed in case register calls `workers_not_supported`
+
+ # DO NOT move this statement before the instantiation of the first single instance
+ # Read the note above to understand why
+ @worker_count = calculate_worker_count(default_worker_count)
+ @logger.debug("Will start workers for output", :worker_count => @worker_count, :class => klass)
+
+ warn_on_worker_override!
+ # This queue is used to manage sharing across threads
+ @worker_queue = SizedQueue.new(@worker_count)
+
+ @workers += (@worker_count - 1).times.map do
+ inst = @klass.new(*args)
+ inst.register
+ inst
+ end
+
+ @workers.each { |w| @worker_queue << w }
+
+ @events_received = Concurrent::AtomicFixnum.new(0)
+
+
+ # One might wonder why we don't use something like
+ # define_singleton_method(:multi_receive, method(:threadsafe_multi_receive)
+ # and the answer is this is buggy on Jruby 1.7.x . It works 98% of the time!
+ # The other 2% you get weird errors about rebinding to the same object
+ # Until we switch to Jruby 9.x keep the define_singleton_method parts
+ # the way they are, with a block
+ # See https://github.com/jruby/jruby/issues/3582
+ if threadsafe?
+ @threadsafe_worker = @workers.first
+ define_singleton_method(:multi_receive) do |events|
+ threadsafe_multi_receive(events)
+ end
+ else
+ define_singleton_method(:multi_receive) do |events|
+ worker_multi_receive(events)
+ end
+ end
+ end
+
+ def threadsafe?
+ !!@threadsafe
+ end
+
+ def warn_on_worker_override!
+ # The user has configured extra workers, but this plugin doesn't support it :(
+ if worker_limits_overriden?
+ message = @klass.workers_not_supported_message
+ warning_meta = {:plugin => @klass.config_name, :worker_count => @config["workers"]}
+ if message
+ warning_meta[:message] = message
+ @logger.warn(I18n.t("logstash.pipeline.output-worker-unsupported-with-message", warning_meta))
+ else
+ @logger.warn(I18n.t("logstash.pipeline.output-worker-unsupported", warning_meta))
+ end
+ end
+ end
+
+ def worker_limits_overriden?
+ @config["workers"] && @config["workers"] > 1 && @klass.workers_not_supported?
+ end
+
+ def calculate_worker_count(default_worker_count)
+ if @threadsafe || @klass.workers_not_supported?
+ 1
+ else
+ @config["workers"] || default_worker_count
+ end
+ end
+
+ def config_name
+ @klass.config_name
+ end
+
+ def register
+ @workers.each {|w| w.register}
+ end
+
+ def threadsafe_multi_receive(events)
+ @events_received.increment(events.length)
+
+ @threadsafe_worker.multi_receive(events)
+ end
+
+ def worker_multi_receive(events)
+ @events_received.increment(events.length)
+
+ worker = @worker_queue.pop
+ begin
+ worker.multi_receive(events)
+ ensure
+ @worker_queue.push(worker)
+ end
+ end
+
+ def do_close
+ @logger.debug("closing output delegator", :klass => self)
+
+ @worker_count.times do
+ worker = @worker_queue.pop
+ worker.do_close
+ end
+ end
+
+ def events_received
+ @events_received.value
+ end
+
+ # There's no concept of 'busy' workers for a threadsafe plugin!
+ def busy_workers
+ if @threadsafe
+ 0
+ else
+ @workers.size - @worker_queue.size
+ end
+ end
+
+ private
+ # Needed for testing, so private
+ attr_reader :threadsafe_worker, :worker_queue
+ end; end

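Note (not part of the diff): the pipeline is the intended caller of this new class, wrapping each compiled output and pushing batches through multi_receive. A rough usage sketch; logger, SomeOutput, and events are stand-ins:

    # Illustrative only. SomeOutput is assumed to be a LogStash::Outputs::Base
    # subclass; the delegator registers its pooled instances during initialize.
    delegator = LogStash::OutputDelegator.new(logger, SomeOutput, 2, { "workers" => 2 })
    delegator.multi_receive(events)   # checks a worker out of the SizedQueue pool,
                                      # hands it the whole batch, then returns it
    delegator.events_received         # running total kept by the AtomicFixnum
    delegator.do_close                # closes each worker as it is drained from the pool
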
data/lib/logstash/outputs/base.rb CHANGED
@@ -4,6 +4,8 @@ require "logstash/logging"
  require "logstash/plugin"
  require "logstash/namespace"
  require "logstash/config/mixin"
+ require "logstash/util/wrapped_synchronous_queue"
+ require "concurrent/atomic/atomic_fixnum"

  class LogStash::Outputs::Base < LogStash::Plugin
  include LogStash::Config::Mixin
@@ -23,23 +25,46 @@ class LogStash::Outputs::Base < LogStash::Plugin
  # Note that this setting may not be useful for all outputs.
  config :workers, :validate => :number, :default => 1

- attr_reader :worker_plugins, :worker_queue, :worker_threads
+ attr_reader :worker_plugins, :available_workers, :workers, :worker_plugins, :workers_not_supported
+
+ def self.declare_threadsafe!
+ declare_workers_not_supported!
+ @threadsafe = true
+ end
+
+ def self.threadsafe?
+ @threadsafe == true
+ end
+
+ def self.declare_workers_not_supported!(message=nil)
+ @workers_not_supported_message = message
+ @workers_not_supported = true
+ end
+
+ def self.workers_not_supported_message
+ @workers_not_supported_message
+ end
+
+ def self.workers_not_supported?
+ !!@workers_not_supported
+ end

  public
+ # TODO: Remove this in the next major version after Logstash 2.x
+ # Post 2.x it should raise an error and tell people to use the class level
+ # declaration
  def workers_not_supported(message=nil)
- return if @workers == 1
- if message
- @logger.warn(I18n.t("logstash.pipeline.output-worker-unsupported-with-message", :plugin => self.class.config_name, :worker_count => @workers, :message => message))
- else
- @logger.warn(I18n.t("logstash.pipeline.output-worker-unsupported", :plugin => self.class.config_name, :worker_count => @workers))
- end
- @workers = 1
+ self.class.declare_workers_not_supported!(message)
  end

  public
  def initialize(params={})
  super
- config_init(params)
+ config_init(@params)
+
+ # If we're running with a single thread we must enforce single-threaded concurrency by default
+ # Maybe in a future version we'll assume output plugins are threadsafe
+ @single_worker_mutex = Mutex.new
  end

  public
@@ -53,37 +78,9 @@ class LogStash::Outputs::Base < LogStash::Plugin
  end # def receive

  public
- def worker_setup
- if @workers == 1
- @worker_plugins = [self]
- @worker_threads = []
- else
- define_singleton_method(:handle, method(:handle_worker))
- @worker_queue = SizedQueue.new(20)
- @worker_plugins = @workers.times.map { self.class.new(@original_params.merge("workers" => 1)) }
- @worker_threads = @worker_plugins.map.with_index do |plugin, i|
- Thread.new(original_params, @worker_queue) do |params, queue|
- LogStash::Util.set_thread_name(">#{self.class.config_name}.#{i}")
- LogStash::Util.set_thread_plugin(self)
- plugin.register
- while true
- event = queue.pop
- plugin.handle(event)
- end
- end
- end
- end
- end
-
- public
- def handle(event)
- LogStash::Util.set_thread_plugin(self)
- receive(event)
- end # def handle
-
- def handle_worker(event)
- LogStash::Util.set_thread_plugin(self)
- @worker_queue.push(event)
+ # To be overriden in implementations
+ def multi_receive(events)
+ events.each {|event| receive(event) }
  end

  private
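Note (not part of the diff): taken together, an output plugin written against the new 2.2 contract declares threadsafety at the class level and handles batches via multi_receive. A hypothetical minimal plugin; the name and body are invented for illustration:

    # Hypothetical example, not shipped in the gem.
    class LogStash::Outputs::ExampleSink < LogStash::Outputs::Base
      config_name "example_sink"

      # Marks the class threadsafe; the OutputDelegator then keeps a single
      # shared instance instead of a worker pool (see calculate_worker_count above).
      declare_threadsafe!

      def register
        @mutex = Mutex.new
      end

      # Receives a whole batch; the Outputs::Base default simply loops over
      # receive(event), so only batch-aware plugins need to override this.
      def multi_receive(events)
        @mutex.synchronize { events.each { |event| puts event.to_s } }
      end
    end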