logstash-core 6.0.0.alpha1-java → 6.0.0.alpha2-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/agent.rb +81 -45
  5. data/lib/logstash/api/commands/hot_threads_reporter.rb +3 -3
  6. data/lib/logstash/api/commands/node.rb +13 -6
  7. data/lib/logstash/api/commands/stats.rb +18 -6
  8. data/lib/logstash/api/modules/node.rb +7 -0
  9. data/lib/logstash/api/modules/node_stats.rb +12 -5
  10. data/lib/logstash/bootstrap_check/default_config.rb +3 -7
  11. data/lib/logstash/compiler.rb +33 -15
  12. data/lib/logstash/compiler/lscl.rb +16 -8
  13. data/lib/logstash/config/mixin.rb +5 -42
  14. data/lib/logstash/config/pipeline_config.rb +1 -1
  15. data/lib/logstash/config/source/local.rb +28 -13
  16. data/lib/logstash/config/source/multi_local.rb +72 -0
  17. data/lib/logstash/config/source_loader.rb +1 -2
  18. data/lib/logstash/environment.rb +12 -3
  19. data/lib/logstash/execution_context.rb +7 -3
  20. data/lib/logstash/inputs/base.rb +2 -0
  21. data/lib/logstash/instrument/metric_type.rb +0 -2
  22. data/lib/logstash/instrument/periodic_poller/jvm.rb +5 -5
  23. data/lib/logstash/instrument/periodic_poller/pq.rb +1 -1
  24. data/lib/logstash/outputs/base.rb +2 -0
  25. data/lib/logstash/pipeline.rb +31 -14
  26. data/lib/logstash/pipeline_action/create.rb +1 -2
  27. data/lib/logstash/pipeline_action/reload.rb +2 -1
  28. data/lib/logstash/pipeline_settings.rb +50 -0
  29. data/lib/logstash/plugin.rb +1 -0
  30. data/lib/logstash/runner.rb +7 -5
  31. data/lib/logstash/settings.rb +11 -3
  32. data/lib/logstash/shutdown_watcher.rb +26 -0
  33. data/lib/logstash/state_resolver.rb +1 -3
  34. data/lib/logstash/util/dead_letter_queue_manager.rb +61 -0
  35. data/lib/logstash/util/environment_variables.rb +43 -0
  36. data/lib/logstash/util/thread_dump.rb +3 -1
  37. data/lib/logstash/version.rb +1 -1
  38. data/locales/en.yml +4 -0
  39. data/logstash-core.gemspec +4 -1
  40. data/spec/logstash/agent/converge_spec.rb +36 -35
  41. data/spec/logstash/agent_spec.rb +48 -177
  42. data/spec/{api/lib/commands/stats.rb → logstash/api/commands/stats_spec.rb} +7 -2
  43. data/spec/{api/lib → logstash/api}/errors_spec.rb +1 -1
  44. data/spec/{api/lib/api → logstash/api/modules}/logging_spec.rb +1 -10
  45. data/spec/{api/lib/api → logstash/api/modules}/node_plugins_spec.rb +1 -2
  46. data/spec/{api/lib/api → logstash/api/modules}/node_spec.rb +9 -8
  47. data/spec/{api/lib/api → logstash/api/modules}/node_stats_spec.rb +11 -9
  48. data/spec/{api/lib/api → logstash/api/modules}/plugins_spec.rb +4 -3
  49. data/spec/{api/lib/api → logstash/api/modules}/root_spec.rb +2 -2
  50. data/spec/{api/lib → logstash/api}/rack_app_spec.rb +0 -0
  51. data/spec/logstash/compiler/compiler_spec.rb +72 -9
  52. data/spec/logstash/config/source/local_spec.rb +20 -4
  53. data/spec/logstash/config/source/multi_local_spec.rb +113 -0
  54. data/spec/logstash/execution_context_spec.rb +14 -4
  55. data/spec/logstash/inputs/base_spec.rb +1 -1
  56. data/spec/logstash/instrument/wrapped_write_client_spec.rb +34 -19
  57. data/spec/logstash/output_delegator_spec.rb +1 -1
  58. data/spec/logstash/outputs/base_spec.rb +1 -1
  59. data/spec/logstash/pipeline_action/reload_spec.rb +1 -1
  60. data/spec/logstash/pipeline_action/stop_spec.rb +1 -1
  61. data/spec/logstash/pipeline_dlq_commit_spec.rb +107 -0
  62. data/spec/logstash/pipeline_pq_file_spec.rb +3 -1
  63. data/spec/logstash/pipeline_reporter_spec.rb +2 -1
  64. data/spec/logstash/pipeline_spec.rb +54 -43
  65. data/spec/logstash/runner_spec.rb +27 -36
  66. data/spec/logstash/settings/array_coercible_spec.rb +65 -0
  67. data/spec/logstash/settings_spec.rb +91 -0
  68. data/spec/logstash/shutdown_watcher_spec.rb +10 -16
  69. data/spec/logstash/state_resolver_spec.rb +6 -4
  70. data/spec/support/helpers.rb +16 -3
  71. data/spec/support/shared_contexts.rb +26 -2
  72. metadata +42 -39
  73. data/lib/logstash/instrument/metric_type/mean.rb +0 -33
  74. data/spec/api/lib/api/support/resource_dsl_methods.rb +0 -87
  75. data/spec/api/spec_helper.rb +0 -106
@@ -11,7 +11,7 @@ module LogStash module Instrument module PeriodicPoller
     end
 
     def collect
-      pipeline_id, pipeline = @agent.running_pipelines.first
+      pipeline_id, pipeline = @agent.with_running_pipelines {|pipelines| pipelines.first }
       unless pipeline.nil?
         pipeline.collect_stats
       end
@@ -109,6 +109,8 @@ class LogStash::Outputs::Base < LogStash::Plugin
     super
     # There is no easy way to propage an instance variable into the codec, because the codec
     # are created at the class level
+    # TODO(talevy): Codecs should have their own execution_context, for now they will inherit their
+    # parent plugin's
     @codec.execution_context = context
     context
   end
@@ -18,22 +18,28 @@ require "logstash/instrument/null_metric"
 require "logstash/instrument/namespaced_null_metric"
 require "logstash/instrument/collector"
 require "logstash/instrument/wrapped_write_client"
+require "logstash/util/dead_letter_queue_manager"
 require "logstash/output_delegator"
 require "logstash/filter_delegator"
 require "logstash/queue_factory"
 require "logstash/compiler"
 require "logstash/execution_context"
 
+java_import org.logstash.common.DeadLetterQueueFactory
+java_import org.logstash.common.io.DeadLetterQueueWriter
+
 module LogStash; class BasePipeline
   include LogStash::Util::Loggable
 
   attr_reader :settings, :config_str, :config_hash, :inputs, :filters, :outputs, :pipeline_id, :lir, :execution_context
+  attr_reader :pipeline_config
 
-  def initialize(config_str, settings = SETTINGS, namespaced_metric = nil, agent = nil)
+  def initialize(pipeline_config, namespaced_metric = nil, agent = nil)
     @logger = self.logger
 
-    @config_str = config_str
-    @settings = settings
+    @pipeline_config = pipeline_config
+    @config_str = pipeline_config.config_string
+    @settings = pipeline_config.settings
     @config_hash = Digest::SHA1.hexdigest(@config_str)
 
     @lir = compile_lir
@@ -42,14 +48,20 @@ module LogStash; class BasePipeline
     # a unique id when auto-generating plugin ids
     @plugin_counter ||= 0
 
-    @pipeline_id = settings.get_value("pipeline.id") || self.object_id
+    @pipeline_id = @settings.get_value("pipeline.id") || self.object_id
 
     # A list of plugins indexed by id
     @plugins_by_id = {}
     @inputs = nil
    @filters = nil
     @outputs = nil
-    @execution_context = LogStash::ExecutionContext.new(self, agent)
+    @agent = agent
+
+    if settings.get_value("dead_letter_queue.enable")
+      @dlq_writer = DeadLetterQueueFactory.getWriter(pipeline_id, settings.get_value("path.dead_letter_queue"))
+    else
+      @dlq_writer = LogStash::Util::DummyDeadLetterQueueWriter.new
+    end
 
     grammar = LogStashConfigParser.new
     parsed_config = grammar.parse(config_str)
@@ -73,7 +85,8 @@ module LogStash; class BasePipeline
   end
 
   def compile_lir
-    LogStash::Compiler.compile_pipeline(self.config_str)
+    source_with_metadata = org.logstash.common.SourceWithMetadata.new("str", "pipeline", self.config_str)
+    LogStash::Compiler.compile_sources(source_with_metadata)
   end
 
   def plugin(plugin_type, name, *args)
@@ -100,16 +113,18 @@ module LogStash; class BasePipeline
 
     klass = Plugin.lookup(plugin_type, name)
 
+    execution_context = ExecutionContext.new(self, @agent, id, klass.config_name, @dlq_writer)
+
     if plugin_type == "output"
-      OutputDelegator.new(@logger, klass, type_scoped_metric, @execution_context, OutputDelegatorStrategyRegistry.instance, args)
+      OutputDelegator.new(@logger, klass, type_scoped_metric, execution_context, OutputDelegatorStrategyRegistry.instance, args)
     elsif plugin_type == "filter"
-      FilterDelegator.new(@logger, klass, type_scoped_metric, @execution_context, args)
+      FilterDelegator.new(@logger, klass, type_scoped_metric, execution_context, args)
     else # input
       input_plugin = klass.new(args)
       scoped_metric = type_scoped_metric.namespace(id.to_sym)
       scoped_metric.gauge(:name, input_plugin.config_name)
       input_plugin.metric = scoped_metric
-      input_plugin.execution_context = @execution_context
+      input_plugin.execution_context = execution_context
       input_plugin
     end
   end
@@ -147,7 +162,8 @@ module LogStash; class Pipeline < BasePipeline
 
   MAX_INFLIGHT_WARN_THRESHOLD = 10_000
 
-  def initialize(config_str, settings = SETTINGS, namespaced_metric = nil, agent = nil)
+  def initialize(pipeline_config, namespaced_metric = nil, agent = nil)
+    @settings = pipeline_config.settings
     # This needs to be configured before we call super which will evaluate the code to make
     # sure the metric instance is correctly send to the plugins to make the namespace scoping work
     @metric = if namespaced_metric
@@ -180,6 +196,7 @@ module LogStash; class Pipeline < BasePipeline
     )
     @drain_queue = @settings.get_value("queue.drain")
 
+
     @events_filtered = Concurrent::AtomicFixnum.new(0)
     @events_consumed = Concurrent::AtomicFixnum.new(0)
 
@@ -191,8 +208,6 @@ module LogStash; class Pipeline < BasePipeline
     @force_shutdown = Concurrent::AtomicBoolean.new(false)
   end # def initialize
 
-
-
   def ready?
     @ready.value
   end
@@ -230,7 +245,7 @@ module LogStash; class Pipeline < BasePipeline
     # this is useful in the context of pipeline reloading
     collect_stats
 
-    logger.debug("Starting pipeline", default_logging_keys)
+    @logger.debug("Starting pipeline", default_logging_keys)
 
     @finished_execution = Concurrent::AtomicBoolean.new(false)
 
@@ -279,7 +294,7 @@ module LogStash; class Pipeline < BasePipeline
 
     start_workers
 
-    @logger.info("Pipeline started", default_logging_keys)
+    @logger.info("Pipeline started", "pipeline.id" => @pipeline_id)
 
     # Block until all inputs have stopped
     # Generally this happens if SIGINT is sent and `shutdown` is called from an external thread
@@ -305,6 +320,7 @@ module LogStash; class Pipeline < BasePipeline
   def close
     @filter_queue_client.close
     @queue.close
+    @dlq_writer.close
   end
 
   def transition_to_running
@@ -559,6 +575,7 @@ module LogStash; class Pipeline < BasePipeline
     # stopped
     wait_for_workers
     clear_pipeline_metrics
+    @logger.info("Pipeline terminated", "pipeline.id" => @pipeline_id)
   end # def shutdown
 
   def force_shutdown!
@@ -32,7 +32,7 @@ module LogStash module PipelineAction
     # The execute assume that the thread safety access of the pipeline
     # is managed by the caller.
     def execute(agent, pipelines)
-      pipeline = LogStash::Pipeline.new(@pipeline_config.config_string, @pipeline_config.settings, @metric, agent)
+      pipeline = LogStash::Pipeline.new(@pipeline_config, @metric, agent)
 
       status = pipeline.start # block until the pipeline is correctly started or crashed
 
@@ -42,6 +42,5 @@ module LogStash module PipelineAction
 
       LogStash::ConvergeResult::ActionResult.create(self, status)
     end
-
   end
 end end
@@ -27,7 +27,7 @@ module LogStash module PipelineAction
       end
 
       begin
-        pipeline_validator = LogStash::BasePipeline.new(@pipeline_config.config_string, @pipeline_config.settings)
+        pipeline_validator = LogStash::BasePipeline.new(@pipeline_config)
       rescue => e
        return LogStash::ConvergeResult::FailedAction.from_exception(e)
       end
@@ -36,6 +36,7 @@ module LogStash module PipelineAction
         return LogStash::ConvergeResult::FailedAction.new("Cannot reload pipeline, because the new pipeline is not reloadable")
       end
 
+      logger.info("Reloading pipeline", "pipeline.id" => pipeline_id)
       status = Stop.new(pipeline_id).execute(agent, pipelines)
 
       if status
@@ -0,0 +1,50 @@
+# encoding: utf-8
+require "logstash/settings"
+
+module LogStash
+  class PipelineSettings < Settings
+
+    # there are settings that the pipeline uses and can be changed per pipeline instance
+    SETTINGS_WHITE_LIST = [
+      "config.debug",
+      "config.reload.automatic",
+      "config.reload.interval",
+      "config.string",
+      "dead_letter_queue.enable",
+      "metric.collect",
+      "path.config",
+      "path.queue",
+      "pipeline.batch.delay",
+      "pipeline.batch.size",
+      "pipeline.id",
+      "pipeline.output.workers",
+      "pipeline.reloadable",
+      "pipeline.system",
+      "pipeline.workers",
+      "queue.checkpoint.acks",
+      "queue.checkpoint.interval",
+      "queue.checkpoint.writes",
+      "queue.drain",
+      "queue.max_bytes",
+      "queue.max_events",
+      "queue.page_capacity",
+      "queue.type",
+    ]
+
+    # register a set of settings that is used as the default set of pipelines settings
+    def self.from_settings(settings)
+      pipeline_settings = self.new
+      SETTINGS_WHITE_LIST.each do |setting|
+        pipeline_settings.register(settings.get_setting(setting).clone)
+      end
+      pipeline_settings
+    end
+
+    def register(setting)
+      unless SETTINGS_WHITE_LIST.include?(setting.name)
+        raise ArgumentError.new("Only pipeline related settings can be registed in a PipelineSettings object. Received \"#{setting.name}\". Allowed settings: #{SETTINGS_WHITE_LIST}")
+      end
+      super(setting)
+    end
+  end
+end
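
The new PipelineSettings class above is what lets each pipeline carry its own copy of the whitelisted settings. A minimal usage sketch follows; it is hedged, and the overrides shown are illustrative rather than taken from this diff:

# Derive a per-pipeline settings object from the global settings.
pipeline_settings = LogStash::PipelineSettings.from_settings(LogStash::SETTINGS)

# Whitelisted settings can then be overridden per pipeline instance.
pipeline_settings.set("pipeline.id", "my-pipeline")
pipeline_settings.set("pipeline.workers", 2)

# Registering anything outside SETTINGS_WHITE_LIST raises ArgumentError:
# pipeline_settings.register(LogStash::SETTINGS.get_setting("path.logs").clone)  # => ArgumentError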
@@ -3,6 +3,7 @@ require "logstash/namespace"
 require "logstash/logging"
 require "logstash/config/mixin"
 require "logstash/instrument/null_metric"
+require "logstash/util/dead_letter_queue_manager"
 require "concurrent"
 require "securerandom"
 
@@ -165,7 +165,7 @@ class LogStash::Runner < Clamp::StrictCommand
     I18n.t("logstash.runner.flag.quiet"),
     :new_flag => "log.level", :new_value => "error"
 
-  attr_reader :agent, :settings
+  attr_reader :agent, :settings, :source_loader
   attr_accessor :bootstrap_checks
 
   def initialize(*args)
@@ -173,7 +173,9 @@ class LogStash::Runner < Clamp::StrictCommand
     @bootstrap_checks = DEFAULT_BOOTSTRAP_CHECKS.dup
 
     # Default we check local sources: `-e`, `-f` and the logstash.yml options.
-    LogStash::Config::SOURCE_LOADER.add_source(LogStash::Config::Source::Local.new(@settings))
+    @source_loader = LogStash::Config::SourceLoader.new(@settings)
+    @source_loader.add_source(LogStash::Config::Source::Local.new(@settings))
+    @source_loader.add_source(LogStash::Config::Source::MultiLocal.new(@settings))
 
     super(*args)
   end
@@ -266,12 +268,12 @@ class LogStash::Runner < Clamp::StrictCommand
 
     if setting("config.test_and_exit")
       begin
-        results = LogStash::Config::SOURCE_LOADER.fetch
+        results = @source_loader.fetch
 
         # TODO(ph): make it better for multiple pipeline
         if results.success?
           results.response.each do |pipeline_config|
-            LogStash::BasePipeline.new(pipeline_config.config_string)
+            LogStash::BasePipeline.new(pipeline_config)
           end
           puts "Configuration OK"
           logger.info "Using config.test_and_exit mode. Config Validation Result: OK. Exiting Logstash"
@@ -289,7 +291,7 @@ class LogStash::Runner < Clamp::StrictCommand
     @data_path_lock = FileLockFactory.getDefault().obtainLock(setting("path.data"), ".lock");
 
     @dispatcher.fire(:before_agent)
-    @agent = create_agent(@settings, LogStash::Config::SOURCE_LOADER)
+    @agent = create_agent(@settings, @source_loader)
     @dispatcher.fire(:after_agent)
 
     # enable sigint/sigterm before starting the agent
@@ -2,11 +2,14 @@
 require "logstash/util/loggable"
 require "fileutils"
 require "logstash/util/byte_value"
+require "logstash/util/environment_variables"
 require "logstash/util/time_value"
 
 module LogStash
   class Settings
 
+    include LogStash::Util::EnvironmentVariables
+
     def initialize
       @settings = {}
       # Theses settings were loaded from the yaml file
@@ -106,9 +109,9 @@ module LogStash
       @settings.values.each(&:reset)
     end
 
-    def from_yaml(yaml_path)
-      settings = read_yaml(::File.join(yaml_path, "logstash.yml"))
-      self.merge(flatten_hash(settings), true)
+    def from_yaml(yaml_path, file_name="logstash.yml")
+      settings = read_yaml(::File.join(yaml_path, file_name))
+      self.merge(deep_replace(flatten_hash(settings)), true)
       self
     end
 
@@ -134,6 +137,11 @@ module LogStash
       end
     end
 
+    def ==(other)
+      return false unless other.kind_of?(::LogStash::Settings)
+      self.to_hash == other.to_hash
+    end
+
     private
     def read_yaml(path)
       YAML.safe_load(IO.read(path)) || {}
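
Two things change in settings loading here: from_yaml now accepts an explicit file name, and the loaded values pass through deep_replace, which comes from the newly required LogStash::Util::EnvironmentVariables mixin and presumably substitutes ${VAR}-style references before merging. A hedged sketch, where settings_dir is a hypothetical directory containing the YAML files:

# Reads logstash.yml from settings_dir, as before.
LogStash::SETTINGS.from_yaml(settings_dir)

# New: any other file name in that directory can be loaded through the same path.
LogStash::SETTINGS.from_yaml(settings_dir, "other-settings.yml")

# The new #== compares by to_hash, so two settings objects holding the same
# values are equal even though they are distinct instances.
LogStash::PipelineSettings.from_settings(LogStash::SETTINGS) ==
  LogStash::PipelineSettings.from_settings(LogStash::SETTINGS)  # => true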
@@ -1,4 +1,6 @@
 # encoding: utf-8
+require "concurrent/atomic/atomic_fixnum"
+require "concurrent/atomic/atomic_boolean"
 
 module LogStash
   class ShutdownWatcher
@@ -16,6 +18,8 @@ module LogStash
       @report_every = report_every
       @abort_threshold = abort_threshold
       @reports = []
+      @attempts_count = Concurrent::AtomicFixnum.new(0)
+      @running = Concurrent::AtomicBoolean.new(false)
     end
 
     def self.unsafe_shutdown=(boolean)
@@ -35,11 +39,26 @@ module LogStash
       self.class.logger
     end
 
+    def attempts_count
+      @attempts_count.value
+    end
+
+    def stop!
+      @running.make_false
+    end
+
+    def stopped?
+      @running.false?
+    end
+
     def start
       sleep(@cycle_period)
       cycle_number = 0
       stalled_count = 0
+      running!
       Stud.interval(@cycle_period) do
+        @attempts_count.increment
+        break if stopped?
         break unless @pipeline.thread.alive?
         @reports << pipeline_report_snapshot
         @reports.delete_at(0) if @reports.size > @report_every # expire old report
@@ -61,6 +80,8 @@ module LogStash
         end
         cycle_number = (cycle_number + 1) % @report_every
       end
+    ensure
+      stop!
     end
 
     def pipeline_report_snapshot
@@ -90,5 +111,10 @@ module LogStash
     def force_exit
       exit(-1)
     end
+
+    private
+    def running!
+      @running.make_true
+    end
   end
 end
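
With these additions the watcher can be observed and cancelled from outside: attempts_count reports how many monitoring cycles have run, stop! breaks the Stud.interval loop, and the ensure block clears the running flag even when the loop exits on its own. A hedged sketch of a supervising thread; the pipeline object and the timing values are placeholders:

# "pipeline" stands for an object responding to #thread and #reporter,
# which is what ShutdownWatcher expects.
watcher = LogStash::ShutdownWatcher.new(pipeline, 1.0)  # check once per second
thread  = Thread.new { watcher.start }                  # blocks until stopped

sleep 5
puts watcher.attempts_count  # roughly how many cycles have run so far
watcher.stop!                # ask the watcher to break out of its loop
thread.join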
@@ -19,9 +19,7 @@ module LogStash
       if pipeline.nil?
         actions << LogStash::PipelineAction::Create.new(pipeline_config, @metric)
       else
-        # TODO(ph): The pipeline should keep a reference to the original PipelineConfig
-        # and we could use straight comparison.
-        if pipeline_config.config_hash != pipeline.config_hash
+        if pipeline_config != pipeline.pipeline_config
           actions << LogStash::PipelineAction::Reload.new(pipeline_config, @metric)
         end
       end
@@ -0,0 +1,61 @@
+require 'logstash/environment'
+
+module LogStash; module Util
+  class PluginDeadLetterQueueWriter
+
+    attr_reader :plugin_id, :plugin_type, :inner_writer
+
+    def initialize(inner_writer, plugin_id, plugin_type)
+      @plugin_id = plugin_id
+      @plugin_type = plugin_type
+      @inner_writer = inner_writer
+    end
+
+    def write(logstash_event, reason)
+      if @inner_writer && @inner_writer.is_open
+        @inner_writer.writeEntry(logstash_event.to_java, @plugin_type, @plugin_id, reason)
+      end
+    end
+
+    def close
+      if @inner_writer && @inner_writer.is_open
+        @inner_writer.close
+      end
+    end
+  end
+
+  class DummyDeadLetterQueueWriter
+    # class uses to represent a writer when dead_letter_queue is disabled
+    def initialize
+    end
+
+    def write(logstash_event, reason)
+      # noop
+    end
+
+    def is_open
+      false
+    end
+
+    def close
+      # noop
+    end
+  end
+
+  class DeadLetterQueueFactory
+    java_import org.logstash.common.DeadLetterQueueFactory
+
+    def self.get(pipeline_id)
+      if LogStash::SETTINGS.get("dead_letter_queue.enable")
+        return DeadLetterQueueWriter.new(
+          DeadLetterQueueFactory.getWriter(pipeline_id, LogStash::SETTINGS.get("path.dead_letter_queue")))
+      else
+        return DeadLetterQueueWriter.new(nil)
+      end
+    end
+
+    def self.close(pipeline_id)
+      DeadLetterQueueFactory.close(pipeline_id)
+    end
+  end
+end end
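
In practice a plugin does not call this factory directly: BasePipeline (see the changes above) builds one writer per pipeline and passes it to every plugin through its execution context, where it is presumably wrapped in a PluginDeadLetterQueueWriter so that entries carry the plugin's id and type. A hedged sketch of the writer's contract; the event and reason values are illustrative only:

# "inner" stands for the Java DeadLetterQueueWriter obtained from the factory,
# or nil when the DLQ is disabled (writes then become silent no-ops).
dlq = LogStash::Util::PluginDeadLetterQueueWriter.new(inner, "my-output-id", "output")

event = LogStash::Event.new("message" => "mapping conflict example")
dlq.write(event, "Could not index event into Elasticsearch")  # no-op if the writer is closed or nil

dlq.close  # closes the underlying writer only if it is still open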