logstash-core 5.6.16-java → 6.0.0.alpha1-java

This diff compares the contents of two publicly released versions of the package, as published to their respective public registries, and is provided for informational purposes only.
Files changed (156)
  1. checksums.yaml +4 -4
  2. data/gemspec_jars.rb +4 -7
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/version.rb +4 -8
  5. data/lib/logstash-core_jars.rb +12 -26
  6. data/lib/logstash/agent.rb +261 -246
  7. data/lib/logstash/api/commands/default_metadata.rb +1 -1
  8. data/lib/logstash/api/commands/hot_threads_reporter.rb +5 -11
  9. data/lib/logstash/api/commands/node.rb +3 -2
  10. data/lib/logstash/api/commands/stats.rb +3 -2
  11. data/lib/logstash/bootstrap_check/bad_java.rb +16 -0
  12. data/lib/logstash/bootstrap_check/bad_ruby.rb +12 -0
  13. data/lib/logstash/bootstrap_check/default_config.rb +17 -0
  14. data/lib/logstash/compiler.rb +38 -0
  15. data/lib/logstash/compiler/lscl.rb +566 -0
  16. data/lib/logstash/compiler/lscl/lscl_grammar.rb +3503 -0
  17. data/lib/logstash/compiler/treetop_monkeypatches.rb +92 -0
  18. data/lib/logstash/config/config_ast.rb +4 -82
  19. data/lib/logstash/config/mixin.rb +73 -41
  20. data/lib/logstash/config/pipeline_config.rb +48 -0
  21. data/lib/logstash/config/source/base.rb +16 -0
  22. data/lib/logstash/config/source/local.rb +215 -0
  23. data/lib/logstash/config/source_loader.rb +125 -0
  24. data/lib/logstash/converge_result.rb +103 -0
  25. data/lib/logstash/environment.rb +6 -19
  26. data/lib/logstash/errors.rb +2 -0
  27. data/lib/logstash/execution_context.rb +4 -7
  28. data/lib/logstash/filter_delegator.rb +6 -9
  29. data/lib/logstash/inputs/base.rb +0 -2
  30. data/lib/logstash/instrument/collector.rb +5 -7
  31. data/lib/logstash/instrument/metric_store.rb +12 -12
  32. data/lib/logstash/instrument/metric_type/mean.rb +0 -5
  33. data/lib/logstash/instrument/namespaced_metric.rb +0 -4
  34. data/lib/logstash/instrument/namespaced_null_metric.rb +0 -4
  35. data/lib/logstash/instrument/null_metric.rb +0 -10
  36. data/lib/logstash/instrument/periodic_poller/cgroup.rb +85 -168
  37. data/lib/logstash/instrument/periodic_poller/jvm.rb +5 -5
  38. data/lib/logstash/instrument/periodic_poller/pq.rb +3 -7
  39. data/lib/logstash/instrument/periodic_pollers.rb +1 -3
  40. data/lib/logstash/instrument/wrapped_write_client.rb +24 -33
  41. data/lib/logstash/logging/logger.rb +15 -47
  42. data/lib/logstash/namespace.rb +0 -1
  43. data/lib/logstash/output_delegator.rb +5 -7
  44. data/lib/logstash/outputs/base.rb +0 -2
  45. data/lib/logstash/pipeline.rb +159 -87
  46. data/lib/logstash/pipeline_action.rb +13 -0
  47. data/lib/logstash/pipeline_action/base.rb +29 -0
  48. data/lib/logstash/pipeline_action/create.rb +47 -0
  49. data/lib/logstash/pipeline_action/reload.rb +48 -0
  50. data/lib/logstash/pipeline_action/stop.rb +23 -0
  51. data/lib/logstash/plugin.rb +0 -1
  52. data/lib/logstash/plugins/hooks_registry.rb +6 -0
  53. data/lib/logstash/plugins/registry.rb +0 -1
  54. data/lib/logstash/program.rb +14 -0
  55. data/lib/logstash/queue_factory.rb +5 -1
  56. data/lib/logstash/runner.rb +58 -80
  57. data/lib/logstash/settings.rb +3 -27
  58. data/lib/logstash/state_resolver.rb +41 -0
  59. data/lib/logstash/util/java_version.rb +6 -0
  60. data/lib/logstash/util/safe_uri.rb +12 -148
  61. data/lib/logstash/util/thread_dump.rb +4 -7
  62. data/lib/logstash/util/wrapped_acked_queue.rb +36 -39
  63. data/lib/logstash/util/wrapped_synchronous_queue.rb +29 -39
  64. data/lib/logstash/version.rb +10 -8
  65. data/locales/en.yml +3 -54
  66. data/logstash-core.gemspec +8 -35
  67. data/spec/{logstash/api/modules → api/lib/api}/logging_spec.rb +10 -1
  68. data/spec/{logstash/api/modules → api/lib/api}/node_plugins_spec.rb +2 -1
  69. data/spec/{logstash/api/modules → api/lib/api}/node_spec.rb +3 -3
  70. data/spec/{logstash/api/modules → api/lib/api}/node_stats_spec.rb +3 -7
  71. data/spec/{logstash/api/modules → api/lib/api}/plugins_spec.rb +3 -4
  72. data/spec/{logstash/api/modules → api/lib/api}/root_spec.rb +2 -2
  73. data/spec/api/lib/api/support/resource_dsl_methods.rb +87 -0
  74. data/spec/{logstash/api/commands/stats_spec.rb → api/lib/commands/stats.rb} +2 -7
  75. data/spec/{logstash/api → api/lib}/errors_spec.rb +1 -1
  76. data/spec/{logstash/api → api/lib}/rack_app_spec.rb +0 -0
  77. data/spec/api/spec_helper.rb +106 -0
  78. data/spec/logstash/agent/converge_spec.rb +286 -0
  79. data/spec/logstash/agent/metrics_spec.rb +244 -0
  80. data/spec/logstash/agent_spec.rb +213 -225
  81. data/spec/logstash/compiler/compiler_spec.rb +584 -0
  82. data/spec/logstash/config/config_ast_spec.rb +8 -47
  83. data/spec/logstash/config/mixin_spec.rb +2 -42
  84. data/spec/logstash/config/pipeline_config_spec.rb +75 -0
  85. data/spec/logstash/config/source/local_spec.rb +395 -0
  86. data/spec/logstash/config/source_loader_spec.rb +122 -0
  87. data/spec/logstash/converge_result_spec.rb +179 -0
  88. data/spec/logstash/event_spec.rb +0 -66
  89. data/spec/logstash/execution_context_spec.rb +8 -12
  90. data/spec/logstash/filter_delegator_spec.rb +12 -24
  91. data/spec/logstash/inputs/base_spec.rb +7 -5
  92. data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +92 -225
  93. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  94. data/spec/logstash/instrument/periodic_poller/os_spec.rb +32 -29
  95. data/spec/logstash/instrument/wrapped_write_client_spec.rb +33 -33
  96. data/spec/logstash/legacy_ruby_event_spec.rb +13 -4
  97. data/spec/logstash/output_delegator_spec.rb +11 -20
  98. data/spec/logstash/outputs/base_spec.rb +7 -5
  99. data/spec/logstash/pipeline_action/create_spec.rb +83 -0
  100. data/spec/logstash/pipeline_action/reload_spec.rb +83 -0
  101. data/spec/logstash/pipeline_action/stop_spec.rb +37 -0
  102. data/spec/logstash/pipeline_pq_file_spec.rb +1 -1
  103. data/spec/logstash/pipeline_spec.rb +81 -137
  104. data/spec/logstash/plugin_spec.rb +2 -1
  105. data/spec/logstash/plugins/hooks_registry_spec.rb +6 -0
  106. data/spec/logstash/queue_factory_spec.rb +13 -1
  107. data/spec/logstash/runner_spec.rb +29 -140
  108. data/spec/logstash/settings/writable_directory_spec.rb +10 -13
  109. data/spec/logstash/settings_spec.rb +0 -91
  110. data/spec/logstash/state_resolver_spec.rb +156 -0
  111. data/spec/logstash/timestamp_spec.rb +2 -6
  112. data/spec/logstash/util/java_version_spec.rb +22 -0
  113. data/spec/logstash/util/safe_uri_spec.rb +0 -56
  114. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +22 -0
  115. data/spec/support/helpers.rb +9 -11
  116. data/spec/support/matchers.rb +96 -6
  117. data/spec/support/mocks_classes.rb +80 -0
  118. data/spec/support/shared_contexts.rb +2 -27
  119. metadata +100 -149
  120. data/lib/logstash/config/loader.rb +0 -107
  121. data/lib/logstash/config/modules_common.rb +0 -103
  122. data/lib/logstash/config/source/modules.rb +0 -55
  123. data/lib/logstash/config/string_escape.rb +0 -27
  124. data/lib/logstash/dependency_report.rb +0 -131
  125. data/lib/logstash/dependency_report_runner.rb +0 -17
  126. data/lib/logstash/elasticsearch_client.rb +0 -142
  127. data/lib/logstash/instrument/global_metrics.rb +0 -13
  128. data/lib/logstash/instrument/periodic_poller/dlq.rb +0 -24
  129. data/lib/logstash/modules/cli_parser.rb +0 -74
  130. data/lib/logstash/modules/elasticsearch_config.rb +0 -22
  131. data/lib/logstash/modules/elasticsearch_importer.rb +0 -37
  132. data/lib/logstash/modules/elasticsearch_resource.rb +0 -10
  133. data/lib/logstash/modules/file_reader.rb +0 -36
  134. data/lib/logstash/modules/kibana_base.rb +0 -24
  135. data/lib/logstash/modules/kibana_client.rb +0 -124
  136. data/lib/logstash/modules/kibana_config.rb +0 -105
  137. data/lib/logstash/modules/kibana_dashboards.rb +0 -36
  138. data/lib/logstash/modules/kibana_importer.rb +0 -17
  139. data/lib/logstash/modules/kibana_resource.rb +0 -10
  140. data/lib/logstash/modules/kibana_settings.rb +0 -40
  141. data/lib/logstash/modules/logstash_config.rb +0 -120
  142. data/lib/logstash/modules/resource_base.rb +0 -38
  143. data/lib/logstash/modules/scaffold.rb +0 -52
  144. data/lib/logstash/modules/settings_merger.rb +0 -23
  145. data/lib/logstash/modules/util.rb +0 -17
  146. data/lib/logstash/util/dead_letter_queue_manager.rb +0 -61
  147. data/lib/logstash/util/environment_variables.rb +0 -43
  148. data/spec/logstash/config/loader_spec.rb +0 -38
  149. data/spec/logstash/config/string_escape_spec.rb +0 -24
  150. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +0 -17
  151. data/spec/logstash/modules/logstash_config_spec.rb +0 -56
  152. data/spec/logstash/modules/scaffold_spec.rb +0 -234
  153. data/spec/logstash/pipeline_dlq_commit_spec.rb +0 -109
  154. data/spec/logstash/settings/splittable_string_array_spec.rb +0 -51
  155. data/spec/logstash/util/wrapped_acked_queue_spec.rb +0 -49
  156. data/versions-gem-copy.yml +0 -12
@@ -79,7 +79,7 @@ describe LogStash::Pipeline do
  let(:number_of_events) { 100_000 }
  let(:page_capacity) { 1 * 1024 * 512 } # 1 128
  let(:max_bytes) { 1024 * 1024 * 1024 } # 1 gb
- let(:queue_type) { "persisted" } # "memory"
+ let(:queue_type) { "persisted" } # "memory" "memory_acked"
  let(:times) { [] }

  let(:pipeline_thread) do
@@ -3,9 +3,8 @@ require "spec_helper"
  require "logstash/inputs/generator"
  require "logstash/filters/multiline"
  require_relative "../support/mocks_classes"
+ require_relative "../support/helpers"
  require_relative "../logstash/pipeline_reporter_spec" # for DummyOutput class
- require "stud/try"
- require 'timeout'

  class DummyInput < LogStash::Inputs::Base
  config_name "dummyinput"
@@ -82,21 +81,6 @@ class DummySafeFilter < LogStash::Filters::Base
  def close() end
  end

- class DummyFlushingFilter < LogStash::Filters::Base
- config_name "dummyflushingfilter"
- milestone 2
-
- def register() end
- def filter(event) end
- def periodic_flush
- true
- end
- def flush(options)
- return [::LogStash::Event.new("message" => "dummy_flush")]
- end
- def close() end
- end
-
  class TestPipeline < LogStash::Pipeline
  attr_reader :outputs, :settings
  end
@@ -105,24 +89,20 @@ describe LogStash::Pipeline do
  let(:worker_thread_count) { 5 }
  let(:safe_thread_count) { 1 }
  let(:override_thread_count) { 42 }
- let(:dead_letter_queue_enabled) { false }
- let(:dead_letter_queue_path) { }
- let(:pipeline_settings_obj) { LogStash::SETTINGS.clone }
+ let(:pipeline_settings_obj) { LogStash::SETTINGS }
  let(:pipeline_settings) { {} }
- let(:max_retry) {10} #times
- let(:timeout) {120} #seconds

  before :each do
  pipeline_workers_setting = LogStash::SETTINGS.get_setting("pipeline.workers")
  allow(pipeline_workers_setting).to receive(:default).and_return(worker_thread_count)
- dlq_enabled_setting = LogStash::SETTINGS.get_setting("dead_letter_queue.enable")
- allow(dlq_enabled_setting).to receive(:value).and_return(dead_letter_queue_enabled)
- dlq_path_setting = LogStash::SETTINGS.get_setting("path.dead_letter_queue")
- allow(dlq_path_setting).to receive(:value).and_return(dead_letter_queue_path)
-
  pipeline_settings.each {|k, v| pipeline_settings_obj.set(k, v) }
  end

+ after :each do
+ pipeline_settings_obj.reset
+ end
+
+
  describe "event cancellation" do
  # test harness for https://github.com/elastic/logstash/issues/6055

@@ -161,16 +141,12 @@ describe LogStash::Pipeline do

  pipeline = LogStash::Pipeline.new(config, pipeline_settings_obj)
  t = Thread.new { pipeline.run }
- Timeout.timeout(timeout) do
- sleep(0.1) until pipeline.ready?
- end
- Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
- wait(3).for do
- # give us a bit of time to flush the events
- # puts("*****" + output.events.map{|e| e.message}.to_s)
- output.events.map{|e| e.get("message")}.include?("END")
- end.to be_truthy
- end
+ sleep(0.1) until pipeline.ready?
+ wait(3).for do
+ # give us a bit of time to flush the events
+ # puts("*****" + output.events.map{|e| e.message}.to_s)
+ output.events.map{|e| e.get("message")}.include?("END")
+ end.to be_truthy
  expect(output.events.size).to eq(2)
  expect(output.events[0].get("tags")).to eq(["notdropped"])
  expect(output.events[1].get("tags")).to eq(["notdropped"])
@@ -228,14 +204,6 @@ describe LogStash::Pipeline do
  pipeline = TestPipeline.new(test_config_with_filters, pipeline_settings_obj)
  pipeline.close
  end
-
- it "should log each filtered event if config.debug is set to true" do
- pipeline_settings_obj.set("config.debug", true)
- pipeline = TestPipeline.new(test_config_with_filters, pipeline_settings_obj)
- expect(logger).to receive(:debug).with(/filter received/, anything)
- pipeline.filter_func([LogStash::Event.new])
- pipeline.close
- end
  end

  context "when there is no command line -w N set" do
@@ -243,7 +211,7 @@ describe LogStash::Pipeline do
  msg = "Defaulting pipeline worker threads to 1 because there are some filters that might not work with multiple worker threads"
  pipeline = TestPipeline.new(test_config_with_filters)
  expect(pipeline.logger).to receive(:warn).with(msg,
- {:count_was=>worker_thread_count, :filters=>["dummyfilter"]})
+ hash_including({:count_was=>worker_thread_count, :filters=>["dummyfilter"]}))
  pipeline.run
  expect(pipeline.worker_threads.size).to eq(safe_thread_count)
  pipeline.shutdown
@@ -256,8 +224,7 @@ describe LogStash::Pipeline do
  msg = "Warning: Manual override - there are filters that might" +
  " not work with multiple worker threads"
  pipeline = TestPipeline.new(test_config_with_filters, pipeline_settings_obj)
- expect(pipeline.logger).to receive(:warn).with(msg,
- {:worker_threads=> override_thread_count, :filters=>["dummyfilter"]})
+ expect(pipeline.logger).to receive(:warn).with(msg, hash_including({:worker_threads=> override_thread_count, :filters=>["dummyfilter"]}))
  pipeline.run
  expect(pipeline.worker_threads.size).to eq(override_thread_count)
  pipeline.shutdown
@@ -283,7 +250,6 @@ describe LogStash::Pipeline do
  }

  it "starts multiple filter threads" do
- skip("This test has been failing periodically since November 2016. Tracked as https://github.com/elastic/logstash/issues/6245")
  pipeline = TestPipeline.new(test_config_with_filters)
  pipeline.run
  expect(pipeline.worker_threads.size).to eq(worker_thread_count)
@@ -427,9 +393,7 @@ describe LogStash::Pipeline do
  # race condition if called in the thread
  p = pipeline
  t = Thread.new { p.run }
- Timeout.timeout(timeout) do
- sleep(0.1) until pipeline.ready?
- end
+ sleep(0.1) until pipeline.ready?
  pipeline.shutdown
  t.join
  end
@@ -442,7 +406,7 @@ describe LogStash::Pipeline do
  let(:batch_size) { LogStash::Pipeline::MAX_INFLIGHT_WARN_THRESHOLD + 1 }

  it "should raise a max inflight warning if the max_inflight count is exceeded" do
- expect(logger).to have_received(:warn).with(warning_prefix)
+ expect(logger).to have_received(:warn).with(warning_prefix, hash_including(:pipeline_id => anything))
  end
  end
  end
@@ -593,23 +557,30 @@ describe LogStash::Pipeline do

  it "should handle evaluating different config" do
  expect(pipeline1.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
+ expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
  expect(pipeline2.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
+ expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
  end
  end

  context "Periodic Flush" do
+ let(:number_of_events) { 100 }
  let(:config) do
  <<-EOS
  input {
- dummy_input {}
+ generator {
+ count => #{number_of_events}
+ }
  }
  filter {
- dummy_flushing_filter {}
+ multiline {
+ pattern => "^NeverMatch"
+ negate => true
+ what => "previous"
+ }
  }
  output {
- dummy_output {}
+ dummyoutput {}
  }
  EOS
  end
@@ -617,31 +588,24 @@ describe LogStash::Pipeline do

  before do
  allow(::LogStash::Outputs::DummyOutput).to receive(:new).with(any_args).and_return(output)
- allow(LogStash::Plugin).to receive(:lookup).with("input", "dummy_input").and_return(DummyInput)
- allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummy_flushing_filter").and_return(DummyFlushingFilter)
- allow(LogStash::Plugin).to receive(:lookup).with("output", "dummy_output").and_return(::LogStash::Outputs::DummyOutput)
+ allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
  allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
+ allow(LogStash::Plugin).to receive(:lookup).with("filter", "multiline").and_return(LogStash::Filters::Multiline)
+ allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
  end

- it "flush periodically" do
+ it "flushes the buffered contents of the filter" do
  Thread.abort_on_exception = true
-
  pipeline = LogStash::Pipeline.new(config, pipeline_settings_obj)
  t = Thread.new { pipeline.run }
- Timeout.timeout(timeout) do
- sleep(0.1) until pipeline.ready?
- end
- Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
- wait(10).for do
- # give us a bit of time to flush the events
- output.events.empty?
- end.to be_falsey
- end
-
- expect(output.events.any? {|e| e.get("message") == "dummy_flush"}).to eq(true)
-
+ sleep(0.1) until pipeline.ready?
+ wait(3).for do
+ # give us a bit of time to flush the events
+ output.events.empty?
+ end.to be_falsey
+ event = output.events.pop
+ expect(event.get("message").count("\n")).to eq(99)
  pipeline.shutdown
-
  t.join
  end
  end
@@ -676,9 +640,9 @@ describe LogStash::Pipeline do
  # in the current instance and was returning an array containing nil values for
  # the match.
  expect(pipeline1.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
+ expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
  expect(pipeline2.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
+ expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
  end
  end

@@ -736,12 +700,9 @@ describe LogStash::Pipeline do
  # subject must be first call outside the thread context because of lazy initialization
  s = subject
  t = Thread.new { s.run }
- Timeout.timeout(timeout) do
- sleep(0.1) until subject.ready?
- end
- Timeout.timeout(timeout) do
- sleep(0.1)
- end
+ sleep(0.1) until subject.ready?
+
+ sleep(0.1)
  expect(subject.uptime).to be > 0
  subject.shutdown
  t.join
@@ -805,17 +766,13 @@ describe LogStash::Pipeline do
  allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)

  pipeline_thread
- Timeout.timeout(timeout) do
- sleep(0.1) until subject.ready?
- end
+ sleep(0.1) until subject.ready?

  # make sure we have received all the generated events
- Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
- wait(3).for do
- # give us a bit of time to flush the events
- dummyoutput.events.size >= number_of_events
- end.to be_truthy
- end
+ wait(3).for do
+ # give us a bit of time to flush the events
+ dummyoutput.events.size >= number_of_events
+ end.to be_truthy
  end

  after :each do
@@ -872,33 +829,6 @@ describe LogStash::Pipeline do
  expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:name].value).to eq(LogStash::Filters::Multiline.config_name)
  end
  end
-
- context 'when dlq is disabled' do
- let (:collect_stats) { subject.collect_dlq_stats}
- let (:collected_stats) { collected_metric[:stats][:pipelines][:main][:dlq]}
- let (:available_stats) {[:path, :queue_size_in_bytes]}
-
- it 'should show not show any dlq stats' do
- collect_stats
- expect(collected_stats).to be_nil
- end
-
- end
-
- context 'when dlq is enabled' do
- let (:dead_letter_queue_enabled) { true }
- let (:dead_letter_queue_path) { Stud::Temporary.directory }
- let (:pipeline_dlq_path) { "#{dead_letter_queue_path}/#{pipeline_id}"}
-
- let (:collect_stats) { subject.collect_dlq_stats }
- let (:collected_stats) { collected_metric[:stats][:pipelines][:main][:dlq]}
-
- it 'should show dlq stats' do
- collect_stats
- # A newly created dead letter queue with no entries will have a size of 1 (the version 'header')
- expect(collected_stats[:queue_size_in_bytes].value).to eq(1)
- end
- end
  end
  end

@@ -925,38 +855,52 @@ describe LogStash::Pipeline do
  end
  end

- describe "#system?" do
+ context "#system" do
  after do
  pipeline.close # close the queue
  end

- let(:pipeline) { LogStash::Pipeline.new(config_string, settings) }
- let(:config_string) { "input { generator {} } output { null {} }" }
-
  context "when the pipeline is a system pipeline" do
- let(:settings) do
- s = LogStash::SETTINGS.clone
- s.set("pipeline.system", true)
- s.set("config.string", config_string)
- s
+ let(:pipeline) { LogStash::Pipeline.new("input { generator {} } output { null {} }", mock_settings("pipeline.system" => true)) }
+ it "returns true" do
+ expect(pipeline.system?).to be_truthy
  end
+ end

+ context "when the pipeline is not a system pipeline" do
+ let(:pipeline) { LogStash::Pipeline.new("input { generator {} } output { null {} }", mock_settings("pipeline.system" => false)) }
+ it "returns true" do
+ expect(pipeline.system?).to be_falsey
+ end
+ end
+ end
+
+ context "#reloadable?" do
+ after do
+ pipeline.close # close the queue
+ end
+
+ context "when all plugins are reloadable and pipeline is configured as reloadable" do
+ let(:pipeline) { LogStash::Pipeline.new("input { generator {} } output { null {} }", mock_settings("pipeline.reloadable" => true)) }

  it "returns true" do
- expect(pipeline.system?).to be_truthy
+ expect(pipeline.reloadable?).to be_truthy
  end
  end

- context "when the pipeline is not a system pipeline" do
- let(:settings) do
- s = LogStash::SETTINGS.clone
- s.set("pipeline.system", false)
- s.set("config.string", config_string)
- s
+ context "when the plugins are not reloadable and pipeline is configured as reloadable" do
+ let(:pipeline) { LogStash::Pipeline.new("input { stdin {} } output { null {} }", mock_settings("pipeline.reloadable" => true)) }
+
+ it "returns true" do
+ expect(pipeline.reloadable?).to be_falsey
  end
+ end
+
+ context "when all plugins are reloadable and pipeline is configured as non-reloadable" do
+ let(:pipeline) { LogStash::Pipeline.new("input { generator {} } output { null {} }", mock_settings("pipeline.reloadable" => false)) }

  it "returns true" do
- expect(pipeline.system?).to be_falsey
+ expect(pipeline.reloadable?).to be_falsey
  end
  end
  end
@@ -6,6 +6,7 @@ require "logstash/codecs/base"
  require "logstash/inputs/base"
  require "logstash/filters/base"
  require "logstash/execution_context"
+ require "support/shared_contexts"

  describe LogStash::Plugin do
  context "reloadable" do
@@ -45,10 +46,10 @@ describe LogStash::Plugin do

  context "#execution_context" do
  subject { Class.new(LogStash::Plugin).new({}) }
+ include_context "execution_context"

  it "can be set and get" do
  expect(subject.execution_context).to be_nil
- execution_context = LogStash::ExecutionContext.new(:main, "id", "type", nil)
  subject.execution_context = execution_context
  expect(subject.execution_context).to eq(execution_context)
  end
@@ -48,6 +48,12 @@ describe LogStash::Plugins::HooksRegistry do
  expect { subject.register_hooks(emitter.class, listener) }.to change { subject.hooks_count(emitter.class) }.by(1)
  end

+ it "verifies if a hook is registered to a specific emitter scope" do
+ subject.register_hooks(emitter.class, listener)
+ expect(subject.registered_hook?(emitter.class, listener.class)).to be_truthy
+ expect(subject.registered_hook?(Class.new, listener.class)).to be_falsey
+ end
+
  it "link the emitter class to the listener" do
  subject.register_emitter(emitter.class, emitter.dispatcher)
  subject.register_hooks(emitter.class, listener)
@@ -8,7 +8,7 @@ describe LogStash::QueueFactory do
  let(:settings_array) do
  [
  LogStash::Setting::WritableDirectory.new("path.queue", Stud::Temporary.pathname),
- LogStash::Setting::String.new("queue.type", "memory", true, ["persisted", "memory"]),
+ LogStash::Setting::String.new("queue.type", "memory", true, ["persisted", "memory", "memory_acked"]),
  LogStash::Setting::Bytes.new("queue.page_capacity", "250mb"),
  LogStash::Setting::Bytes.new("queue.max_bytes", "1024mb"),
  LogStash::Setting::Numeric.new("queue.max_events", 0),
@@ -57,6 +57,18 @@ describe LogStash::QueueFactory do
  end
  end

+ context "when `queue.type` is `memory_acked`" do
+ before do
+ settings.set("queue.type", "memory_acked")
+ end
+
+ it "returns a `WrappedAckedQueue`" do
+ queue = subject.create(settings)
+ expect(queue).to be_kind_of(LogStash::Util::WrappedAckedQueue)
+ queue.close
+ end
+ end
+
  context "when `queue.type` is `memory`" do
  before do
  settings.set("queue.type", "memory")
@@ -6,12 +6,9 @@ require "stud/trap"
  require "stud/temporary"
  require "logstash/util/java_version"
  require "logstash/logging/json"
- require "logstash/config/modules_common"
- require "logstash/modules/util"
- require "logstash/elasticsearch_client"
+ require "logstash/config/source_loader"
  require "json"
  require_relative "../support/helpers"
- require_relative "../support/matchers"

  class NullRunner
  def run(args); end
@@ -24,6 +21,8 @@ describe LogStash::Runner do
  let(:agent) { double("agent") }

  before :each do
+ clear_data_dir
+
  allow(LogStash::Runner).to receive(:logger).and_return(logger)
  allow(logger).to receive(:debug?).and_return(true)
  allow(logger).to receive(:subscribe).with(any_args)
@@ -36,13 +35,16 @@ describe LogStash::Runner do
  allow(LogStash::Logging::Logger).to receive(:configure_logging) do |level, path|
  allow(logger).to receive(:level).and_return(level.to_sym)
  end
- allow(LogStash::Logging::Logger).to receive(:reconfigure).with(any_args)
+
  # Make sure we don't start a real pipeline here.
  # because we cannot easily close the pipeline
  allow(LogStash::Agent).to receive(:new).with(any_args).and_return(agent)
  allow(agent).to receive(:execute)
  allow(agent).to receive(:shutdown)
- allow(agent).to receive(:register_pipeline)
+ end
+
+ after :each do
+ LogStash::SETTINGS.reset
  end

  describe "argument precedence" do
@@ -54,11 +56,15 @@ describe LogStash::Runner do
  allow(LogStash::SETTINGS).to receive(:read_yaml).and_return(settings_yml_hash)
  end

+ after :each do
+ LogStash::SETTINGS.reset
+ end
+
  it "favors the last occurence of an option" do
  expect(LogStash::Agent).to receive(:new) do |settings|
  expect(settings.get("config.string")).to eq(config)
  expect(settings.get("pipeline.workers")).to eq(20)
- end
+ end.and_return(agent)
  subject.run("bin/logstash", cli_args)
  end
  end
@@ -73,7 +79,6 @@ describe LogStash::Runner do

  before do
  allow(agent).to receive(:shutdown)
- allow(agent).to receive(:register_pipeline)
  end

  it "should execute the agent" do
@@ -138,13 +143,16 @@ describe LogStash::Runner do
  end

  describe "--config.test_and_exit" do
+ before do
+ # Reset the source in a clean state before any asserts
+ LogStash::Config::SOURCE_LOADER.configure_sources([])
+ end
  subject { LogStash::Runner.new("") }
  let(:args) { ["-t", "-e", pipeline_string] }

  context "with a good configuration" do
  let(:pipeline_string) { "input { } filter { } output { }" }
  it "should exit successfully" do
- expect(logger).not_to receive(:fatal)
  expect(subject.run(args)).to eq(0)
  end
  end
@@ -168,56 +176,38 @@
  allow(pipeline).to receive(:run).and_return(task)
  allow(pipeline).to receive(:shutdown)
  end
-
+
  context "when :path.data is defined by the user" do
  let(:test_data_path) { "/tmp/ls-test-data" }
  let(:test_queue_path) { test_data_path + "/" + "queue" }
- let(:test_dlq_path) { test_data_path + "/" + "dead_letter_queue" }
-
+
  it "should set data paths" do
  expect(LogStash::Agent).to receive(:new) do |settings|
  expect(settings.get("path.data")).to eq(test_data_path)
  expect(settings.get("path.queue")).to eq(test_queue_path)
- expect(settings.get("path.dead_letter_queue")).to eq(test_dlq_path)
  end
-
+
  args = ["--path.data", test_data_path, "-e", pipeline_string]
  subject.run("bin/logstash", args)
  end
-
+
  context "and path.queue is manually set" do
  let(:queue_override_path) { "/tmp/queue-override_path" }
-
+
  it "should set data paths" do
  expect(LogStash::Agent).to receive(:new) do |settings|
  expect(settings.get("path.data")).to eq(test_data_path)
  expect(settings.get("path.queue")).to eq(queue_override_path)
  end
-
+
  LogStash::SETTINGS.set("path.queue", queue_override_path)
-
- args = ["--path.data", test_data_path, "-e", pipeline_string]
- subject.run("bin/logstash", args)
- end
- end
-
- context "and path.dead_letter_queue is manually set" do
- let(:queue_override_path) { "/tmp/queue-override_path" }
-
- it "should set data paths" do
- expect(LogStash::Agent).to receive(:new) do |settings|
- expect(settings.get("path.data")).to eq(test_data_path)
- expect(settings.get("path.dead_letter_queue")).to eq(queue_override_path)
- end
-
- LogStash::SETTINGS.set("path.dead_letter_queue", queue_override_path)
-
+
  args = ["--path.data", test_data_path, "-e", pipeline_string]
  subject.run("bin/logstash", args)
  end
  end
  end
-
+
  context "when :http.host is defined by the user" do
  it "should pass the value to the webserver" do
  expect(LogStash::Agent).to receive(:new) do |settings|
@@ -306,9 +296,6 @@ describe LogStash::Runner do
  end

  describe "config.debug" do
- after(:each) do
- LogStash::SETTINGS.set("config.debug", false)
- end
  it "should set 'config.debug' to false by default" do
  expect(LogStash::Agent).to receive(:new) do |settings|
  expect(settings.get("config.debug")).to eq(false)
@@ -327,106 +314,6 @@ describe LogStash::Runner do
  end
  end

- describe "logstash modules" do
- before(:each) do
- test_modules_dir = File.expand_path(File.join(File.dirname(__FILE__), "..", "modules_test_files"))
- LogStash::Modules::Util.register_local_modules(test_modules_dir)
- end
-
- describe "--config.test_and_exit" do
- subject { LogStash::Runner.new("") }
- let(:args) { ["-t", "--modules", module_string] }
-
- context "with a good configuration" do
- let(:module_string) { "tester" }
- it "should exit successfully" do
- expect(logger).not_to receive(:fatal)
- expect(subject.run(args)).to eq(0)
- end
- end
-
- context "with a bad configuration" do
- let(:module_string) { "rlwekjhrewlqrkjh" }
- it "should fail by returning a bad exit code" do
- expect(logger).to receive(:fatal)
- expect(subject.run(args)).to eq(1)
- end
- end
- end
-
- describe "--modules" do
- let(:args) { ["--modules", module_string, "--setup"] }
-
- context "with an available module specified but no connection to elasticsearch" do
- let(:module_string) { "tester" }
- before do
- expect(logger).to receive(:fatal) do |msg, hash|
- expect(msg).to eq("An unexpected error occurred!")
- expect(hash).to be_a_config_loading_error_hash(
- /Failed to import module configurations to Elasticsearch and\/or Kibana. Module: tester has/)
- end
- end
- it "should log fatally and return a bad exit code" do
- expect(subject.run("bin/logstash", args)).to eq(1)
- end
- end
-
- context "with an available module specified and a mocked connection to elasticsearch" do
- let(:module_string) { "tester" }
- let(:kbn_version) { "5.6.0" }
- let(:esclient) { double(:esclient) }
- let(:kbnclient) { double(:kbnclient) }
- let(:response) { double(:response) }
- before do
- allow(response).to receive(:status).and_return(404)
- allow(esclient).to receive(:head).and_return(response)
- allow(esclient).to receive(:can_connect?).and_return(true)
- allow(kbnclient).to receive(:version).and_return(kbn_version)
- allow(kbnclient).to receive(:version_parts).and_return(kbn_version.split('.'))
- allow(kbnclient).to receive(:can_connect?).and_return(true)
- allow(LogStash::ElasticsearchClient).to receive(:build).and_return(esclient)
- allow(LogStash::Modules::KibanaClient).to receive(:new).and_return(kbnclient)
-
- expect(esclient).to receive(:put).once do |path, content|
- LogStash::ElasticsearchClient::Response.new(201, "", {})
- end
- expect(kbnclient).to receive(:post).twice do |path, content|
- LogStash::Modules::KibanaClient::Response.new(201, "", {})
- end
-
- expect(LogStash::Agent).to receive(:new) do |settings, source_loader|
- pipelines = LogStash::Config::ModulesCommon.pipeline_configs(settings)
- expect(pipelines).not_to be_empty
- module_pipeline = pipelines.first
- expect(module_pipeline).to include("pipeline_id", "config_string")
- expect(module_pipeline["pipeline_id"]).to include('tester')
- expect(module_pipeline["config_string"]).to include('index => "tester-')
- agent
- end
- expect(logger).not_to receive(:fatal)
- expect(logger).not_to receive(:error)
- end
- xit "should not terminate logstash" do
- expect(subject.run("bin/logstash", args)).to be_nil
- end
- end
-
- context "with an unavailable module specified" do
- let(:module_string) { "fancypants" }
- before do
- expect(logger).to receive(:fatal) do |msg, hash|
- expect(msg).to eq("An unexpected error occurred!")
- expect(hash).to be_a_config_loading_error_hash(
- /The modules specified are not available yet. Specified modules: \["fancypants"\] Available modules:/)
- end
- end
- it "should log fatally and return a bad exit code" do
- expect(subject.run("bin/logstash", args)).to eq(1)
- end
- end
- end
- end
-
  describe "--log.level" do
  before :each do
  allow_any_instance_of(subject).to receive(:show_version)
@@ -506,10 +393,12 @@ describe LogStash::Runner do
  describe "path.settings" do
  subject { LogStash::Runner.new("") }
  context "if does not exist" do
- let(:args) { ["--path.settings", "/tmp/a/a/a/a", "-e", "input {} output {}"] }
+ let(:args) { ["--path.settings", "/tmp/a/a/a/a", "-e", "input { generator { count => 1000 }} output {}"] }

  it "should not terminate logstash" do
- expect(subject.run(args)).to eq(nil)
+ # The runner should just pass the code from the agent execute
+ allow(agent).to receive(:execute).and_return(0)
+ expect(subject.run(args)).to eq(0)
  end

  context "but if --help is passed" do