logstash-core 6.0.0.alpha2-java → 6.0.0.beta1-java

This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published in their respective registries.
Files changed (110)
  1. checksums.yaml +5 -5
  2. data/gemspec_jars.rb +6 -4
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/logstash-core.rb +2 -2
  5. data/lib/logstash-core/version.rb +1 -1
  6. data/lib/logstash-core_jars.rb +14 -10
  7. data/lib/logstash/agent.rb +4 -2
  8. data/lib/logstash/api/commands/default_metadata.rb +1 -1
  9. data/lib/logstash/api/commands/hot_threads_reporter.rb +8 -2
  10. data/lib/logstash/api/commands/node.rb +2 -2
  11. data/lib/logstash/api/commands/stats.rb +2 -2
  12. data/lib/logstash/bootstrap_check/bad_ruby.rb +2 -2
  13. data/lib/logstash/bootstrap_check/default_config.rb +2 -3
  14. data/lib/logstash/compiler.rb +12 -12
  15. data/lib/logstash/compiler/lscl.rb +17 -7
  16. data/lib/logstash/compiler/treetop_monkeypatches.rb +1 -0
  17. data/lib/logstash/config/config_ast.rb +11 -1
  18. data/lib/logstash/config/mixin.rb +5 -0
  19. data/lib/logstash/config/modules_common.rb +101 -0
  20. data/lib/logstash/config/source/base.rb +75 -0
  21. data/lib/logstash/config/source/local.rb +52 -50
  22. data/lib/logstash/config/source/modules.rb +55 -0
  23. data/lib/logstash/config/source/multi_local.rb +54 -10
  24. data/lib/logstash/config/source_loader.rb +1 -0
  25. data/lib/logstash/config/string_escape.rb +27 -0
  26. data/lib/logstash/elasticsearch_client.rb +142 -0
  27. data/lib/logstash/environment.rb +5 -1
  28. data/lib/logstash/event.rb +0 -1
  29. data/lib/logstash/instrument/global_metrics.rb +13 -0
  30. data/lib/logstash/instrument/metric_store.rb +16 -13
  31. data/lib/logstash/instrument/metric_type/counter.rb +6 -18
  32. data/lib/logstash/instrument/metric_type/gauge.rb +6 -12
  33. data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
  34. data/lib/logstash/instrument/periodic_pollers.rb +3 -1
  35. data/lib/logstash/logging/logger.rb +43 -14
  36. data/lib/logstash/modules/cli_parser.rb +74 -0
  37. data/lib/logstash/modules/elasticsearch_config.rb +22 -0
  38. data/lib/logstash/modules/elasticsearch_importer.rb +37 -0
  39. data/lib/logstash/modules/elasticsearch_resource.rb +10 -0
  40. data/lib/logstash/modules/file_reader.rb +36 -0
  41. data/lib/logstash/modules/kibana_base.rb +24 -0
  42. data/lib/logstash/modules/kibana_client.rb +122 -0
  43. data/lib/logstash/modules/kibana_config.rb +125 -0
  44. data/lib/logstash/modules/kibana_dashboards.rb +36 -0
  45. data/lib/logstash/modules/kibana_importer.rb +17 -0
  46. data/lib/logstash/modules/kibana_resource.rb +10 -0
  47. data/lib/logstash/modules/kibana_settings.rb +40 -0
  48. data/lib/logstash/modules/logstash_config.rb +120 -0
  49. data/lib/logstash/modules/resource_base.rb +38 -0
  50. data/lib/logstash/modules/scaffold.rb +50 -0
  51. data/lib/logstash/modules/settings_merger.rb +23 -0
  52. data/lib/logstash/modules/util.rb +17 -0
  53. data/lib/logstash/namespace.rb +1 -0
  54. data/lib/logstash/pipeline.rb +66 -27
  55. data/lib/logstash/pipeline_settings.rb +1 -0
  56. data/lib/logstash/plugins/registry.rb +1 -0
  57. data/lib/logstash/runner.rb +47 -3
  58. data/lib/logstash/settings.rb +20 -1
  59. data/lib/logstash/util/dead_letter_queue_manager.rb +1 -1
  60. data/lib/logstash/util/safe_uri.rb +146 -11
  61. data/lib/logstash/util/thread_dump.rb +4 -3
  62. data/lib/logstash/util/wrapped_acked_queue.rb +28 -24
  63. data/lib/logstash/util/wrapped_synchronous_queue.rb +19 -20
  64. data/lib/logstash/version.rb +1 -1
  65. data/locales/en.yml +56 -1
  66. data/logstash-core.gemspec +6 -4
  67. data/spec/logstash/agent/converge_spec.rb +2 -2
  68. data/spec/logstash/agent_spec.rb +11 -3
  69. data/spec/logstash/api/modules/logging_spec.rb +13 -7
  70. data/spec/logstash/api/modules/node_plugins_spec.rb +23 -5
  71. data/spec/logstash/api/modules/node_spec.rb +17 -15
  72. data/spec/logstash/api/modules/node_stats_spec.rb +0 -1
  73. data/spec/logstash/api/modules/plugins_spec.rb +40 -9
  74. data/spec/logstash/api/modules/root_spec.rb +0 -1
  75. data/spec/logstash/api/rack_app_spec.rb +2 -1
  76. data/spec/logstash/compiler/compiler_spec.rb +54 -7
  77. data/spec/logstash/config/config_ast_spec.rb +47 -8
  78. data/spec/logstash/config/mixin_spec.rb +14 -2
  79. data/spec/logstash/config/pipeline_config_spec.rb +7 -7
  80. data/spec/logstash/config/source/local_spec.rb +5 -2
  81. data/spec/logstash/config/source/multi_local_spec.rb +56 -10
  82. data/spec/logstash/config/source_loader_spec.rb +1 -1
  83. data/spec/logstash/config/string_escape_spec.rb +24 -0
  84. data/spec/logstash/event_spec.rb +9 -0
  85. data/spec/logstash/filters/base_spec.rb +1 -1
  86. data/spec/logstash/instrument/metric_store_spec.rb +2 -3
  87. data/spec/logstash/instrument/metric_type/counter_spec.rb +0 -12
  88. data/spec/logstash/instrument/metric_type/gauge_spec.rb +1 -8
  89. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
  90. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  91. data/spec/logstash/legacy_ruby_event_spec.rb +0 -9
  92. data/spec/logstash/legacy_ruby_timestamp_spec.rb +19 -14
  93. data/spec/logstash/modules/cli_parser_spec.rb +129 -0
  94. data/spec/logstash/modules/logstash_config_spec.rb +56 -0
  95. data/spec/logstash/modules/scaffold_spec.rb +239 -0
  96. data/spec/logstash/pipeline_dlq_commit_spec.rb +1 -1
  97. data/spec/logstash/pipeline_spec.rb +87 -20
  98. data/spec/logstash/runner_spec.rb +122 -5
  99. data/spec/logstash/setting_spec.rb +2 -2
  100. data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
  101. data/spec/logstash/timestamp_spec.rb +8 -2
  102. data/spec/logstash/util/safe_uri_spec.rb +16 -0
  103. data/spec/logstash/util/wrapped_acked_queue_spec.rb +63 -0
  104. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
  105. data/spec/support/helpers.rb +1 -1
  106. data/spec/support/matchers.rb +21 -4
  107. metadata +102 -19
  108. data/lib/logstash/instrument/metric_type/base.rb +0 -31
  109. data/lib/logstash/program.rb +0 -14
  110. data/lib/logstash/string_interpolation.rb +0 -18
data/spec/logstash/pipeline_dlq_commit_spec.rb
@@ -36,7 +36,7 @@ class DLQCommittingFilter < LogStash::Filters::Base
 end
 
 describe LogStash::Pipeline do
-  let(:pipeline_settings_obj) { LogStash::SETTINGS }
+  let(:pipeline_settings_obj) { LogStash::SETTINGS.clone }
   let(:pipeline_settings) do
     {
       "pipeline.workers" => 2,
data/spec/logstash/pipeline_spec.rb
@@ -5,6 +5,8 @@ require "logstash/filters/multiline"
 require_relative "../support/mocks_classes"
 require_relative "../support/helpers"
 require_relative "../logstash/pipeline_reporter_spec" # for DummyOutput class
+require "stud/try"
+require 'timeout'
 
 class DummyInput < LogStash::Inputs::Base
   config_name "dummyinput"
@@ -104,12 +106,21 @@ describe LogStash::Pipeline do
   let(:worker_thread_count) { 5 }
   let(:safe_thread_count) { 1 }
   let(:override_thread_count) { 42 }
+  let(:dead_letter_queue_enabled) { false }
+  let(:dead_letter_queue_path) { }
   let(:pipeline_settings_obj) { LogStash::SETTINGS }
   let(:pipeline_settings) { {} }
+  let(:max_retry) {10} #times
+  let(:timeout) {120} #seconds
 
   before :each do
     pipeline_workers_setting = LogStash::SETTINGS.get_setting("pipeline.workers")
     allow(pipeline_workers_setting).to receive(:default).and_return(worker_thread_count)
+    dlq_enabled_setting = LogStash::SETTINGS.get_setting("dead_letter_queue.enable")
+    allow(dlq_enabled_setting).to receive(:value).and_return(dead_letter_queue_enabled)
+    dlq_path_setting = LogStash::SETTINGS.get_setting("path.dead_letter_queue")
+    allow(dlq_path_setting).to receive(:value).and_return(dead_letter_queue_path)
+
     pipeline_settings.each {|k, v| pipeline_settings_obj.set(k, v) }
   end
 
@@ -117,6 +128,18 @@ describe LogStash::Pipeline do
     pipeline_settings_obj.reset
   end
 
+  describe "#ephemeral_id" do
+    it "creates an ephemeral_id at creation time" do
+      pipeline = mock_pipeline_from_string("input { generator { count => 1 } } output { null {} }")
+      expect(pipeline.ephemeral_id).to_not be_nil
+      pipeline.close
+
+      second_pipeline = mock_pipeline_from_string("input { generator { count => 1 } } output { null {} }")
+      expect(second_pipeline.ephemeral_id).not_to eq(pipeline.ephemeral_id)
+      second_pipeline.close
+    end
+  end
+
 
   describe "event cancellation" do
     # test harness for https://github.com/elastic/logstash/issues/6055
@@ -156,12 +179,16 @@ describe LogStash::Pipeline do
 
       pipeline = mock_pipeline_from_string(config, pipeline_settings_obj)
       t = Thread.new { pipeline.run }
-      sleep(0.1) until pipeline.ready?
-      wait(3).for do
-        # give us a bit of time to flush the events
-        # puts("*****" + output.events.map{|e| e.message}.to_s)
-        output.events.map{|e| e.get("message")}.include?("END")
-      end.to be_truthy
+      Timeout.timeout(timeout) do
+        sleep(0.1) until pipeline.ready?
+      end
+      Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+        wait(3).for do
+          # give us a bit of time to flush the events
+          # puts("*****" + output.events.map{|e| e.message}.to_s)
+          output.events.map{|e| e.get("message")}.include?("END")
+        end.to be_truthy
+      end
       expect(output.events.size).to eq(2)
       expect(output.events[0].get("tags")).to eq(["notdropped"])
       expect(output.events[1].get("tags")).to eq(["notdropped"])
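
This hunk (and the similar ones below) replaces an unbounded `sleep ... until` poll with two guards: Timeout.timeout fails the example if the pipeline never becomes ready within `timeout` seconds, and Stud.try re-runs the `wait(...)` expectation up to `max_retry` times before letting the last failure escape. A stripped-down sketch of the same pattern; `pipeline_ready?` and `events_flushed?` are placeholder predicates, not helpers from the spec:

    require "timeout"
    require "stud/try"

    TIMEOUT_SECONDS = 120
    MAX_RETRY = 10

    # Bound the readiness poll so a hung pipeline fails fast instead of
    # blocking the whole suite.
    Timeout.timeout(TIMEOUT_SECONDS) do
      sleep(0.1) until pipeline_ready?
    end

    # Re-run the flaky assertion; Stud.try re-raises the last error once the
    # attempts are exhausted.
    Stud.try(MAX_RETRY.times, [StandardError]) do
      raise "events not flushed yet" unless events_flushed?
    end
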
@@ -409,7 +436,9 @@ describe LogStash::Pipeline do
       # race condition if called in the thread
       p = pipeline
       t = Thread.new { p.run }
-      sleep(0.1) until pipeline.ready?
+      Timeout.timeout(timeout) do
+        sleep(0.1) until pipeline.ready?
+      end
       pipeline.shutdown
       t.join
     end
@@ -607,11 +636,15 @@ describe LogStash::Pipeline do
       Thread.abort_on_exception = true
       pipeline = mock_pipeline_from_string(config, pipeline_settings_obj)
       t = Thread.new { pipeline.run }
-      sleep(0.1) until pipeline.ready?
-      wait(10).for do
-        # give us a bit of time to flush the events
-        output.events.empty?
-      end.to be_falsey
+      Timeout.timeout(timeout) do
+        sleep(0.1) until pipeline.ready?
+      end
+      Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+        wait(10).for do
+          # give us a bit of time to flush the events
+          output.events.empty?
+        end.to be_falsey
+      end
 
       expect(output.events.any? {|e| e.get("message") == "dummy_flush"}).to eq(true)
 
@@ -711,9 +744,12 @@ describe LogStash::Pipeline do
       # subject must be first call outside the thread context because of lazy initialization
       s = subject
       t = Thread.new { s.run }
-      sleep(0.1) until subject.ready?
-
-      sleep(0.1)
+      Timeout.timeout(timeout) do
+        sleep(0.1) until subject.ready?
+      end
+      Timeout.timeout(timeout) do
+        sleep(0.1)
+      end
       expect(subject.uptime).to be > 0
       subject.shutdown
       t.join
@@ -777,13 +813,17 @@ describe LogStash::Pipeline do
       allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
 
       pipeline_thread
-      sleep(0.1) until subject.ready?
+      Timeout.timeout(timeout) do
+        sleep(0.1) until subject.ready?
+      end
 
       # make sure we have received all the generated events
-      wait(3).for do
-        # give us a bit of time to flush the events
-        dummyoutput.events.size >= number_of_events
-      end.to be_truthy
+      Stud.try(max_retry.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+        wait(3).for do
+          # give us a bit of time to flush the events
+          dummyoutput.events.size >= number_of_events
+        end.to be_truthy
+      end
     end
 
     after :each do
@@ -840,6 +880,33 @@ describe LogStash::Pipeline do
         expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:name].value).to eq(LogStash::Filters::Multiline.config_name)
       end
     end
+
+    context 'when dlq is disabled' do
+      let (:collect_stats) { subject.collect_dlq_stats}
+      let (:collected_stats) { collected_metric[:stats][:pipelines][:main][:dlq]}
+      let (:available_stats) {[:path, :queue_size_in_bytes]}
+
+      it 'should show not show any dlq stats' do
+        collect_stats
+        expect(collected_stats).to be_nil
+      end
+
+    end
+
+    context 'when dlq is enabled' do
+      let (:dead_letter_queue_enabled) { true }
+      let (:dead_letter_queue_path) { Stud::Temporary.directory }
+      let (:pipeline_dlq_path) { "#{dead_letter_queue_path}/#{pipeline_id}"}
+
+      let (:collect_stats) { subject.collect_dlq_stats }
+      let (:collected_stats) { collected_metric[:stats][:pipelines][:main][:dlq]}
+
+      it 'should show dlq stats' do
+        collect_stats
+        # A newly created dead letter queue with no entries will have a size of 1 (the version 'header')
+        expect(collected_stats[:queue_size_in_bytes].value).to eq(1)
+      end
+    end
   end
 end
 
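
The two new contexts drive `collect_dlq_stats` with the `dead_letter_queue.enable` and `path.dead_letter_queue` values stubbed in the earlier `before` block. Only an enabled queue registers a per-pipeline `:dlq` branch in the metric tree; a rough sketch of reading it, where `stats` stands in for the snapshot the spec calls `collected_metric`:

    # The nesting mirrors the expectations above: stats -> pipelines -> main -> dlq.
    dlq = stats[:stats][:pipelines][:main][:dlq]

    if dlq.nil?
      puts "DLQ disabled: no :dlq metrics are registered for the pipeline"
    else
      # An empty queue still reports one byte: the on-disk version header.
      puts "DLQ size: #{dlq[:queue_size_in_bytes].value} bytes"
    end
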
data/spec/logstash/runner_spec.rb
@@ -7,12 +7,12 @@ require "stud/temporary"
 require "logstash/util/java_version"
 require "logstash/logging/json"
 require "logstash/config/source_loader"
+require "logstash/config/modules_common"
+require "logstash/modules/util"
+require "logstash/elasticsearch_client"
 require "json"
 require_relative "../support/helpers"
-
-class NullRunner
-  def run(args); end
-end
+require_relative "../support/matchers"
 
 describe LogStash::Runner do
 
@@ -43,6 +43,10 @@ describe LogStash::Runner do
     allow(agent).to receive(:shutdown)
   end
 
+  after :each do
+    LogStash::Logging::Logger::configure_logging("info")
+  end
+
   describe "argument precedence" do
     let(:config) { "input {} output {}" }
     let(:cli_args) { ["-e", config, "-w", "20"] }
@@ -126,6 +130,7 @@ describe LogStash::Runner do
     context "with a good configuration" do
       let(:pipeline_string) { "input { } filter { } output { }" }
       it "should exit successfully" do
+        expect(logger).not_to receive(:fatal)
         expect(subject.run(args)).to eq(0)
       end
     end
@@ -170,12 +175,14 @@
     let(:queue_override_path) { "/tmp/queue-override_path" }
 
     it "should set data paths" do
+      LogStash::SETTINGS.set("path.queue", queue_override_path)
+
       expect(LogStash::Agent).to receive(:new) do |settings|
         expect(settings.get("path.data")).to eq(test_data_path)
         expect(settings.get("path.queue")).to eq(queue_override_path)
       end
 
-      LogStash::SETTINGS.set("path.queue", queue_override_path)
+
 
       args = ["--path.data", test_data_path, "-e", pipeline_string]
       subject.run("bin/logstash", args)
@@ -305,6 +312,116 @@ describe LogStash::Runner do
     end
   end
 
+  describe "logstash modules" do
+    before(:each) do
+      test_modules_dir = File.expand_path(File.join(File.dirname(__FILE__), "..", "modules_test_files"))
+      LogStash::Modules::Util.register_local_modules(test_modules_dir)
+    end
+
+    describe "--config.test_and_exit" do
+      subject { LogStash::Runner.new("") }
+      let(:args) { ["-t", "--modules", module_string] }
+
+      context "with a good configuration" do
+        let(:module_string) { "tester" }
+        it "should exit successfully" do
+          expect(logger).not_to receive(:fatal)
+          expect(subject.run(args)).to eq(0)
+        end
+      end
+
+      context "with a bad configuration" do
+        let(:module_string) { "rlwekjhrewlqrkjh" }
+        it "should fail by returning a bad exit code" do
+          expect(logger).to receive(:fatal)
+          expect(subject.run(args)).to eq(1)
+        end
+      end
+    end
+
+    describe "--modules" do
+      let(:args) { ["--modules", module_string] }
+
+      context "with an available module specified but no connection to elasticsearch" do
+        let(:module_string) { "tester" }
+        before do
+          expect(logger).to receive(:fatal) do |msg, hash|
+            expect(msg).to eq("An unexpected error occurred!")
+            expect(hash).to be_a_config_loading_error_hash(
+              /Failed to import module configurations to Elasticsearch and\/or Kibana. Module: tester has/)
+          end
+          expect(LogStash::Agent).to receive(:new) do |settings, source_loader|
+            pipelines = LogStash::Config::ModulesCommon.pipeline_configs(settings)
+            expect(pipelines).to eq([])
+            agent
+          end
+        end
+        it "should log fatally and return a bad exit code" do
+          expect(subject.run("bin/logstash", args)).to eq(1)
+        end
+      end
+
+      context "with an available module specified and a mocked connection to elasticsearch" do
+        let(:module_string) { "tester" }
+        let(:kbn_version) { "6.0.0" }
+        let(:esclient) { double(:esclient) }
+        let(:kbnclient) { double(:kbnclient) }
+        let(:response) { double(:response) }
+        before do
+          allow(response).to receive(:status).and_return(404)
+          allow(esclient).to receive(:head).and_return(response)
+          allow(esclient).to receive(:can_connect?).and_return(true)
+          allow(kbnclient).to receive(:version).and_return(kbn_version)
+          allow(kbnclient).to receive(:version_parts).and_return(kbn_version.split('.'))
+          allow(kbnclient).to receive(:can_connect?).and_return(true)
+          allow(LogStash::ElasticsearchClient).to receive(:build).and_return(esclient)
+          allow(LogStash::Modules::KibanaClient).to receive(:new).and_return(kbnclient)
+
+          expect(esclient).to receive(:put).once do |path, content|
+            LogStash::ElasticsearchClient::Response.new(201, "", {})
+          end
+          expect(kbnclient).to receive(:post).twice do |path, content|
+            LogStash::Modules::KibanaClient::Response.new(201, "", {})
+          end
+
+          expect(LogStash::Agent).to receive(:new) do |settings, source_loader|
+            pipelines = LogStash::Config::ModulesCommon.pipeline_configs(settings)
+            expect(pipelines).not_to be_empty
+            module_pipeline = pipelines.first
+            expect(module_pipeline).to include("pipeline_id", "config_string")
+            expect(module_pipeline["pipeline_id"]).to include('tester')
+            expect(module_pipeline["config_string"]).to include('index => "tester-')
+            agent
+          end
+          expect(logger).not_to receive(:fatal)
+          expect(logger).not_to receive(:error)
+        end
+        it "should not terminate logstash" do
+          expect(subject.run("bin/logstash", args)).to be_nil
+        end
+      end
+
+      context "with an unavailable module specified" do
+        let(:module_string) { "fancypants" }
+        before do
+          expect(logger).to receive(:fatal) do |msg, hash|
+            expect(msg).to eq("An unexpected error occurred!")
+            expect(hash).to be_a_config_loading_error_hash(
+              /The modules specified are not available yet. Specified modules: \["fancypants"\] Available modules:/)
+          end
+          expect(LogStash::Agent).to receive(:new) do |settings, source_loader|
+            pipelines = LogStash::Config::ModulesCommon.pipeline_configs(settings)
+            expect(pipelines).to eq([])
+            agent
+          end
+        end
+        it "should log fatally and return a bad exit code" do
+          expect(subject.run("bin/logstash", args)).to eq(1)
+        end
+      end
+    end
+  end
+
   describe "--log.level" do
     before :each do
       allow_any_instance_of(subject).to receive(:show_version)
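
All three `--modules` contexts above hinge on `LogStash::Config::ModulesCommon.pipeline_configs`, which resolves the module settings into ready-to-run pipeline definitions. A sketch of what those assertions imply about its return value; the `settings` object and the printed fields are illustrative, limited to what the spec itself exercises:

    settings  = LogStash::SETTINGS            # the runner passes its own settings object
    pipelines = LogStash::Config::ModulesCommon.pipeline_configs(settings)

    if pipelines.empty?
      # Unknown module, or the import into Elasticsearch/Kibana failed:
      # the runner logs a fatal error and exits with status 1.
      warn "no module pipelines were resolved"
    else
      pipelines.each do |pipeline|
        puts pipeline["pipeline_id"]    # includes the module name, e.g. "tester"
        puts pipeline["config_string"]  # generated config, e.g. index => "tester-..."
      end
    end
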
data/spec/logstash/setting_spec.rb
@@ -80,7 +80,7 @@ describe LogStash::Setting do
       end
       context "when the argument's class does not match @klass" do
         it "should throw an exception" do
-          expect { subject.set("not a number") }.to raise_error
+          expect { subject.set("not a number") }.to raise_error ArgumentError
         end
       end
       context "when strict=false" do
@@ -131,7 +131,7 @@ describe LogStash::Setting do
     context "when validation fails" do
       let(:new_value) { "very very very very very big text" }
       it "should raise an exception" do
-        expect { subject.set(new_value) }.to raise_error
+        expect { subject.set(new_value) }.to raise_error ArgumentError
       end
       it "should not change the value" do
         subject.set(new_value) rescue nil
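
Both assertions previously used a bare `raise_error`, which matches any exception and can therefore pass on an unrelated failure such as a typo inside the block. Pinning the matcher to ArgumentError, the class a strict setting raises on a type or validation mismatch, makes the spec specific. A minimal sketch of the tightened expectation inside a spec (the setting name and default are illustrative):

    it "rejects a value of the wrong class" do
      setting = LogStash::Setting.new("demo.workers", Integer, 1)
      expect { setting.set("not a number") }.to raise_error(ArgumentError)
    end
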
data/spec/logstash/settings/splittable_string_array_spec.rb
@@ -0,0 +1,51 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/settings"
+
+describe LogStash::Setting::SplittableStringArray do
+  let(:element_class) { String }
+  let(:default_value) { [] }
+
+  subject { described_class.new("testing", element_class, default_value) }
+
+  before do
+    subject.set(candidate)
+  end
+
+  context "when giving an array" do
+    let(:candidate) { ["hello,", "ninja"] }
+
+    it "returns the same elements" do
+      expect(subject.value).to match(candidate)
+    end
+  end
+
+  context "when given a string" do
+    context "with 1 element" do
+      let(:candidate) { "hello" }
+
+      it "returns 1 element" do
+        expect(subject.value).to match(["hello"])
+      end
+    end
+
+    context "with multiple element" do
+      let(:candidate) { "hello,ninja" }
+
+      it "returns an array of string" do
+        expect(subject.value).to match(["hello", "ninja"])
+      end
+    end
+  end
+
+  context "when defining a custom tokenizer" do
+    subject { described_class.new("testing", element_class, default_value, strict=true, ";") }
+
+    let(:candidate) { "hello;ninja" }
+
+    it "returns an array of string" do
+      expect(subject.value).to match(["hello", "ninja"])
+    end
+  end
+end
+
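
The new SplittableStringArray setting accepts either a real array or a single delimited string, which is what lets list-valued flags be passed as one token on the command line. A short usage sketch; the setting names are illustrative, the default delimiter is a comma, and the fifth constructor argument overrides it, as in the last context above:

    require "logstash/settings"

    hosts = LogStash::Setting::SplittableStringArray.new("demo.hosts", String, [])
    hosts.set("es01,es02,es03")
    hosts.value  # => ["es01", "es02", "es03"]

    # Custom tokenizer, mirroring the ";"-separated example in the spec:
    paths = LogStash::Setting::SplittableStringArray.new("demo.paths", String, [], true, ";")
    paths.set("/a;/b")
    paths.value  # => ["/a", "/b"]
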
data/spec/logstash/timestamp_spec.rb
@@ -5,6 +5,12 @@ require "logstash/timestamp"
 
 describe LogStash::Timestamp do
   context "constructors" do
+    # Via JRuby 9k time see logstash/issues/7463
+    # JRuby 9k now uses Java 8 Time with nanosecond precision but
+    # our Timestamp use Joda with millisecond precision
+    # expected: 2017-06-15 10:34:08.389999999 +0000
+    # got: 2017-06-15 10:34:08.389000000 +0000
+    # we may need to use `be_within(0.000999999).of()` in other places too
     it "should work" do
       t = LogStash::Timestamp.new
       expect(t.time.to_i).to be_within(1).of Time.now.to_i
@@ -12,9 +18,9 @@ describe LogStash::Timestamp do
       t = LogStash::Timestamp.now
       expect(t.time.to_i).to be_within(1).of Time.now.to_i
 
-      now = Time.now.utc
+      now = DateTime.now.to_time.utc
       t = LogStash::Timestamp.new(now)
-      expect(t.time).to eq(now)
+      expect(t.time.to_f).to be_within(0.000999999).of(now.to_f)
 
       t = LogStash::Timestamp.at(now.to_i)
       expect(t.time.to_i).to eq(now.to_i)
data/spec/logstash/util/safe_uri_spec.rb
@@ -16,5 +16,21 @@ module LogStash module Util
         expect(cloned_safe_uri.query).to eq("a=b")
       end
     end
+
+    describe "handling escapable fields" do
+      let(:user) { "u%20" }
+      let(:password) { "p%20ss" }
+      let(:path) { "/a%20/path" }
+      let(:query) { "a%20query&another=es%3dq" }
+      let(:fragment) { "spacey%20fragment" }
+      subject { LogStash::Util::SafeURI.new("http://#{user}:#{password}@example.net#{path}?#{query}\##{fragment}") }
+
+      [:user, :password, :path, :query, :fragment].each do |field|
+        it "should not escape the #{field} field" do
+          expected = self.send(field)
+          expect(subject.send(field)).to eq(expected)
+        end
+      end
+    end
   end
 end end
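
The new examples pin down that percent-escaped components survive a round trip through LogStash::Util::SafeURI unchanged: the wrapper must neither unescape nor double-escape credentials, path, query, or fragment. A compact usage sketch of the same contract, reusing the values from the spec:

    require "logstash/util/safe_uri"

    uri = LogStash::Util::SafeURI.new("http://u%20:p%20ss@example.net/a%20/path?a%20query&another=es%3dq#spacey%20fragment")

    uri.user      # => "u%20"   (not "u " and not "u%2520")
    uri.password  # => "p%20ss"
    uri.path      # => "/a%20/path"
    uri.query     # => "a%20query&another=es%3dq"
    uri.fragment  # => "spacey%20fragment"
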