logstash-core 6.0.0.beta2-java → 6.0.0-java

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/gemspec_jars.rb +5 -5
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/version.rb +8 -4
  5. data/lib/logstash-core_jars.rb +10 -10
  6. data/lib/logstash/agent.rb +3 -2
  7. data/lib/logstash/compiler/lscl.rb +15 -3
  8. data/lib/logstash/config/config_ast.rb +3 -2
  9. data/lib/logstash/config/modules_common.rb +1 -0
  10. data/lib/logstash/config/source/local.rb +2 -1
  11. data/lib/logstash/instrument/periodic_poller/dlq.rb +8 -3
  12. data/lib/logstash/instrument/periodic_poller/pq.rb +7 -3
  13. data/lib/logstash/logging/logger.rb +4 -1
  14. data/lib/logstash/modules/kibana_client.rb +35 -8
  15. data/lib/logstash/modules/logstash_config.rb +1 -1
  16. data/lib/logstash/modules/settings_merger.rb +8 -2
  17. data/lib/logstash/pipeline.rb +10 -19
  18. data/lib/logstash/pipeline_action/stop.rb +1 -0
  19. data/lib/logstash/runner.rb +4 -1
  20. data/lib/logstash/util/cloud_setting_id.rb +46 -12
  21. data/lib/logstash/util/modules_setting_array.rb +1 -1
  22. data/lib/logstash/util/password.rb +2 -4
  23. data/lib/logstash/util/wrapped_acked_queue.rb +6 -0
  24. data/lib/logstash/util/wrapped_synchronous_queue.rb +6 -0
  25. data/lib/logstash/version.rb +8 -10
  26. data/logstash-core.gemspec +25 -3
  27. data/spec/logstash/agent/converge_spec.rb +23 -10
  28. data/spec/logstash/agent_spec.rb +35 -15
  29. data/spec/logstash/api/modules/node_stats_spec.rb +5 -1
  30. data/spec/logstash/compiler/compiler_spec.rb +29 -0
  31. data/spec/logstash/config/source/local_spec.rb +3 -2
  32. data/spec/logstash/event_spec.rb +57 -0
  33. data/spec/logstash/modules/kibana_client_spec.rb +60 -0
  34. data/spec/logstash/modules/logstash_config_spec.rb +7 -1
  35. data/spec/logstash/modules/scaffold_spec.rb +1 -1
  36. data/spec/logstash/modules/settings_merger_spec.rb +32 -2
  37. data/spec/logstash/pipeline_action/create_spec.rb +4 -1
  38. data/spec/logstash/pipeline_action/reload_spec.rb +4 -1
  39. data/spec/logstash/pipeline_dlq_commit_spec.rb +3 -1
  40. data/spec/logstash/pipeline_pq_file_spec.rb +5 -7
  41. data/spec/logstash/pipeline_spec.rb +26 -38
  42. data/spec/logstash/runner_spec.rb +1 -5
  43. data/spec/logstash/settings/modules_spec.rb +13 -2
  44. data/spec/logstash/settings/writable_directory_spec.rb +13 -10
  45. data/spec/logstash/timestamp_spec.rb +2 -2
  46. data/spec/logstash/util/cloud_setting_id_spec.rb +93 -0
  47. data/spec/support/helpers.rb +1 -1
  48. data/spec/support/mocks_classes.rb +14 -0
  49. data/spec/support/shared_contexts.rb +9 -0
  50. data/versions-gem-copy.yml +23 -0
  51. metadata +19 -14
data/spec/logstash/modules/kibana_client_spec.rb
@@ -0,0 +1,60 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/modules/kibana_client"
+module LogStash module Modules
+  KibanaTestResponse = Struct.new(:code, :body, :headers)
+  class KibanaTestClient
+    def http(method, endpoint, options)
+      self
+    end
+    def call
+      KibanaTestResponse.new(200, '{"version":{"number":"1.2.3","build_snapshot":false}}', {})
+    end
+  end
+  describe KibanaClient do
+    let(:settings) { Hash.new }
+    let(:test_client) { KibanaTestClient.new }
+    let(:kibana_host) { "https://foo.bar:4321" }
+    subject(:kibana_client) { described_class.new(settings, test_client) }
+
+    context "when supplied with conflicting scheme data" do
+      let(:settings) { {"var.kibana.scheme" => "http", "var.kibana.host" => kibana_host} }
+      it "a new instance will throw an error" do
+        expect{described_class.new(settings, test_client)}.to raise_error(ArgumentError, /Detected differing Kibana host schemes as sourced from var\.kibana\.host: 'https' and var\.kibana\.scheme: 'http'/)
+      end
+    end
+
+    context "when supplied with invalid schemes" do
+      ["httpd", "ftp", "telnet"].each do |uri_scheme|
+        it "a new instance will throw an error" do
+          re = /Kibana host scheme given is invalid, given value: '#{uri_scheme}' - acceptable values: 'http', 'https'/
+          expect{described_class.new({"var.kibana.scheme" => uri_scheme}, test_client)}.to raise_error(ArgumentError, re)
+        end
+      end
+    end
+
+    context "when supplied with the scheme in the host only" do
+      let(:settings) { {"var.kibana.host" => kibana_host} }
+      it "has a version and an endpoint" do
+        expect(kibana_client.version).to eq("1.2.3")
+        expect(kibana_client.endpoint).to eq("https://foo.bar:4321")
+      end
+    end
+
+    context "when supplied with the scheme in the scheme setting" do
+      let(:settings) { {"var.kibana.scheme" => "https", "var.kibana.host" => "foo.bar:4321"} }
+      it "has a version and an endpoint" do
+        expect(kibana_client.version).to eq("1.2.3")
+        expect(kibana_client.endpoint).to eq(kibana_host)
+      end
+    end
+
+    context "when supplied with a no scheme host setting and ssl is enabled" do
+      let(:settings) { {"var.kibana.ssl.enabled" => "true", "var.kibana.host" => "foo.bar:4321"} }
+      it "has a version and an endpoint" do
+        expect(kibana_client.version).to eq("1.2.3")
+        expect(kibana_client.endpoint).to eq(kibana_host)
+      end
+    end
+  end
+end end
data/spec/logstash/modules/logstash_config_spec.rb
@@ -3,7 +3,7 @@ require "logstash/modules/logstash_config"
 
 describe LogStash::Modules::LogStashConfig do
   let(:mod) { instance_double("module", :directory => Stud::Temporary.directory, :module_name => "testing") }
-  let(:settings) { {"var.logstash.testing.pants" => "fancy" }}
+  let(:settings) { {"var.logstash.testing.pants" => "fancy", "var.elasticsearch.password" => LogStash::Util::Password.new('correct_horse_battery_staple') }}
   subject { described_class.new(mod, settings) }
 
   describe "configured inputs" do
@@ -36,6 +36,12 @@ describe LogStash::Modules::LogStashConfig do
     end
   end
 
+  describe 'elastic_search_config' do
+    it 'should put the password in correctly' do
+      expect(subject.elasticsearch_output_config()).to include("password => \"correct_horse_battery_staple\"")
+    end
+  end
+
   describe "alias modules options" do
     let(:alias_table) do
       { "var.logstash.testing" => "var.logstash.better" }
data/spec/logstash/modules/scaffold_spec.rb
@@ -81,7 +81,7 @@ ERB
 
   it "provides a logstash config" do
     expect(test_module.logstash_configuration).to be_nil
-    test_module.with_settings(module_settings)
+    test_module.with_settings(LogStash::Util::ModulesSettingArray.new([module_settings]).first)
     expect(test_module.logstash_configuration).not_to be_nil
     config_string = test_module.config_string
     expect(config_string).to include("port => 5606")
data/spec/logstash/modules/settings_merger_spec.rb
@@ -28,6 +28,36 @@ describe LogStash::Modules::SettingsMerger do
     end
   end
 
+  describe "#merge_kibana_auth" do
+
+    before do
+      described_class.merge_kibana_auth!(mod_settings)
+    end
+
+    context 'only elasticsearch username and password is set' do
+      let(:mod_settings) { {"name"=>"mod1", "var.input.tcp.port"=>2222, "var.elasticsearch.username"=>"rupert", "var.elasticsearch.password"=>"fotherington" } }
+      it "sets kibana username and password" do
+        expect(mod_settings["var.elasticsearch.username"]).to eq("rupert")
+        expect(mod_settings["var.elasticsearch.password"]).to eq("fotherington")
+        expect(mod_settings["var.kibana.username"]).to eq("rupert")
+        expect(mod_settings["var.kibana.password"]).to eq("fotherington")
+      end
+    end
+
+    context 'elasticsearch and kibana usernames and passwords are set' do
+      let(:mod_settings) { {"name"=>"mod1", "var.input.tcp.port"=>2222, "var.elasticsearch.username"=>"rupert", "var.elasticsearch.password"=>"fotherington",
+                            "var.kibana.username"=>"davey", "var.kibana.password"=>"stott"} }
+
+      it "keeps existing kibana username and password" do
+        expect(mod_settings["var.elasticsearch.username"]).to eq("rupert")
+        expect(mod_settings["var.elasticsearch.password"]).to eq("fotherington")
+        expect(mod_settings["var.kibana.username"]).to eq("davey")
+        expect(mod_settings["var.kibana.password"]).to eq("stott")
+      end
+    end
+
+  end
+
   describe "#merge_cloud_settings" do
     let(:cloud_id) { LogStash::Util::CloudSettingId.new("label:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy") }
     let(:cloud_auth) { LogStash::Util::CloudSettingAuth.new("elastix:bigwhoppingfairytail") }
@@ -38,7 +68,7 @@ describe LogStash::Modules::SettingsMerger do
      {
        "var.kibana.scheme" => "https",
        "var.kibana.host" => "identifier.us-east-1.aws.found.io:443",
-       "var.elasticsearch.hosts" => "notareal.us-east-1.aws.found.io:443",
+       "var.elasticsearch.hosts" => "https://notareal.us-east-1.aws.found.io:443",
        "var.elasticsearch.username" => "elastix",
        "var.kibana.username" => "elastix"
      }
@@ -63,7 +93,7 @@ describe LogStash::Modules::SettingsMerger do
      {
        "var.kibana.scheme" => "https",
        "var.kibana.host" => "identifier.us-east-1.aws.found.io:443",
-       "var.elasticsearch.hosts" => "notareal.us-east-1.aws.found.io:443",
+       "var.elasticsearch.hosts" => "https://notareal.us-east-1.aws.found.io:443",
      }
    end
    let(:ls_settings) { SubstituteSettingsForRSpec.new({"cloud.id" => cloud_id}) }
data/spec/logstash/pipeline_action/create_spec.rb
@@ -19,7 +19,10 @@ describe LogStash::PipelineAction::Create do
   subject { described_class.new(pipeline_config, metric) }
 
   after do
-    pipelines.each { |_, pipeline| pipeline.shutdown }
+    pipelines.each do |_, pipeline|
+      pipeline.shutdown
+      pipeline.thread.join
+    end
   end
 
   it "returns the pipeline_id" do
data/spec/logstash/pipeline_action/reload_spec.rb
@@ -22,7 +22,10 @@ describe LogStash::PipelineAction::Reload do
   end
 
   after do
-    pipelines.each { |_, pipeline| pipeline.shutdown }
+    pipelines.each do |_, pipeline|
+      pipeline.shutdown
+      pipeline.thread.join
+    end
   end
 
   it "returns the pipeline_id" do
data/spec/logstash/pipeline_dlq_commit_spec.rb
@@ -67,7 +67,7 @@ describe LogStash::Pipeline do
   end
 
   after(:each) do
-    FileUtils.remove_entry pipeline_settings["path.dead_letter_queue"]
+    FileUtils.rm_rf(pipeline_settings["path.dead_letter_queue"])
   end
 
   context "dlq is enabled" do
@@ -85,6 +85,7 @@ describe LogStash::Pipeline do
       entry = dlq_reader.pollEntry(40)
       expect(entry).to_not be_nil
       expect(entry.reason).to eq("my reason")
+      subject.shutdown
     end
   end
 
@@ -101,6 +102,7 @@ describe LogStash::Pipeline do
      subject.run
      dlq_path = java.nio.file.Paths.get(pipeline_settings_obj.get("path.dead_letter_queue"), pipeline_id)
      expect(java.nio.file.Files.exists(dlq_path)).to eq(false)
+     subject.shutdown
    end
  end
 
data/spec/logstash/pipeline_pq_file_spec.rb
@@ -1,8 +1,8 @@
 # encoding: utf-8
 require "spec_helper"
 require "logstash/inputs/generator"
-require "logstash/filters/multiline"
 require_relative "../support/helpers"
+require_relative "../support/mocks_classes"
 
 class PipelinePqFileOutput < LogStash::Outputs::Base
   config_name "pipelinepqfileoutput"
@@ -40,7 +40,7 @@ describe LogStash::Pipeline do
   let(:pipeline_settings_obj) { LogStash::SETTINGS.clone }
   let(:pipeline_id) { "main" }
 
-  let(:multiline_id) { "my-multiline" }
+  let(:dummy_id) { "my-dummyid" }
   let(:output_id) { "my-pipelinepqfileoutput" }
   let(:generator_id) { "my-generator" }
   let(:config) do
@@ -52,10 +52,8 @@ describe LogStash::Pipeline do
        }
      }
      filter {
-       multiline {
-         id => "#{multiline_id}"
-         pattern => "hello"
-         what => next
+       dummyfilter {
+         id => "#{dummy_id}"
        }
      }
      output {
@@ -98,7 +96,7 @@ describe LogStash::Pipeline do
     allow(PipelinePqFileOutput).to receive(:new).with(any_args).and_return(counting_output)
     allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
     allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
-    allow(LogStash::Plugin).to receive(:lookup).with("filter", "multiline").and_return(LogStash::Filters::Multiline)
+    allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(LogStash::Filters::DummyFilter)
    allow(LogStash::Plugin).to receive(:lookup).with("output", "pipelinepqfileoutput").and_return(PipelinePqFileOutput)
 
    pipeline_workers_setting = LogStash::SETTINGS.get_setting("pipeline.workers")
data/spec/logstash/pipeline_spec.rb
@@ -1,7 +1,7 @@
 # encoding: utf-8
 require "spec_helper"
 require "logstash/inputs/generator"
-require "logstash/filters/multiline"
+require "logstash/filters/drop"
 require_relative "../support/mocks_classes"
 require_relative "../support/helpers"
 require_relative "../logstash/pipeline_reporter_spec" # for DummyOutput class
@@ -254,6 +254,14 @@ describe LogStash::Pipeline do
       pipeline = mock_pipeline_from_string(test_config_with_filters, pipeline_settings_obj)
       pipeline.close
     end
+
+    it "should log each filtered event if config.debug is set to true" do
+      pipeline_settings_obj.set("config.debug", true)
+      pipeline = mock_pipeline_from_string(test_config_with_filters, pipeline_settings_obj)
+      expect(logger).to receive(:debug).with(/filter received/, anything)
+      pipeline.filter_func([LogStash::Event.new])
+      pipeline.close
+    end
   end
 
   context "when there is no command line -w N set" do
@@ -411,19 +419,12 @@ describe LogStash::Pipeline do
   context "cancelled events should not propagate down the filters" do
     config <<-CONFIG
       filter {
-        multiline {
-          pattern => "hello"
-          what => next
-        }
-        multiline {
-          pattern => "hello"
-          what => next
-        }
+        drop {}
      }
    CONFIG
 
    sample("hello") do
-      expect(subject.get("message")).to eq("hello")
+      expect(subject).to eq(nil)
    end
  end
 
@@ -433,19 +434,10 @@ describe LogStash::Pipeline do
       clone {
         clones => ["clone1"]
       }
-      multiline {
-        pattern => "bar"
-        what => previous
-      }
     }
   CONFIG
-
   sample(["foo", "bar"]) do
-    expect(subject.size).to eq(2)
-    expect(subject[0].get("message")).to eq("foo\nbar")
-    expect(subject[0].get("type")).to be_nil
-    expect(subject[1].get("message")).to eq("foo\nbar")
-    expect(subject[1].get("type")).to eq("clone1")
+    expect(subject.size).to eq(4)
   end
  end
 end
@@ -634,9 +626,9 @@ describe LogStash::Pipeline do
 
    it "should handle evaluating different config" do
      expect(pipeline1.output_func(LogStash::Event.new)).not_to include(nil)
-      expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+      expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
      expect(pipeline2.output_func(LogStash::Event.new)).not_to include(nil)
-      expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+      expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
    end
  end
 
@@ -716,9 +708,9 @@ describe LogStash::Pipeline do
      # in the current instance and was returning an array containing nil values for
      # the match.
      expect(pipeline1.output_func(LogStash::Event.new)).not_to include(nil)
-      expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+      expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
      expect(pipeline2.output_func(LogStash::Event.new)).not_to include(nil)
-      expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+      expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
    end
  end
 
@@ -797,8 +789,8 @@ describe LogStash::Pipeline do
    let(:pipeline_settings) { { "pipeline.id" => pipeline_id } }
    let(:pipeline_id) { "main" }
    let(:number_of_events) { 420 }
-    let(:multiline_id) { "my-multiline" }
-    let(:multiline_id_other) { "my-multiline_other" }
+    let(:dummy_id) { "my-multiline" }
+    let(:dummy_id_other) { "my-multiline_other" }
    let(:dummy_output_id) { "my-dummyoutput" }
    let(:generator_id) { "my-generator" }
    let(:config) do
@@ -810,15 +802,11 @@ describe LogStash::Pipeline do
        }
      }
      filter {
-       multiline {
-         id => "#{multiline_id}"
-         pattern => "hello"
-         what => next
+       dummyfilter {
+         id => "#{dummy_id}"
        }
-       multiline {
-         id => "#{multiline_id_other}"
-         pattern => "hello"
-         what => next
+       dummyfilter {
+         id => "#{dummy_id_other}"
        }
      }
      output {
@@ -841,7 +829,7 @@ describe LogStash::Pipeline do
      allow(::LogStash::Outputs::DummyOutput).to receive(:new).with(any_args).and_return(dummyoutput)
      allow(LogStash::Plugin).to receive(:lookup).with("input", "generator").and_return(LogStash::Inputs::Generator)
      allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(LogStash::Codecs::Plain)
-      allow(LogStash::Plugin).to receive(:lookup).with("filter", "multiline").and_return(LogStash::Filters::Multiline)
+      allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(LogStash::Filters::DummyFilter)
      allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
 
      pipeline_thread
@@ -885,7 +873,7 @@ describe LogStash::Pipeline do
    end
 
    it "populates the filter metrics" do
-      [multiline_id, multiline_id_other].map(&:to_sym).each do |id|
+      [dummy_id, dummy_id_other].map(&:to_sym).each do |id|
        [:in, :out].each do |metric_key|
          plugin_name = id.to_sym
          expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:events][metric_key].value).to eq(number_of_events)
@@ -907,9 +895,9 @@ describe LogStash::Pipeline do
    end
 
    it "populates the name of the filter plugin" do
-      [multiline_id, multiline_id_other].map(&:to_sym).each do |id|
+      [dummy_id, dummy_id_other].map(&:to_sym).each do |id|
        plugin_name = id.to_sym
-        expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:name].value).to eq(LogStash::Filters::Multiline.config_name)
+        expect(collected_metric[:stats][:pipelines][:main][:plugins][:filters][plugin_name][:name].value).to eq(LogStash::Filters::DummyFilter.config_name)
      end
    end
 
data/spec/logstash/runner_spec.rb
@@ -35,7 +35,7 @@ describe LogStash::Runner do
    allow(LogStash::Logging::Logger).to receive(:configure_logging) do |level, path|
      allow(logger).to receive(:level).and_return(level.to_sym)
    end
-
+    allow(LogStash::Logging::Logger).to receive(:reconfigure).with(any_args)
    # Make sure we don't start a real pipeline here.
    # because we cannot easily close the pipeline
    allow(LogStash::Agent).to receive(:new).with(any_args).and_return(agent)
@@ -43,10 +43,6 @@ describe LogStash::Runner do
    allow(agent).to receive(:shutdown)
  end
 
-  after :each do
-    LogStash::Logging::Logger::configure_logging("info")
-  end
-
  describe "argument precedence" do
    let(:config) { "input {} output {}" }
    let(:cli_args) { ["-e", config, "-w", "20"] }
data/spec/logstash/settings/modules_spec.rb
@@ -9,12 +9,23 @@ describe LogStash::Setting::Modules do
  describe "Modules.Cli" do
    subject { described_class.new("mycloudid", LogStash::Util::ModulesSettingArray, []) }
    context "when given an array of hashes that contains a password key" do
+      let(:secret) { 'some_secret'}
      it "should convert password Strings to Password" do
-        source = [{"var.kibana.password" => "some_secret"}]
+        source = [{"var.kibana.password" => secret}]
        setting = subject.set(source)
        expect(setting).to be_a(Array)
        expect(setting.__class__).to eq(LogStash::Util::ModulesSettingArray)
        expect(setting.first.fetch("var.kibana.password")).to be_a(LogStash::Util::Password)
+        expect(setting.first.fetch("var.kibana.password").value).to eq(secret)
+      end
+
+      it 'should not wrap values that are already passwords' do
+        source = [{"var.kibana.password" => LogStash::Util::Password.new(secret)}]
+        setting = subject.set(source)
+        expect(setting).to be_a(Array)
+        expect(setting.__class__).to eq(LogStash::Util::ModulesSettingArray)
+        expect(setting.first.fetch("var.kibana.password")).to be_a(LogStash::Util::Password)
+        expect(setting.first.fetch("var.kibana.password").value).to eq(secret)
      end
    end
  end
@@ -42,7 +53,7 @@ describe LogStash::Setting::Modules do
    context "when given a badly formatted encoded id" do
      it "should not raise an error" do
        encoded = Base64.urlsafe_encode64("foo$$bal")
-        expect { subject.set(encoded) }.to raise_error(ArgumentError, /Cloud Id, after decoding, is invalid. Format: '<part1>\$<part2>\$<part3>'/)
+        expect { subject.set(encoded) }.to raise_error(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"foo$$bal\".")
      end
    end
 
data/spec/logstash/settings/writable_directory_spec.rb
@@ -3,17 +3,17 @@ require "spec_helper"
 require "logstash/settings"
 require "tmpdir"
 require "socket" # for UNIXSocket
+require "fileutils"
 
 describe LogStash::Setting::WritableDirectory do
-  let(:mode_rx) { 0555 }
   # linux is 108, Macos is 104, so use a safe value
   # Stud::Temporary.pathname, will exceed that size without adding anything
   let(:parent) { File.join(Dir.tmpdir, Time.now.to_f.to_s) }
   let(:path) { File.join(parent, "fancy") }
 
   before { Dir.mkdir(parent) }
-  after { Dir.exist?(path) && Dir.unlink(path) rescue nil }
-  after { Dir.unlink(parent) }
+  after { Dir.exist?(path) && FileUtils.rm_rf(path)}
+  after { FileUtils.rm_rf(parent) }
 
   shared_examples "failure" do
     before { subject.set(path) }
@@ -44,8 +44,9 @@ describe LogStash::Setting::WritableDirectory do
    end
 
    context "and the directory cannot be created" do
-      before { File.chmod(mode_rx, parent) }
      it "should fail" do
+        # using chmod does not work on Windows better mock and_raise("message")
+        expect(FileUtils).to receive(:mkdir_p).and_raise("foobar")
        expect { subject.value }.to raise_error
      end
    end
@@ -66,7 +67,8 @@ describe LogStash::Setting::WritableDirectory do
    end
 
    context "but is not writable" do
-      before { File.chmod(0, path) }
+      # chmod does not work on Windows, mock writable? instead
+      before { expect(File).to receive(:writable?).and_return(false) }
      it_behaves_like "failure"
    end
  end
@@ -84,12 +86,13 @@ describe LogStash::Setting::WritableDirectory do
      before { socket } # realize `socket` value
      after { socket.close }
      it_behaves_like "failure"
-    end
+    end unless LogStash::Environment.windows?
 
+
    context "but is a symlink" do
-      before { File::symlink("whatever", path) }
+      before { FileUtils.symlink("whatever", path) }
      it_behaves_like "failure"
-    end
+    end unless LogStash::Environment.windows?
  end
 
  context "when the directory is missing" do
@@ -114,8 +117,8 @@ describe LogStash::Setting::WritableDirectory do
 
    context "and cannot be created" do
      before do
-        # Remove write permission on the parent
-        File.chmod(mode_rx, parent)
+        # chmod does not work on Windows, mock writable? instead
+        expect(File).to receive(:writable?).and_return(false)
      end
 
      it_behaves_like "failure"