logstash-core 5.5.3-java → 5.6.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/api/commands/node.rb +2 -2
  5. data/lib/logstash/api/commands/stats.rb +2 -2
  6. data/lib/logstash/config/config_ast.rb +24 -1
  7. data/lib/logstash/config/modules_common.rb +47 -15
  8. data/lib/logstash/config/source/modules.rb +55 -0
  9. data/lib/logstash/config/string_escape.rb +27 -0
  10. data/lib/logstash/elasticsearch_client.rb +24 -2
  11. data/lib/logstash/environment.rb +2 -0
  12. data/lib/logstash/filter_delegator.rb +9 -6
  13. data/lib/logstash/instrument/collector.rb +7 -5
  14. data/lib/logstash/instrument/metric_store.rb +11 -11
  15. data/lib/logstash/instrument/namespaced_metric.rb +4 -0
  16. data/lib/logstash/instrument/namespaced_null_metric.rb +4 -0
  17. data/lib/logstash/instrument/null_metric.rb +10 -0
  18. data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
  19. data/lib/logstash/instrument/periodic_pollers.rb +3 -1
  20. data/lib/logstash/instrument/wrapped_write_client.rb +33 -24
  21. data/lib/logstash/logging/logger.rb +26 -19
  22. data/lib/logstash/modules/{importer.rb → elasticsearch_importer.rb} +3 -3
  23. data/lib/logstash/modules/kibana_base.rb +24 -0
  24. data/lib/logstash/modules/kibana_client.rb +124 -0
  25. data/lib/logstash/modules/kibana_config.rb +29 -28
  26. data/lib/logstash/modules/kibana_dashboards.rb +36 -0
  27. data/lib/logstash/modules/kibana_importer.rb +17 -0
  28. data/lib/logstash/modules/kibana_settings.rb +40 -0
  29. data/lib/logstash/modules/logstash_config.rb +89 -17
  30. data/lib/logstash/modules/resource_base.rb +6 -5
  31. data/lib/logstash/modules/scaffold.rb +11 -3
  32. data/lib/logstash/modules/settings_merger.rb +23 -0
  33. data/lib/logstash/modules/util.rb +17 -0
  34. data/lib/logstash/output_delegator.rb +7 -5
  35. data/lib/logstash/pipeline.rb +34 -2
  36. data/lib/logstash/runner.rb +8 -13
  37. data/lib/logstash/settings.rb +20 -1
  38. data/lib/logstash/util/wrapped_acked_queue.rb +5 -24
  39. data/lib/logstash/util/wrapped_synchronous_queue.rb +14 -24
  40. data/lib/logstash/version.rb +1 -1
  41. data/locales/en.yml +11 -4
  42. data/spec/logstash/agent_spec.rb +19 -6
  43. data/spec/logstash/api/modules/node_spec.rb +2 -1
  44. data/spec/logstash/config/config_ast_spec.rb +47 -8
  45. data/spec/logstash/config/string_escape_spec.rb +24 -0
  46. data/spec/logstash/event_spec.rb +9 -0
  47. data/spec/logstash/filter_delegator_spec.rb +21 -7
  48. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
  49. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  50. data/spec/logstash/legacy_ruby_event_spec.rb +4 -4
  51. data/spec/logstash/modules/logstash_config_spec.rb +56 -0
  52. data/spec/logstash/modules/scaffold_spec.rb +234 -0
  53. data/spec/logstash/output_delegator_spec.rb +15 -5
  54. data/spec/logstash/pipeline_spec.rb +76 -26
  55. data/spec/logstash/runner_spec.rb +46 -25
  56. data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
  57. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
  58. metadata +22 -4
  59. data/lib/logstash/modules/kibana_base_resource.rb +0 -10
  60. data/lib/logstash/program.rb +0 -14
data/spec/logstash/config/string_escape_spec.rb
@@ -0,0 +1,24 @@
+
+ require "logstash/config/string_escape"
+
+ describe LogStash::Config::StringEscape do
+   let(:result) { described_class.process_escapes(text) }
+
+   table = {
+     '\\"' => '"',
+     "\\'" => "'",
+     "\\n" => "\n",
+     "\\r" => "\r",
+     "\\t" => "\t",
+     "\\\\" => "\\",
+   }
+
+   table.each do |input, expected|
+     context "when processing #{input.inspect}" do
+       let(:text) { input }
+       it "should produce #{expected.inspect}" do
+         expect(result).to be == expected
+       end
+     end
+   end
+ end
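
The table in this new spec pins down the escape handling added in lib/logstash/config/string_escape.rb (file 9 above, +27 lines), whose own diff is not reproduced here. As a rough illustration only, assuming nothing beyond what the table asserts, a minimal escape processor could look like the sketch below (not the shipped implementation):

# Illustrative sketch of an escape processor consistent with the spec table above.
module StringEscapeSketch
  def self.process_escapes(input)
    input.gsub(/\\./) do |escaped|
      case escaped[1]
      when '"', "'", "\\" then escaped[1]  # \" \' \\ become the bare character
      when "n" then "\n"
      when "r" then "\r"
      when "t" then "\t"
      else escaped                         # unknown escapes pass through unchanged
      end
    end
  end
end

StringEscapeSketch.process_escapes('tab\\tseparated')  # => "tab\tseparated"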
data/spec/logstash/event_spec.rb
@@ -84,6 +84,15 @@ describe LogStash::Event do
      expect(e.get("foo")).to eq("bar")
    end

+   it "should propagate changes to mutable strings to java APIs" do
+     e = LogStash::Event.new()
+     e.to_java.setField("foo", "bar")
+     expect(e.get("foo")).to eq("bar")
+     e.get("foo").gsub!(/bar/, 'pff')
+     expect(e.get("foo")).to eq("pff")
+     expect(e.to_java.getField("foo")).to eq("pff")
+   end
+
    it "should set deep hash values" do
      e = LogStash::Event.new()
      expect(e.set("[foo][bar]", "baz")).to eq("baz")
data/spec/logstash/filter_delegator_spec.rb
@@ -6,12 +6,23 @@ require "logstash/event"
  require "logstash/execution_context"

  describe LogStash::FilterDelegator do
+
+   class MockGauge
+     def increment(_)
+     end
+   end
+
+   include_context "execution_context"
+
    let(:logger) { double(:logger) }
    let(:filter_id) { "my-filter" }
    let(:config) do
      { "host" => "127.0.0.1", "id" => filter_id }
    end
    let(:collector) { [] }
+   let(:counter_in) { MockGauge.new }
+   let(:counter_out) { MockGauge.new }
+   let(:counter_time) { MockGauge.new }
    let(:metric) { LogStash::Instrument::NamespacedNullMetric.new(collector, :null) }
    let(:events) { [LogStash::Event.new, LogStash::Event.new] }
    let(:default_execution_context) { LogStash::ExecutionContext.new(:main, filter_id, "filter",
@@ -19,6 +30,9 @@ describe LogStash::FilterDelegator do

    before :each do
      allow(metric).to receive(:namespace).with(anything).and_return(metric)
+     allow(metric).to receive(:counter).with(:in).and_return(counter_in)
+     allow(metric).to receive(:counter).with(:out).and_return(counter_out)
+     allow(metric).to receive(:counter).with(:duration_in_millis).and_return(counter_time)
    end

    let(:plugin_klass) do
@@ -58,7 +72,7 @@ describe LogStash::FilterDelegator do
    context "when the flush return events" do
      it "increments the out" do
        subject.multi_filter([LogStash::Event.new])
-       expect(metric).to receive(:increment).with(:out, 1)
+       expect(counter_out).to receive(:increment).with(1)
        subject.flush({})
      end
    end
@@ -76,12 +90,12 @@ describe LogStash::FilterDelegator do
    end

    it "has incremented :in" do
-     expect(metric).to receive(:increment).with(:in, events.size)
+     expect(counter_in).to receive(:increment).with(events.size)
      subject.multi_filter(events)
    end

    it "has not incremented :out" do
-     expect(metric).not_to receive(:increment).with(:out, anything)
+     expect(counter_out).not_to receive(:increment).with(anything)
      subject.multi_filter(events)
    end
  end
@@ -107,8 +121,8 @@ describe LogStash::FilterDelegator do
    end

    it "increments the in/out of the metric" do
-     expect(metric).to receive(:increment).with(:in, events.size)
-     expect(metric).to receive(:increment).with(:out, events.size * 2)
+     expect(counter_in).to receive(:increment).with(events.size)
+     expect(counter_out).to receive(:increment).with(events.size * 2)

      subject.multi_filter(events)
    end
@@ -136,8 +150,8 @@ describe LogStash::FilterDelegator do
    end

    it "increments the in/out of the metric" do
-     expect(metric).to receive(:increment).with(:in, events.size)
-     expect(metric).to receive(:increment).with(:out, events.size)
+     expect(counter_in).to receive(:increment).with(events.size)
+     expect(counter_out).to receive(:increment).with(events.size)

      subject.multi_filter(events)
    end
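
These spec updates track the reworked metrics wiring in lib/logstash/filter_delegator.rb (file 12, +9 -6): instead of expecting increment(:in, n) on the namespaced metric itself, the tests now stub metric.counter(:in / :out / :duration_in_millis) and assert against the returned counter objects. A hedged sketch of that pattern follows; the class name and timing details are illustrative placeholders, not the shipped delegator:

# Illustrative pattern only: resolve each counter once, then increment it directly.
class DelegatorMetricsSketch
  def initialize(filter, namespaced_metric)
    @filter       = filter
    @counter_in   = namespaced_metric.counter(:in)
    @counter_out  = namespaced_metric.counter(:out)
    @counter_time = namespaced_metric.counter(:duration_in_millis)
  end

  def multi_filter(events)
    @counter_in.increment(events.size)
    started_ms = (Time.now.to_f * 1000).to_i
    result = @filter.multi_filter(events)
    @counter_time.increment((Time.now.to_f * 1000).to_i - started_ms)
    @counter_out.increment(result.size)
    result
  end
end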
data/spec/logstash/instrument/periodic_poller/dlq_spec.rb
@@ -0,0 +1,17 @@
+ # encoding: utf-8
+ require "spec_helper"
+ require "logstash/instrument/periodic_poller/dlq"
+ require "logstash/instrument/collector"
+
+ describe LogStash::Instrument::PeriodicPoller::DeadLetterQueue do
+   subject { LogStash::Instrument::PeriodicPoller::DeadLetterQueue }
+
+   let(:metric) { LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new) }
+   let(:agent) { double("agent") }
+   let(:options) { {} }
+   subject(:dlq) { described_class.new(metric, agent, options) }
+
+   it "should initialize cleanly" do
+     expect { dlq }.not_to raise_error
+   end
+ end
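
This spec only fixes the constructor arity of the poller added in lib/logstash/instrument/periodic_poller/dlq.rb (file 18) and registered through periodic_pollers.rb (file 19). Assuming it follows the existing periodic-poller shape, a skeletal outline would be little more than the following (the collect body is a placeholder comment, not the shipped code):

# Skeletal outline only; assumes the periodic-poller contract of initialize + collect.
class DeadLetterQueuePollerSketch # the real class subclasses PeriodicPoller::Base
  def initialize(metric, agent, options = {})
    @metric  = metric
    @agent   = agent
    @options = options
  end

  def collect
    # The real poller would gather dead-letter-queue statistics from the agent's
    # running pipelines here and record them under the metric namespace.
  end
end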
data/spec/logstash/instrument/periodic_poller/jvm_spec.rb
@@ -44,7 +44,7 @@ describe LogStash::Instrument::PeriodicPoller::JVM do
    before do
      expect(LogStash::Environment).to receive(:windows?).and_return(false)
      expect(LogStash::Environment).to receive(:linux?).and_return(true)
-     expect(::File).to receive(:read).with("/proc/loadavg").and_raise("Didnt work out so well")
+     expect(::File).to receive(:read).with("/proc/loadavg").at_least(:once).and_raise("Didnt work out so well")
    end

    it "doesn't raise an exception" do
data/spec/logstash/legacy_ruby_event_spec.rb
@@ -399,17 +399,17 @@ describe LogStash::Event do
    end

    it "should assign current time when no timestamp" do
-     expect(LogStash::Event.new({}).timestamp.to_i).to be_within(1).of (Time.now.to_i)
+     expect(LogStash::Event.new({}).timestamp.to_i).to be_within(2).of (Time.now.to_i)
    end

    it "should tag for invalid value" do
      event = LogStash::Event.new("@timestamp" => "foo")
-     expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
+     expect(event.timestamp.to_i).to be_within(2).of Time.now.to_i
      expect(event.get("tags")).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
      expect(event.get(LogStash::Event::TIMESTAMP_FAILURE_FIELD)).to eq("foo")

      event = LogStash::Event.new("@timestamp" => 666)
-     expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
+     expect(event.timestamp.to_i).to be_within(2).of Time.now.to_i
      expect(event.get("tags")).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
      expect(event.get(LogStash::Event::TIMESTAMP_FAILURE_FIELD)).to eq(666)
    end
@@ -421,7 +421,7 @@ describe LogStash::Event do

    it "should tag for invalid string format" do
      event = LogStash::Event.new("@timestamp" => "foo")
-     expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i
+     expect(event.timestamp.to_i).to be_within(2).of Time.now.to_i
      expect(event.get("tags")).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
      expect(event.get(LogStash::Event::TIMESTAMP_FAILURE_FIELD)).to eq("foo")
    end
data/spec/logstash/modules/logstash_config_spec.rb
@@ -0,0 +1,56 @@
+ # encoding: utf-8
+ require "logstash/modules/logstash_config"
+
+ describe LogStash::Modules::LogStashConfig do
+   let(:mod) { instance_double("Modules", :directory => Stud::Temporary.directory, :module_name => "testing") }
+   let(:settings) { {"var.logstash.testing.pants" => "fancy" }}
+   subject { described_class.new(mod, settings) }
+
+   describe "configured inputs" do
+     context "when no inputs is send" do
+       it "returns the default" do
+         expect(subject.configured_inputs(["kafka"])).to include("kafka")
+       end
+     end
+
+     context "when inputs are send" do
+       let(:settings) { { "var.inputs" => "tcp" } }
+
+       it "returns the configured inputs" do
+         expect(subject.configured_inputs(["kafka"])).to include("tcp")
+       end
+
+       context "when alias is specified" do
+         let(:settings) { { "var.inputs" => "smartconnector" } }
+
+         it "returns the configured inputs" do
+           expect(subject.configured_inputs(["kafka"], { "smartconnector" => "tcp" })).to include("tcp", "smartconnector")
+         end
+       end
+     end
+   end
+
+   describe "array to logstash array string" do
+     it "return an escaped string" do
+       expect(subject.array_to_string(["hello", "ninja"])).to eq("['hello', 'ninja']")
+     end
+   end
+
+   describe "alias modules options" do
+     let(:alias_table) do
+       { "var.logstash.testing" => "var.logstash.better" }
+     end
+
+     before do
+       subject.alias_settings_keys!(alias_table)
+     end
+
+     it "allow to retrieve settings" do
+       expect(subject.setting("var.logstash.better.pants", "dont-exist")).to eq("fancy")
+     end
+
+     it "allow to retrieve settings with the original name" do
+       expect(subject.setting("var.logstash.testing.pants", "dont-exist")).to eq("fancy")
+     end
+   end
+ end
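
The helpers exercised here live in lib/logstash/modules/logstash_config.rb (file 29, +89 -17). For the two simplest expectations, a hedged sketch of behaviour that would satisfy them looks like this (hypothetical class; only the method names and expected outputs come from the spec):

# Hypothetical sketch matching the setting/array_to_string expectations above.
class LogStashConfigSketch
  def initialize(settings)
    @settings = settings
  end

  # setting("var.logstash.testing.pants", "dont-exist") => "fancy"
  def setting(name, default)
    @settings.fetch(name, default)
  end

  # array_to_string(["hello", "ninja"]) => "['hello', 'ninja']"
  def array_to_string(array)
    "[#{array.map { |item| "'#{item}'" }.join(", ")}]"
  end
end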
data/spec/logstash/modules/scaffold_spec.rb
@@ -0,0 +1,234 @@
+ # encoding: utf-8
+ #
+ require "logstash/namespace"
+ require "logstash/elasticsearch_client"
+ require "logstash/modules/kibana_client"
+ require "logstash/modules/kibana_config"
+ require "logstash/modules/scaffold"
+ require "logstash/modules/elasticsearch_importer"
+ require "logstash/modules/kibana_importer"
+
+ require_relative "../../support/helpers"
+
+ describe LogStash::Modules::Scaffold do
+   let(:base_dir) { "gem-home" }
+   let(:mname) { "foo" }
+   subject(:test_module) { described_class.new(mname, base_dir) }
+   let(:module_settings) do
+     {
+       "var.elasticsearch.hosts" => "es.mycloud.com:9200",
+       "var.elasticsearch.user" => "foo",
+       "var.elasticsearch.password" => "password",
+       "var.input.tcp.port" => 5606,
+     }
+   end
+   let(:dashboard_hash) do
+     {
+       "hits" => 0,
+       "timeRestore" => false,
+       "description" => "",
+       "title" => "Filebeat Apache2 Dashboard",
+       "uiStateJSON" => "{}",
+       "panelsJSON" => '[{"col":1,"id":"foo-c","panelIndex":1,"row":1,"size_x":12,"size_y":3,"type":"visualization"},{"id":"foo-d","type":"search","panelIndex":7,"size_x":12,"size_y":3,"col":1,"row":11,"columns":["apache2.error.client","apache2.error.level","apache2.error.module","apache2.error.message"],"sort":["@timestamp","desc"]}]',
+       "optionsJSON" => "{}",
+       "version" => 1,
+       "kibanaSavedObjectMeta" => {
+         "searchSourceJSON" => "{}"
+       }
+     }
+   end
+   let(:viz_hash) do
+     {
+       "visState" => "",
+       "description" => "",
+       "title" => "foo-c",
+       "uiStateJSON" => "",
+       "version" => 1,
+       "savedSearchId" => "foo-e",
+       "kibanaSavedObjectMeta" => {}
+     }
+   end
+   let(:index_pattern_hash) do
+     {
+       "title" => "foo-*",
+       "timeFieldName" => "time",
+       "fieldFormatMap" => "{some map}",
+       "fields" => "[some array]"
+     }
+   end
+   context "logstash operation" do
+     let(:ls_conf) do
+       <<-ERB
+       input {
+         tcp {
+           port => <%= setting("var.input.tcp.port", 45) %>
+           host => <%= setting("var.input.tcp.host", "localhost") %>
+           type => <%= setting("var.input.tcp.type", "server") %>
+         }
+       }
+       filter {
+
+       }
+       output {
+         <%= elasticsearch_output_config() %>
+       }
+       ERB
+     end
+
+     before do
+       allow(LogStash::Modules::FileReader).to receive(:read).and_return(ls_conf)
+     end
+
+     it "provides a logstash config" do
+       expect(test_module.logstash_configuration).to be_nil
+       test_module.with_settings(module_settings)
+       expect(test_module.logstash_configuration).not_to be_nil
+       config_string = test_module.config_string
+       expect(config_string).to include("port => 5606")
+       expect(config_string).to include("hosts => ['es.mycloud.com:9200']")
+     end
+   end
+
+   context "elasticsearch operation" do
+     it "provides the elasticsearch mapping file paths" do
+       test_module.with_settings(module_settings)
+       expect(test_module.elasticsearch_configuration).not_to be_nil
+       files = test_module.elasticsearch_configuration.resources
+       expect(files.size).to eq(1)
+       expect(files.first).to be_a(LogStash::Modules::ElasticsearchResource)
+       expect(files.first.content_path).to eq("gem-home/elasticsearch/foo.json")
+       expect(files.first.import_path).to eq("_template/foo")
+     end
+   end
+
+   context "kibana operation" do
+     before do
+       # allow(LogStash::Modules::FileReader).to receive(:read_json).and_return({})
+       allow(LogStash::Modules::FileReader).to receive(:read_json).with("gem-home/kibana/dashboard/foo.json").and_return(["Foo-Dashboard"])
+       allow(LogStash::Modules::FileReader).to receive(:read_json).with("gem-home/kibana/dashboard/Foo-Dashboard.json").and_return(dashboard_hash)
+       allow(LogStash::Modules::FileReader).to receive(:read_json).with("gem-home/kibana/visualization/foo-c.json").and_return(viz_hash)
+       allow(LogStash::Modules::FileReader).to receive(:read_json).with("gem-home/kibana/search/foo-d.json").and_return({"d" => "search"})
+       allow(LogStash::Modules::FileReader).to receive(:read_json).with("gem-home/kibana/search/foo-e.json").and_return({"e" => "search"})
+       allow(LogStash::Modules::FileReader).to receive(:read_json).with("gem-home/kibana/index-pattern/foo.json").and_return(index_pattern_hash)
+     end
+
+     it "provides a list of importable files" do
+       expect(test_module.kibana_configuration).to be_nil
+       test_module.with_settings(module_settings)
+       expect(test_module.kibana_configuration).not_to be_nil
+       resources = test_module.kibana_configuration.resources
+       expect(resources.size).to eq(2)
+       resource1 = resources[0]
+       resource2 = resources[1]
+       expect(resource1).to be_a(LogStash::Modules::KibanaSettings)
+       expect(resource2).to be_a(LogStash::Modules::KibanaDashboards)
+       expect(resource1.import_path).to eq("api/kibana/settings")
+       expect(resource1.content).to be_a(Array)
+       expect(resource1.content.size).to eq(1)
+
+       test_object = resource1.content[0]
+       expect(test_object).to be_a(LogStash::Modules::KibanaSettings::Setting)
+       expect(test_object.name).to eq("defaultIndex")
+       expect(test_object.value).to eq("foo-*")
+
+       expect(resource2.import_path).to eq("api/kibana/dashboards/import")
+       expect(resource2.content).to be_a(Array)
+       expect(resource2.content.size).to eq(5)
+       expect(resource2.content.map{|o| o.class}.uniq).to eq([LogStash::Modules::KibanaResource])
+
+       test_object = resource2.content[0]
+       expect(test_object.content_id).to eq("foo-*")
+       expect(test_object.content_type).to eq("index-pattern")
+       expect(test_object.content_as_object).to eq(index_pattern_hash)
+
+       test_object = resource2.content[1]
+       expect(test_object.content_id).to eq("Foo-Dashboard")
+       expect(test_object.content_type).to eq("dashboard")
+       expect(test_object.content_as_object).to eq(dashboard_hash)
+
+       test_object = resource2.content[2]
+       expect(test_object.content_id).to eq("foo-c") # <- the panels can contain items from other folders
+       expect(test_object.content_type).to eq("visualization")
+       expect(test_object.content_as_object).to eq(viz_hash)
+       expect(test_object.content_as_object["savedSearchId"]).to eq("foo-e")
+
+       test_object = resource2.content[3]
+       expect(test_object.content_id).to eq("foo-d") # <- the panels can contain items from other folders
+       expect(test_object.content_type).to eq("search")
+       expect(test_object.content_as_object).to eq("d" => "search")
+
+       test_object = resource2.content[4]
+       expect(test_object.content_id).to eq("foo-e") # <- the visualization can contain items from the search folder
+       expect(test_object.content_type).to eq("search")
+       expect(test_object.content_as_object).to eq("e" => "search")
+     end
+   end
+
+   context "importing to elasticsearch stubbed client" do
+     let(:mname) { "tester" }
+     let(:base_dir) { File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "modules_test_files", "modules", "#{mname}", "configuration")) }
+     let(:response) { double(:response) }
+     let(:client) { double(:client) }
+     let(:kbnclient) { double(:kbnclient) }
+     let(:paths) { [] }
+     let(:expected_paths) { ["_template/tester", "api/kibana/settings", "api/kibana/dashboards/import"] }
+     let(:contents) { [] }
+     let(:expected_objects) do
+       [
+         "index-pattern tester-*",
+         "dashboard FW-Dashboard",
+         "visualization FW-Viz-1",
+         "visualization FW-Viz-2",
+         "search Search-Tester"
+       ]
+     end
+
+     before do
+       allow(response).to receive(:status).and_return(404)
+       allow(client).to receive(:head).and_return(response)
+       allow(kbnclient).to receive(:version).and_return("9.8.7-6")
+     end
+
+     it "calls the import method" do
+       expect(client).to receive(:put).once do |path, content|
+         paths << path
+         LogStash::ElasticsearchClient::Response.new(201, "", {})
+       end
+       expect(kbnclient).to receive(:post).twice do |path, content|
+         paths << path
+         contents << content
+         LogStash::Modules::KibanaClient::Response.new(201, "", {})
+       end
+       test_module.with_settings(module_settings)
+       test_module.import(LogStash::Modules::ElasticsearchImporter.new(client), LogStash::Modules::KibanaImporter.new(kbnclient))
+       expect(paths).to eq(expected_paths)
+       expect(contents[0]).to eq({"changes" => {"defaultIndex" => "tester-*"}})
+       second_kbn_post = contents[1]
+       expect(second_kbn_post[:version]).to eq("9.8.7-6")
+       expect(second_kbn_post[:objects]).to be_a(Array)
+       expect(second_kbn_post[:objects].size).to eq(5)
+       objects_types_ids = second_kbn_post[:objects].map {|h| "#{h["type"]} #{h["id"]}"}
+       expect(objects_types_ids).to eq(expected_objects)
+     end
+   end
+
+   context "import 4 realz", :skip => "integration" do
+     let(:mname) { "cef" }
+     let(:base_dir) { File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "modules_test_files", "#{mname}")) }
+     let(:module_settings) do
+       {
+         "var.elasticsearch.hosts" => "localhost:9200",
+         "var.elasticsearch.user" => "foo",
+         "var.elasticsearch.password" => "password",
+         "var.input.tcp.port" => 5606,
+       }
+     end
+     it "puts stuff in ES" do
+       test_module.with_settings(module_settings)
+       client = LogStash::ElasticsearchClient.build(module_settings)
+       import_engine = LogStash::Modules::Importer.new(client)
+       test_module.import(import_engine)
+       expect(1).to eq(1)
+     end
+   end
+ end
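
The stubbed-client example above also documents the intended calling sequence for a module scaffold. Condensed into a usage sketch (module_settings, es_client and kbn_client are placeholders built elsewhere; the method and importer names come from the spec and from files 22 and 27 in the list):

# Condensed usage flow mirroring the stubbed-client spec above.
scaffold = LogStash::Modules::Scaffold.new("tester", "/path/to/module/configuration")
scaffold.with_settings(module_settings)          # resolve logstash/elasticsearch/kibana configuration

pipeline_config = scaffold.config_string         # ERB-rendered pipeline config for this module
scaffold.import(
  LogStash::Modules::ElasticsearchImporter.new(es_client),  # puts the _template/<module> mapping
  LogStash::Modules::KibanaImporter.new(kbn_client)         # posts settings and dashboards to Kibana
)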