logstash-output-opensearch 1.0.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/ADMINS.md +29 -0
  5. data/CODE_OF_CONDUCT.md +25 -0
  6. data/CONTRIBUTING.md +99 -0
  7. data/DEVELOPER_GUIDE.md +208 -0
  8. data/Gemfile +20 -0
  9. data/LICENSE +202 -0
  10. data/MAINTAINERS.md +71 -0
  11. data/NOTICE +2 -0
  12. data/README.md +37 -0
  13. data/RELEASING.md +36 -0
  14. data/SECURITY.md +3 -0
  15. data/lib/logstash/outputs/opensearch.rb +449 -0
  16. data/lib/logstash/outputs/opensearch/distribution_checker.rb +44 -0
  17. data/lib/logstash/outputs/opensearch/http_client.rb +465 -0
  18. data/lib/logstash/outputs/opensearch/http_client/manticore_adapter.rb +140 -0
  19. data/lib/logstash/outputs/opensearch/http_client/pool.rb +467 -0
  20. data/lib/logstash/outputs/opensearch/http_client_builder.rb +182 -0
  21. data/lib/logstash/outputs/opensearch/template_manager.rb +60 -0
  22. data/lib/logstash/outputs/opensearch/templates/ecs-disabled/1x.json +44 -0
  23. data/lib/logstash/outputs/opensearch/templates/ecs-disabled/7x.json +44 -0
  24. data/lib/logstash/plugin_mixins/opensearch/api_configs.rb +168 -0
  25. data/lib/logstash/plugin_mixins/opensearch/common.rb +294 -0
  26. data/lib/logstash/plugin_mixins/opensearch/noop_distribution_checker.rb +18 -0
  27. data/logstash-output-opensearch.gemspec +40 -0
  28. data/spec/fixtures/_nodes/nodes.json +74 -0
  29. data/spec/fixtures/htpasswd +2 -0
  30. data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
  31. data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
  32. data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
  33. data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
  34. data/spec/integration/outputs/compressed_indexing_spec.rb +76 -0
  35. data/spec/integration/outputs/create_spec.rb +76 -0
  36. data/spec/integration/outputs/delete_spec.rb +72 -0
  37. data/spec/integration/outputs/index_spec.rb +164 -0
  38. data/spec/integration/outputs/index_version_spec.rb +110 -0
  39. data/spec/integration/outputs/ingest_pipeline_spec.rb +82 -0
  40. data/spec/integration/outputs/metrics_spec.rb +75 -0
  41. data/spec/integration/outputs/no_opensearch_on_startup_spec.rb +67 -0
  42. data/spec/integration/outputs/painless_update_spec.rb +147 -0
  43. data/spec/integration/outputs/parent_spec.rb +103 -0
  44. data/spec/integration/outputs/retry_spec.rb +182 -0
  45. data/spec/integration/outputs/routing_spec.rb +70 -0
  46. data/spec/integration/outputs/sniffer_spec.rb +70 -0
  47. data/spec/integration/outputs/templates_spec.rb +105 -0
  48. data/spec/integration/outputs/update_spec.rb +123 -0
  49. data/spec/opensearch_spec_helper.rb +141 -0
  50. data/spec/spec_helper.rb +19 -0
  51. data/spec/unit/http_client_builder_spec.rb +194 -0
  52. data/spec/unit/outputs/error_whitelist_spec.rb +62 -0
  53. data/spec/unit/outputs/opensearch/http_client/manticore_adapter_spec.rb +159 -0
  54. data/spec/unit/outputs/opensearch/http_client/pool_spec.rb +306 -0
  55. data/spec/unit/outputs/opensearch/http_client_spec.rb +292 -0
  56. data/spec/unit/outputs/opensearch/template_manager_spec.rb +36 -0
  57. data/spec/unit/outputs/opensearch_proxy_spec.rb +112 -0
  58. data/spec/unit/outputs/opensearch_spec.rb +800 -0
  59. data/spec/unit/outputs/opensearch_ssl_spec.rb +179 -0
  60. metadata +289 -0
  61. metadata.gz.sig +0 -0
@@ -0,0 +1,306 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

# Unit specs for the HTTP connection pool: startup/shutdown lifecycle,
# the background resurrectionist and sniffer threads, connection
# load-balancing, version tracking, and distribution checking.

require "logstash/devutils/rspec/spec_helper"
require "logstash/outputs/opensearch/http_client"
require 'cabin'

describe LogStash::Outputs::OpenSearch::HttpClient::Pool do
  let(:logger) { Cabin::Channel.get }
  let(:adapter) { LogStash::Outputs::OpenSearch::HttpClient::ManticoreAdapter.new(logger) }
  let(:initial_urls) { [::LogStash::Util::SafeURI.new("http://localhost:9200")] }
  let(:options) { {:resurrect_delay => 2, :url_normalizer => proc {|u| u}} } # Shorten the delay a bit to speed up tests
  let(:node_versions) { [ "7.0.0" ] }
  let(:get_distribution) { "oss" }

  subject { described_class.new(logger, adapter, initial_urls, options) }

  let(:manticore_double) { double("manticore a") }
  before(:each) do
    response_double = double("manticore response").as_null_object
    # Allow health checks
    allow(manticore_double).to receive(:head).with(any_args).and_return(response_double)
    allow(manticore_double).to receive(:get).with(any_args).and_return(response_double)
    allow(manticore_double).to receive(:close)

    allow(::Manticore::Client).to receive(:new).and_return(manticore_double)

    allow(subject).to receive(:get_version).with(any_args).and_return(*node_versions)
    allow(subject.distribution_checker).to receive(:get_distribution).and_return(get_distribution)
  end

  after do
    subject.close
  end

  describe "initialization" do
    it "should be successful" do
      expect { subject }.not_to raise_error
      subject.start
    end
  end

  describe "the resurrectionist" do
    before(:each) { subject.start }
    it "should start the resurrectionist when created" do
      expect(subject.resurrectionist_alive?).to eql(true)
    end

    it "should attempt to resurrect connections after the resurrect delay" do
      expect(subject).to receive(:healthcheck!).once
      sleep(subject.resurrect_delay + 1)
    end

    describe "healthcheck url handling" do
      let(:initial_urls) { [::LogStash::Util::SafeURI.new("http://localhost:9200")] }

      context "and not setting healthcheck_path" do
        it "performs the healthcheck to the root" do
          expect(adapter).to receive(:perform_request) do |url, method, req_path, _, _|
            expect(method).to eq(:head)
            expect(url.path).to be_empty
            expect(req_path).to eq("/")
          end
          subject.healthcheck!
        end
      end

      context "and setting healthcheck_path" do
        let(:healthcheck_path) { "/my/health" }
        let(:options) { super().merge(:healthcheck_path => healthcheck_path) }
        it "performs the healthcheck to the healthcheck_path" do
          expect(adapter).to receive(:perform_request) do |url, method, req_path, _, _|
            expect(method).to eq(:head)
            expect(url.path).to be_empty
            expect(req_path).to eq(healthcheck_path)
          end
          subject.healthcheck!
        end
      end
    end
  end

  describe 'resolving the address from OpenSearch node info' do
    let(:host) { "unit-test-node"}
    let(:ip_address) { "192.168.1.0"}
    let(:port) { 9200 }

    context 'with host and ip address' do
      # publish_address of the form "hostname/ip:port" — hostname wins
      let(:publish_address) { "#{host}/#{ip_address}:#{port}"}
      it 'should correctly extract the host' do
        expect(subject.address_str_to_uri(publish_address)).to eq(LogStash::Util::SafeURI.new("#{host}:#{port}"))
      end
    end
    context 'with ip address' do
      let(:publish_address) { "#{ip_address}:#{port}"}
      it 'should correctly extract the ip address' do
        expect(subject.address_str_to_uri(publish_address)).to eq(LogStash::Util::SafeURI.new("#{ip_address}:#{port}"))
      end
    end
  end

  describe "the sniffer" do
    before(:each) { subject.start }
    it "should not start the sniffer by default" do
      expect(subject.sniffer_alive?).to eql(nil)
    end

    context "when enabled" do
      let(:options) { super().merge(:sniffing => true)}

      it "should start the sniffer" do
        expect(subject.sniffer_alive?).to eql(true)
      end
    end
  end

  describe "closing" do
    before do
      subject.start
      # Simulate a single in-use connection on the first check, then none,
      # so close must poll in_use_connections twice before finishing
      allow(adapter).to receive(:close).and_call_original
      allow(subject).to receive(:wait_for_in_use_connections).and_call_original
      allow(subject).to receive(:in_use_connections).and_return([subject.empty_url_meta], [])
      allow(subject).to receive(:start)
      subject.close
    end

    it "should close the adapter" do
      expect(adapter).to have_received(:close)
    end

    it "should stop the resurrectionist" do
      expect(subject.resurrectionist_alive?).to eql(false)
    end

    it "should stop the sniffer" do
      # If no sniffer (the default) returns nil
      expect(subject.sniffer_alive?).to be_falsey
    end

    it "should wait for in use connections to terminate" do
      expect(subject).to have_received(:wait_for_in_use_connections).once
      expect(subject).to have_received(:in_use_connections).twice
    end
  end

  describe "connection management" do
    before(:each) { subject.start }
    context "with only one URL in the list" do
      it "should use the only URL in 'with_connection'" do
        subject.with_connection do |c|
          expect(c).to eq(initial_urls.first)
        end
      end
    end

    context "with multiple URLs in the list" do
      before :each do
        allow(adapter).to receive(:perform_request).with(anything, :head, subject.healthcheck_path, {}, nil)
      end
      let(:initial_urls) { [ ::LogStash::Util::SafeURI.new("http://localhost:9200"), ::LogStash::Util::SafeURI.new("http://localhost:9201"), ::LogStash::Util::SafeURI.new("http://localhost:9202") ] }

      it "should minimize the number of connections to a single URL" do
        connected_urls = []

        # If we make 2x the number requests as we have URLs we should
        # connect to each URL exactly 2 times
        (initial_urls.size * 2).times do
          url, _meta = subject.get_connection
          connected_urls << url
        end

        connected_urls.each { |url| subject.return_connection(url) }
        initial_urls.each do |url|
          conn_count = connected_urls.count { |connected| connected == url }
          expect(conn_count).to eql(2)
        end
      end

      it "should correctly resurrect the dead" do
        u, _meta = subject.get_connection

        # The resurrectionist will call this to check on the backend
        response = double("response")
        expect(adapter).to receive(:perform_request).with(u, :head, subject.healthcheck_path, {}, nil).and_return(response)

        subject.return_connection(u)
        subject.mark_dead(u, Exception.new)

        expect(subject.url_meta(u)[:state]).to eql(:dead)
        sleep subject.resurrect_delay + 1
        expect(subject.url_meta(u)[:state]).to eql(:alive)
      end
    end
  end

  describe "version tracking" do
    let(:initial_urls) { [
      ::LogStash::Util::SafeURI.new("http://somehost:9200"),
      ::LogStash::Util::SafeURI.new("http://otherhost:9201")
    ] }

    before(:each) do
      allow(subject).to receive(:perform_request_to_url).and_return(nil)
      subject.start
    end

    it "picks the largest major version" do
      expect(subject.maximum_seen_major_version).to eq(7)
    end

    context "if there are nodes with multiple major versions" do
      let(:node_versions) { [ "0.0.0", "7.0.0" ] }
      it "picks the largest major version" do
        expect(subject.maximum_seen_major_version).to eq(7)
      end
    end
  end

  describe "distribution checking" do
    before(:each) do
      allow(subject).to receive(:health_check_request)
    end

    let(:options) do
      super().merge(:distribution_checker => distribution_checker)
    end

    context 'when DistributionChecker#is_supported? returns false' do
      let(:distribution_checker) { double('DistributionChecker', :is_supported? => false) }

      it 'does not mark the URL as active' do
        subject.update_initial_urls
        expect(subject.alive_urls_count).to eq(0)
      end
    end

    context 'when DistributionChecker#is_supported? returns true' do
      let(:distribution_checker) { double('DistributionChecker', :is_supported? => true) }

      it 'marks the URL as active' do
        subject.update_initial_urls
        expect(subject.alive_urls_count).to eq(1)
      end
    end
  end

  describe 'distribution checking with cluster output' do
    let(:options) do
      super().merge(:distribution_checker => LogStash::Outputs::OpenSearch::DistributionChecker.new(logger))
    end

    before(:each) do
      allow(subject).to receive(:health_check_request)
    end

    context 'when using opensearch' do

      context "if cluster doesn't return a valid distribution" do
        let(:get_distribution) { nil }

        it 'marks the url as dead' do
          subject.update_initial_urls
          expect(subject.alive_urls_count).to eq(0)
        end

        it 'logs message' do
          expect(subject.distribution_checker).to receive(:log_not_supported).once.and_call_original
          subject.update_initial_urls
        end
      end

      context 'if cluster returns opensearch' do
        let(:get_distribution) { 'opensearch' }

        it "marks the url as active" do
          subject.update_initial_urls
          expect(subject.alive_urls_count).to eq(1)
        end

        it 'does not log message' do
          expect(subject.distribution_checker).to_not receive(:log_not_supported)
          subject.update_initial_urls
        end
      end

      context 'if cluster returns oss' do
        let(:get_distribution) { 'oss' }

        it 'marks the url as active' do
          subject.update_initial_urls
          expect(subject.alive_urls_count).to eq(1)
        end

        it 'does not log message' do
          expect(subject.distribution_checker).to_not receive(:log_not_supported)
          subject.update_initial_urls
        end
      end
    end
  end
end
@@ -0,0 +1,292 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

# Unit specs for the HTTP client: host/URL normalization, GET and
# template requests, bulk-response joining, bulk batching/compression,
# and sniffer wiring.

require_relative "../../../../spec/spec_helper"
require "logstash/outputs/opensearch/http_client"
require "cabin"

describe LogStash::Outputs::OpenSearch::HttpClient do
  let(:ssl) { nil }
  let(:base_options) do
    opts = {
      :hosts => [::LogStash::Util::SafeURI.new("127.0.0.1")],
      :logger => Cabin::Channel.get,
      :metric => ::LogStash::Instrument::NullMetric.new(:dummy).namespace(:alsodummy)
    }

    if !ssl.nil? # Shortcut to set this
      opts[:client_settings] = {:ssl => {:enabled => ssl}}
    end

    opts
  end

  describe "Host/URL Parsing" do
    subject { described_class.new(base_options) }

    let(:true_hostname) { "my-dash.hostname" }
    let(:ipv6_hostname) { "[::1]" }
    let(:ipv4_hostname) { "127.0.0.1" }
    let(:port) { 9202 }
    let(:hostname_port) { "#{hostname}:#{port}" }
    let(:hostname_port_uri) { ::LogStash::Util::SafeURI.new("//#{hostname_port}") }
    let(:http_hostname_port) { ::LogStash::Util::SafeURI.new("http://#{hostname_port}") }
    let(:https_hostname_port) { ::LogStash::Util::SafeURI.new("https://#{hostname_port}") }
    let(:http_hostname_port_path) { ::LogStash::Util::SafeURI.new("http://#{hostname_port}/path") }

    shared_examples("proper host handling") do
      it "should properly transform a host:port string to a URL" do
        expect(subject.host_to_url(hostname_port_uri).to_s).to eq(http_hostname_port.to_s + "/")
      end

      it "should not raise an error with a / for a path" do
        expect(subject.host_to_url(::LogStash::Util::SafeURI.new("#{http_hostname_port}/"))).to eq(LogStash::Util::SafeURI.new("#{http_hostname_port}/"))
      end

      it "should parse full URLs correctly" do
        expect(subject.host_to_url(http_hostname_port).to_s).to eq(http_hostname_port.to_s + "/")
      end

      describe "ssl" do
        context "when SSL is true" do
          let(:ssl) { true }
          let(:base_options) { super().merge(:hosts => [http_hostname_port]) }

          it "should refuse to handle an http url" do
            expect {
              subject.host_to_url(http_hostname_port)
            }.to raise_error(LogStash::ConfigurationError)
          end
        end

        context "when SSL is false" do
          let(:ssl) { false }
          let(:base_options) { super().merge(:hosts => [https_hostname_port]) }

          it "should refuse to handle an https url" do
            expect {
              subject.host_to_url(https_hostname_port)
            }.to raise_error(LogStash::ConfigurationError)
          end
        end

        describe "ssl is nil" do
          let(:base_options) { super().merge(:hosts => [https_hostname_port]) }
          it "should handle an ssl url correctly when SSL is nil" do
            subject
            expect(subject.host_to_url(https_hostname_port).to_s).to eq(https_hostname_port.to_s + "/")
          end
        end
      end

      describe "path" do
        let(:url) { http_hostname_port_path }
        let(:base_options) { super().merge(:hosts => [url]) }

        it "should allow paths in a url" do
          expect(subject.host_to_url(url)).to eq(url)
        end

        context "with the path option set" do
          let(:base_options) { super().merge(:client_settings => {:path => "/otherpath"}) }

          it "should not allow paths in two places" do
            expect {
              subject.host_to_url(url)
            }.to raise_error(LogStash::ConfigurationError)
          end
        end

        context "with a path missing a leading /" do
          let(:url) { http_hostname_port }
          let(:base_options) { super().merge(:client_settings => {:path => "otherpath"}) }

          it "should automatically insert a / in front of path overlays" do
            expected = url.clone
            expected.path = url.path + "/otherpath"
            expect(subject.host_to_url(url)).to eq(expected)
          end
        end
      end
    end

    describe "a regular hostname" do
      let(:hostname) { true_hostname }
      include_examples("proper host handling")
    end

    describe "an ipv4 host" do
      let(:hostname) { ipv4_hostname }
      include_examples("proper host handling")
    end

    describe "an ipv6 host" do
      let(:hostname) { ipv6_hostname }
      include_examples("proper host handling")
    end
  end

  describe "get" do
    subject { described_class.new(base_options) }
    let(:body) { "foobar" }
    let(:path) { "/hello-id" }
    let(:get_response) {
      double("response", :body => LogStash::Json::dump({ "body" => body }))
    }

    it "returns the hash response" do
      expect(subject.pool).to receive(:get).with(path, nil).and_return(get_response)
      expect(subject.get(path)["body"]).to eq(body)
    end
  end

  describe "index template" do
    subject { described_class.new(base_options) }
    let(:template_name) { "logstash" }
    let(:template) { {} }
    let(:get_response) {
      double("response", :body => {})
    }

    it "should call index template" do
      expect(subject.pool).to receive(:put).with("_template/#{template_name}", nil, anything).and_return(get_response)
      subject.template_put(template_name, template)
    end
  end

  describe "join_bulk_responses" do
    subject { described_class.new(base_options) }

    context "when items key is available" do
      require "json"
      let(:bulk_response) {
        LogStash::Json.load('[{
          "items": [{
            "delete": {
              "_index": "website",
              "_type": "blog",
              "_id": "123",
              "_version": 2,
              "status": 200,
              "found": true
            }
          }],
          "errors": false
        }]')
      }
      it "should be handled properly" do
        joined = subject.send(:join_bulk_responses, bulk_response)
        expect(joined["errors"]).to be false
        expect(joined["items"].size).to be 1
      end
    end

    context "when items key is not available" do
      require "json"
      let(:bulk_response) {
        JSON.parse('[{
          "took": 4,
          "errors": false
        }]')
      }
      it "should be handled properly" do
        joined = subject.send(:join_bulk_responses, bulk_response)
        expect(joined["errors"]).to be false
        expect(joined["items"].size).to be 0
      end
    end
  end

  describe "#bulk" do
    subject(:http_client) { described_class.new(base_options) }

    require "json"
    let(:message) { "hey" }
    let(:actions) { [
      ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message}],
    ]}

    [true, false].each do |http_compression_enabled|
      context "with `http_compression => #{http_compression_enabled}`" do

        let(:base_options) { super().merge(:client_settings => {:http_compression => http_compression_enabled}) }

        before(:each) do
          if http_compression_enabled
            expect(http_client).to receive(:gzip_writer).at_least(:once).and_call_original
          else
            expect(http_client).to_not receive(:gzip_writer)
          end
        end

        context "if a message is over TARGET_BULK_BYTES" do
          let(:target_bulk_bytes) { LogStash::Outputs::OpenSearch::TARGET_BULK_BYTES }
          let(:message) { "a" * (target_bulk_bytes + 1) }

          it "should be handled properly" do
            allow(subject).to receive(:join_bulk_responses)
            expect(subject).to receive(:bulk_send).once do |data|
              if !http_compression_enabled
                expect(data.size).to be > target_bulk_bytes
              else
                expect(Zlib.gunzip(data.string).size).to be > target_bulk_bytes
              end
            end
            subject.send(:bulk, actions)
          end
        end

        context "with two messages" do
          let(:message1) { "hey" }
          let(:message2) { "you" }
          let(:actions) { [
            ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message1}],
            ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message2}],
          ]}
          it "executes one bulk_send operation" do
            allow(subject).to receive(:join_bulk_responses)
            expect(subject).to receive(:bulk_send).once
            subject.send(:bulk, actions)
          end

          context "if one exceeds TARGET_BULK_BYTES" do
            let(:target_bulk_bytes) { LogStash::Outputs::OpenSearch::TARGET_BULK_BYTES }
            let(:message1) { "a" * (target_bulk_bytes + 1) }
            it "executes two bulk_send operations" do
              allow(subject).to receive(:join_bulk_responses)
              expect(subject).to receive(:bulk_send).twice
              subject.send(:bulk, actions)
            end
          end
        end
      end
    end
  end

  describe "sniffing" do
    let(:client) { LogStash::Outputs::OpenSearch::HttpClient.new(base_options.merge(client_opts)) }

    context "with sniffing enabled" do
      let(:client_opts) { {:sniffing => true, :sniffing_delay => 1 } }

      it "should start the sniffer" do
        expect(client.pool.sniffing).to be_truthy
      end
    end

    context "with sniffing disabled" do
      let(:client_opts) { {:sniffing => false} }

      it "should not start the sniffer" do
        expect(client.pool.sniffing).to be_falsey
      end
    end
  end
end