logstash-output-elasticsearch 10.8.6-java → 11.0.3-java

Files changed (49)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +17 -0
  3. data/docs/index.asciidoc +132 -22
  4. data/lib/logstash/outputs/elasticsearch.rb +125 -64
  5. data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +233 -0
  6. data/lib/logstash/outputs/elasticsearch/http_client.rb +9 -7
  7. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +49 -62
  8. data/lib/logstash/outputs/elasticsearch/ilm.rb +13 -45
  9. data/lib/logstash/outputs/elasticsearch/license_checker.rb +26 -23
  10. data/lib/logstash/outputs/elasticsearch/template_manager.rb +4 -6
  11. data/lib/logstash/outputs/elasticsearch/templates/ecs-v1/elasticsearch-8x.json +1 -0
  12. data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +157 -153
  13. data/lib/logstash/plugin_mixins/elasticsearch/common.rb +71 -58
  14. data/logstash-output-elasticsearch.gemspec +3 -3
  15. data/spec/es_spec_helper.rb +7 -12
  16. data/spec/fixtures/_nodes/{5x_6x.json → 6x.json} +5 -5
  17. data/spec/integration/outputs/compressed_indexing_spec.rb +47 -46
  18. data/spec/integration/outputs/data_stream_spec.rb +61 -0
  19. data/spec/integration/outputs/delete_spec.rb +49 -51
  20. data/spec/integration/outputs/ilm_spec.rb +236 -248
  21. data/spec/integration/outputs/index_spec.rb +5 -2
  22. data/spec/integration/outputs/index_version_spec.rb +78 -82
  23. data/spec/integration/outputs/ingest_pipeline_spec.rb +58 -58
  24. data/spec/integration/outputs/painless_update_spec.rb +74 -164
  25. data/spec/integration/outputs/parent_spec.rb +67 -75
  26. data/spec/integration/outputs/retry_spec.rb +6 -6
  27. data/spec/integration/outputs/sniffer_spec.rb +15 -54
  28. data/spec/integration/outputs/templates_spec.rb +79 -81
  29. data/spec/integration/outputs/update_spec.rb +99 -101
  30. data/spec/spec_helper.rb +10 -0
  31. data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +528 -0
  32. data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +1 -0
  33. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +36 -29
  34. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +2 -3
  35. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +10 -12
  36. data/spec/unit/outputs/elasticsearch_proxy_spec.rb +1 -2
  37. data/spec/unit/outputs/elasticsearch_spec.rb +176 -41
  38. data/spec/unit/outputs/elasticsearch_ssl_spec.rb +1 -2
  39. data/spec/unit/outputs/error_whitelist_spec.rb +3 -2
  40. data/spec/unit/outputs/license_check_spec.rb +0 -16
  41. metadata +29 -36
  42. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-2x.json +0 -95
  43. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-5x.json +0 -46
  44. data/spec/fixtures/_nodes/2x_1x.json +0 -27
  45. data/spec/fixtures/scripts/groovy/scripted_update.groovy +0 -2
  46. data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +0 -2
  47. data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +0 -2
  48. data/spec/integration/outputs/groovy_update_spec.rb +0 -150
  49. data/spec/integration/outputs/templates_5x_spec.rb +0 -98
data/lib/logstash/plugin_mixins/elasticsearch/common.rb
@@ -5,7 +5,7 @@ module LogStash; module PluginMixins; module ElasticSearch
 
     # This module defines common methods that can be reused by alternate elasticsearch output plugins such as the elasticsearch_data_streams output.
 
-    attr_reader :client, :hosts
+    attr_reader :hosts
 
     # These codes apply to documents, not at the request level
     DOC_DLQ_CODES = [400, 404]
@@ -31,7 +31,7 @@ module LogStash; module PluginMixins; module ElasticSearch
       if @proxy.eql?('')
         @logger.warn "Supplied proxy setting (proxy => '') has no effect"
       end
-      @client ||= ::LogStash::Outputs::ElasticSearch::HttpClientBuilder.build(@logger, @hosts, params)
+      ::LogStash::Outputs::ElasticSearch::HttpClientBuilder.build(@logger, @hosts, params)
     end
 
     def validate_authentication
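
With the memoization removed, build_client now just returns a fresh client and the including output owns the assignment (consistent with attr_reader :client being dropped from the mixin above). A sketch of the assumed caller pattern, which lives in the plugin class rather than in this file:

    # Assumed caller pattern; illustrative only, not code from this diff.
    def register
      @client = build_client
      # ... template / ILM / data-stream setup continues from here ...
    end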
@@ -115,6 +115,15 @@ module LogStash; module PluginMixins; module ElasticSearch
     end
     private :parse_user_password_from_cloud_auth
 
+    # Plugin initialization extension point (after a successful ES connection).
+    def finish_register
+    end
+    protected :finish_register
+
+    def last_es_version
+      client.last_es_version
+    end
+
     def maximum_seen_major_version
       client.maximum_seen_major_version
     end
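
finish_register is a deliberate no-op extension point: outputs that include this mixin can override it to run setup that has to wait until the cluster is reachable and its version is known (the new data_stream_support.rb added in this release is the obvious consumer). A minimal override sketch, with illustrative names only:

    # Hypothetical override in an including output class; not code from this diff.
    def finish_register
      @logger.info("Elasticsearch version determined", es_version: last_es_version)
      setup_templates_or_data_streams # illustrative version-dependent setup
    end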
@@ -126,25 +135,24 @@ module LogStash; module PluginMixins; module ElasticSearch
     # launch a thread that waits for an initial successful connection to the ES cluster to call the given block
     # @param block [Proc] the block to execute upon initial successful connection
     # @return [Thread] the successful connection wait thread
-    def setup_after_successful_connection(&block)
+    def after_successful_connection(&block)
       Thread.new do
         sleep_interval = @retry_initial_interval
         until successful_connection? || @stopping.true?
-          @logger.debug("Waiting for connectivity to Elasticsearch cluster. Retrying in #{sleep_interval}s")
-          Stud.stoppable_sleep(sleep_interval) { @stopping.true? }
-          sleep_interval = next_sleep_interval(sleep_interval)
+          @logger.debug("Waiting for connectivity to Elasticsearch cluster, retrying in #{sleep_interval}s")
+          sleep_interval = sleep_for_interval(sleep_interval)
         end
         block.call if successful_connection?
       end
     end
+    private :after_successful_connection
 
     def discover_cluster_uuid
       return unless defined?(plugin_metadata)
       cluster_info = client.get('/')
       plugin_metadata.set(:cluster_uuid, cluster_info['cluster_uuid'])
     rescue => e
-      # TODO introducing this logging message breaks many tests that need refactoring
-      # @logger.error("Unable to retrieve elasticsearch cluster uuid", error => e.message)
+      @logger.error("Unable to retrieve Elasticsearch cluster uuid", message: e.message, exception: e.class, backtrace: e.backtrace)
     end
 
     def retrying_submit(actions)
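
Callers keep the thread returned by after_successful_connection (now private to the mixin) so it can be joined or woken at close, and do their post-connection work inside the block. A hedged sketch of a call site; the real one is in lib/logstash/outputs/elasticsearch.rb, whose diff is not shown here:

    # Illustrative call-site sketch.
    @after_connection_thread = after_successful_connection do
      discover_cluster_uuid
      finish_register # version-dependent setup once the cluster is reachable
    end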
@@ -159,13 +167,11 @@ module LogStash; module PluginMixins; module ElasticSearch
       begin
         submit_actions = submit(submit_actions)
         if submit_actions && submit_actions.size > 0
-          @logger.info("Retrying individual bulk actions that failed or were rejected by the previous bulk request.", :count => submit_actions.size)
+          @logger.info("Retrying individual bulk actions that failed or were rejected by the previous bulk request", count: submit_actions.size)
         end
       rescue => e
-        @logger.error("Encountered an unexpected error submitting a bulk request! Will retry.",
-                      :error_message => e.message,
-                      :class => e.class.name,
-                      :backtrace => e.backtrace)
+        @logger.error("Encountered an unexpected error submitting a bulk request, will retry",
+                      message: e.message, exception: e.class, backtrace: e.backtrace)
       end
 
       # Everything was a success!
@@ -173,21 +179,42 @@ module LogStash; module PluginMixins; module ElasticSearch
 
       # If we're retrying the action sleep for the recommended interval
       # Double the interval for the next time through to achieve exponential backoff
-      Stud.stoppable_sleep(sleep_interval) { @stopping.true? }
-      sleep_interval = next_sleep_interval(sleep_interval)
+      sleep_interval = sleep_for_interval(sleep_interval)
      end
    end
 
    def sleep_for_interval(sleep_interval)
-     Stud.stoppable_sleep(sleep_interval) { @stopping.true? }
+     stoppable_sleep(sleep_interval)
      next_sleep_interval(sleep_interval)
    end
 
+   def stoppable_sleep(interval)
+     Stud.stoppable_sleep(interval) { @stopping.true? }
+   end
+
    def next_sleep_interval(current_interval)
      doubled = current_interval * 2
      doubled > @retry_max_interval ? @retry_max_interval : doubled
    end
 
+   def handle_dlq_status(message, action, status, response)
+     # To support bwc, we check if DLQ exists. otherwise we log and drop event (previous behavior)
+     if @dlq_writer
+       event, action = action.event, [action[0], action[1], action[2]]
+       # TODO: Change this to send a map with { :status => status, :action => action } in the future
+       @dlq_writer.write(event, "#{message} status: #{status}, action: #{action}, response: #{response}")
+     else
+       if dig_value(response, 'index', 'error', 'type') == 'invalid_index_name_exception'
+         level = :error
+       else
+         level = :warn
+       end
+       @logger.send level, message, status: status, action: action, response: response
+     end
+   end
+
+   private
+
    def submit(actions)
      bulk_response = safe_bulk(actions)
 
@@ -217,7 +244,7 @@ module LogStash; module PluginMixins; module ElasticSearch
        action_type, action_props = response.first
 
        status = action_props["status"]
-       failure = action_props["error"]
+       error = action_props["error"]
        action = actions[idx]
        action_params = action[1]
 
@@ -230,7 +257,7 @@ module LogStash; module PluginMixins; module ElasticSearch
          next
        elsif DOC_CONFLICT_CODE == status
          @document_level_metrics.increment(:non_retryable_failures)
-         @logger.warn "Failed action.", status: status, action: action, response: response if !failure_type_logging_whitelist.include?(failure["type"])
+         @logger.warn "Failed action", status: status, action: action, response: response if log_failure_type?(error)
          next
        elsif DOC_DLQ_CODES.include?(status)
          handle_dlq_status("Could not index event to Elasticsearch.", action, status, response)
@@ -239,7 +266,7 @@ module LogStash; module PluginMixins; module ElasticSearch
        else
          # only log what the user whitelisted
          @document_level_metrics.increment(:retryable_failures)
-         @logger.info "retrying failed action with response code: #{status} (#{failure})" if !failure_type_logging_whitelist.include?(failure["type"])
+         @logger.info "Retrying failed action", status: status, action: action, error: error if log_failure_type?(error)
          actions_to_retry << action
        end
      end
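
log_failure_type? centralizes the failure_type_logging_whitelist check that the conflict and retry branches previously inlined, turning the double negative into a readable predicate. Behavior sketch, assuming a user has whitelisted version conflicts:

    # Self-contained model of the predicate with a sample whitelist.
    failure_type_logging_whitelist = ["version_conflict_engine_exception"]

    log_failure_type = ->(failure) { !failure_type_logging_whitelist.include?(failure["type"]) }

    log_failure_type.call("type" => "version_conflict_engine_exception") # => false (suppressed)
    log_failure_type.call("type" => "mapper_parsing_exception")          # => true  (logged)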
@@ -247,40 +274,25 @@ module LogStash; module PluginMixins; module ElasticSearch
      actions_to_retry
    end
 
-   def handle_dlq_status(message, action, status, response)
-     # To support bwc, we check if DLQ exists. otherwise we log and drop event (previous behavior)
-     if @dlq_writer
-       # TODO: Change this to send a map with { :status => status, :action => action } in the future
-       @dlq_writer.write(action[2], "#{message} status: #{status}, action: #{action}, response: #{response}")
-     else
-       error_type = response.fetch('index', {}).fetch('error', {})['type']
-       if 'invalid_index_name_exception' == error_type
-         level = :error
-       else
-         level = :warn
-       end
-       @logger.send level, message, status: status, action: action, response: response
-     end
+   def log_failure_type?(failure)
+     !failure_type_logging_whitelist.include?(failure["type"])
    end
 
    # Rescue retryable errors during bulk submission
+   # @param actions a [action, params, event.to_hash] tuple
+   # @return response [Hash] which contains 'errors' and processed 'items' entries
    def safe_bulk(actions)
      sleep_interval = @retry_initial_interval
      begin
-       es_actions = actions.map {|action_type, params, event| [action_type, params, event.to_hash]}
-       response = @client.bulk(es_actions)
-       response
+       @client.bulk(actions) # returns { 'errors': ..., 'items': ... }
      rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError => e
        # If we can't even connect to the server let's just print out the URL (:hosts is actually a URL)
        # and let the user sort it out from there
        @logger.error(
-         "Attempted to send a bulk request to elasticsearch'"+
-         " but Elasticsearch appears to be unreachable or down!",
-         :error_message => e.message,
-         :class => e.class.name,
-         :will_retry_in_seconds => sleep_interval
+         "Attempted to send a bulk request but Elasticsearch appears to be unreachable or down",
+         message: e.message, exception: e.class, will_retry_in_seconds: sleep_interval
        )
-       @logger.debug("Failed actions for last bad bulk request!", :actions => actions)
+       @logger.debug? && @logger.debug("Failed actions for last bad bulk request", :actions => actions)
 
        # We retry until there are no errors! Errors should all go to the retry queue
        sleep_interval = sleep_for_interval(sleep_interval)
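
Each entry in actions now arrives fully serialized, as the new yard comment notes (the event.to_hash conversion moved upstream to the caller), so safe_bulk hands the array to the HTTP client untouched. A shape sketch with illustrative values:

    # One bulk action tuple as safe_bulk now receives it (values illustrative):
    action = ["index", { :_id => nil, :_index => "logs-2021.05.27" }, { "message" => "hello" }]
    # @client.bulk([action]) # => { "errors" => false, "items" => [...] }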
@@ -288,20 +300,19 @@ module LogStash; module PluginMixins; module ElasticSearch
        retry unless @stopping.true?
      rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::NoConnectionAvailableError => e
        @logger.error(
-         "Attempted to send a bulk request to elasticsearch, but no there are no living connections in the connection pool. Perhaps Elasticsearch is unreachable or down?",
-         :error_message => e.message,
-         :class => e.class.name,
-         :will_retry_in_seconds => sleep_interval
+         "Attempted to send a bulk request but there are no living connections in the pool " +
+         "(perhaps Elasticsearch is unreachable or down?)",
+         message: e.message, exception: e.class, will_retry_in_seconds: sleep_interval
        )
-       Stud.stoppable_sleep(sleep_interval) { @stopping.true? }
-       sleep_interval = next_sleep_interval(sleep_interval)
+
+       sleep_interval = sleep_for_interval(sleep_interval)
        @bulk_request_metrics.increment(:failures)
        retry unless @stopping.true?
      rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
        @bulk_request_metrics.increment(:failures)
        log_hash = {:code => e.response_code, :url => e.url.sanitized.to_s, :content_length => e.request_body.bytesize}
        log_hash[:body] = e.response_body if @logger.debug? # Generally this is too verbose
-       message = "Encountered a retryable error. Will Retry with exponential backoff "
+       message = "Encountered a retryable error (will retry with exponential backoff)"
 
        # We treat 429s as a special case because these really aren't errors, but
        # rather just ES telling us to back off a bit, which we do.
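
All of these rescue paths now funnel through sleep_for_interval, which doubles the wait on each attempt, caps it at @retry_max_interval, and aborts early when the plugin is stopping. With the plugin's documented defaults (retry_initial_interval => 2, retry_max_interval => 64) the waits run 2, 4, 8, 16, 32, 64, 64, ... A standalone sketch of that arithmetic:

    # Self-contained model of next_sleep_interval with the default settings.
    retry_initial_interval = 2   # seconds (plugin default)
    retry_max_interval     = 64  # seconds (plugin default)

    next_sleep_interval = lambda do |current|
      doubled = current * 2
      doubled > retry_max_interval ? retry_max_interval : doubled
    end

    interval = retry_initial_interval
    7.times do
      puts "will retry in #{interval}s"
      interval = next_sleep_interval.call(interval)
    end
    # prints: 2, 4, 8, 16, 32, 64, 64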
@@ -315,17 +326,12 @@ module LogStash; module PluginMixins; module ElasticSearch
 
        sleep_interval = sleep_for_interval(sleep_interval)
        retry
-     rescue => e
-       # Stuff that should never happen
-       # For all other errors print out full connection issues
+     rescue => e # Stuff that should never happen - print out full connection issues
        @logger.error(
-         "An unknown error occurred sending a bulk request to Elasticsearch. We will retry indefinitely",
-         :error_message => e.message,
-         :error_class => e.class.name,
-         :backtrace => e.backtrace
+         "An unknown error occurred sending a bulk request to Elasticsearch (will retry indefinitely)",
+         message: e.message, exception: e.class, backtrace: e.backtrace
        )
-
-       @logger.debug("Failed actions for last bad bulk request!", :actions => actions)
+       @logger.debug? && @logger.debug("Failed actions for last bad bulk request", :actions => actions)
 
        sleep_interval = sleep_for_interval(sleep_interval)
        @bulk_request_metrics.increment(:failures)
@@ -339,5 +345,12 @@ module LogStash; module PluginMixins; module ElasticSearch
      respond_to?(:execution_context) && execution_context.respond_to?(:dlq_writer) &&
        !execution_context.dlq_writer.inner_writer.is_a?(::LogStash::Util::DummyDeadLetterQueueWriter)
    end
+
+   def dig_value(val, first_key, *rest_keys)
+     fail(TypeError, "cannot dig value from #{val.class}") unless val.kind_of?(Hash)
+     val = val[first_key]
+     return val if rest_keys.empty? || val == nil
+     dig_value(val, *rest_keys)
+   end
  end
 end; end; end
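
The new dig_value helper walks nested hashes like Hash#dig, but raises a uniform TypeError naming the offending class when an intermediate value is present yet not a Hash, and returns nil as soon as a key is missing; handle_dlq_status uses it to classify bulk-item errors. Usage sketch:

    response = { 'index' => { 'error' => { 'type' => 'invalid_index_name_exception' } } }

    dig_value(response, 'index', 'error', 'type')           # => "invalid_index_name_exception"
    dig_value({ 'index' => nil }, 'index', 'error', 'type') # => nil (missing branch)
    dig_value({ 'index' => 'oops' }, 'index', 'error')      # => TypeError: cannot dig value from String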
data/logstash-output-elasticsearch.gemspec
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-elasticsearch'
-  s.version = '10.8.6'
+  s.version = '11.0.3'
 
   s.licenses = ['apache-2.0']
   s.summary = "Stores logs in Elasticsearch"
@@ -21,15 +21,15 @@ Gem::Specification.new do |s|
   # Special flag to let us know this is actually a logstash plugin
   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
 
-  s.add_runtime_dependency "manticore", '>= 0.5.4', '< 1.0.0'
+  s.add_runtime_dependency "manticore", '>= 0.7.1', '< 1.0.0'
   s.add_runtime_dependency 'stud', ['>= 0.0.17', '~> 0.0']
-  s.add_runtime_dependency 'cabin', ['~> 0.6']
   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
   s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.0'
 
   s.add_development_dependency 'logstash-codec-plain'
   s.add_development_dependency 'logstash-devutils'
   s.add_development_dependency 'flores'
+  s.add_development_dependency 'cabin', ['~> 0.6']
   # Still used in some specs, we should remove this ASAP
   s.add_development_dependency 'elasticsearch'
 end
data/spec/es_spec_helper.rb
@@ -1,5 +1,5 @@
-require "logstash/devutils/rspec/spec_helper"
-require 'manticore'
+require_relative './spec_helper'
+
 require 'elasticsearch'
 require_relative "support/elasticsearch/api/actions/delete_ilm_policy"
 require_relative "support/elasticsearch/api/actions/get_alias"
@@ -8,10 +8,7 @@ require_relative "support/elasticsearch/api/actions/get_ilm_policy"
 require_relative "support/elasticsearch/api/actions/put_ilm_policy"
 
 require 'json'
-
-unless defined?(LogStash::OSS)
-  LogStash::OSS = ENV['DISTRIBUTION'] != "default"
-end
+require 'cabin'
 
 module ESHelper
   def get_host_port
@@ -23,7 +20,9 @@ module ESHelper
   end
 
   def get_client
-    Elasticsearch::Client.new(:hosts => [get_host_port])
+    Elasticsearch::Client.new(:hosts => [get_host_port]).tap do |client|
+      allow(client).to receive(:verify_elasticsearch).and_return(true) # bypass client side version checking
+    end
   end
 
   def doc_type
@@ -56,11 +55,7 @@ module ESHelper
   end
 
   def routing_field_name
-    if ESHelper.es_version_satisfies?(">=6")
-      :routing
-    else
-      :_routing
-    end
+    :routing
   end
 
   def self.es_version
data/spec/fixtures/_nodes/{5x_6x.json → 6x.json}
@@ -11,7 +11,7 @@
       "transport_address" : "http://localhost:9200",
       "host" : "localhost",
       "ip" : "127.0.0.1",
-      "version" : "5.5.1",
+      "version" : "6.8.10",
       "build_hash" : "19c13d0",
       "roles" : [
         "master"
@@ -29,7 +29,7 @@
       "transport_address" : "http://localhost:9201",
       "host" : "localhost",
       "ip" : "127.0.0.1",
-      "version" : "5.5.1",
+      "version" : "6.8.10",
       "build_hash" : "19c13d0",
       "roles" : [
         "data"
@@ -47,7 +47,7 @@
       "transport_address" : "http://localhost:9202",
       "host" : "localhost",
       "ip" : "127.0.0.1",
-      "version" : "5.5.1",
+      "version" : "6.8.10",
       "build_hash" : "19c13d0",
       "roles" : [
         "data",
@@ -66,7 +66,7 @@
       "transport_address" : "http://localhost:9203",
       "host" : "localhost",
       "ip" : "127.0.0.1",
-      "version" : "5.5.1",
+      "version" : "6.8.10",
       "build_hash" : "19c13d0",
       "roles" : [ ],
       "http" : {
@@ -78,4 +78,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}
data/spec/integration/outputs/compressed_indexing_spec.rb
@@ -8,62 +8,63 @@ RSpec::Matchers.define :a_valid_gzip_encoded_string do
   }
 end
 
-if ESHelper.es_version_satisfies?(">= 5")
-  describe "indexing with http_compression turned on", :integration => true do
-    let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
-    let(:index) { 10.times.collect { rand(10).to_s }.join("") }
-    let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
-    let(:event_count) { 10000 + rand(500) }
-    let(:events) { event_count.times.map { event }.to_a }
-    let(:config) {
-      {
-        "hosts" => get_host_port,
-        "index" => index,
-        "http_compression" => true
-      }
+describe "indexing with http_compression turned on", :integration => true do
+  let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+  let(:event_count) { 10000 + rand(500) }
+  let(:events) { event_count.times.map { event }.to_a }
+  let(:config) {
+    {
+      "hosts" => get_host_port,
+      "index" => index,
+      "http_compression" => true
     }
-    subject { LogStash::Outputs::ElasticSearch.new(config) }
+  }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
 
-    let(:es_url) { "http://#{get_host_port}" }
-    let(:index_url) {"#{es_url}/#{index}"}
-    let(:http_client_options) { {} }
-    let(:http_client) do
-      Manticore::Client.new(http_client_options)
-    end
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) {"#{es_url}/#{index}"}
+  let(:http_client_options) { {} }
+  let(:http_client) do
+    Manticore::Client.new(http_client_options)
+  end
 
-    before do
-      subject.register
-      subject.multi_receive([])
-    end
+  before do
+    subject.register
+    subject.multi_receive([])
+  end
 
-    shared_examples "an indexer" do
-      it "ships events" do
-        subject.multi_receive(events)
+  shared_examples "an indexer" do
+    it "ships events" do
+      subject.multi_receive(events)
 
-        http_client.post("#{es_url}/_refresh").call
+      http_client.post("#{es_url}/_refresh").call
 
-        response = http_client.get("#{index_url}/_count?q=*")
-        result = LogStash::Json.load(response.body)
-        cur_count = result["count"]
-        expect(cur_count).to eq(event_count)
+      response = http_client.get("#{index_url}/_count?q=*")
+      result = LogStash::Json.load(response.body)
+      cur_count = result["count"]
+      expect(cur_count).to eq(event_count)
 
-        response = http_client.get("#{index_url}/_search?q=*&size=1000")
-        result = LogStash::Json.load(response.body)
-        result["hits"]["hits"].each do |doc|
-          expect(doc["_type"]).to eq(type) if ESHelper.es_version_satisfies?(">= 6", "< 8")
-          expect(doc).not_to include("_type") if ESHelper.es_version_satisfies?(">= 8")
-          expect(doc["_index"]).to eq(index)
+      response = http_client.get("#{index_url}/_search?q=*&size=1000")
+      result = LogStash::Json.load(response.body)
+      result["hits"]["hits"].each do |doc|
+        if ESHelper.es_version_satisfies?("< 8")
+          expect(doc["_type"]).to eq(type)
+        else
+          expect(doc).not_to include("_type")
         end
+        expect(doc["_index"]).to eq(index)
       end
     end
+  end
 
-    it "sets the correct content-encoding header and body is compressed" do
-      expect(subject.client.pool.adapter.client).to receive(:send).
-        with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
-        and_call_original
-      subject.multi_receive(events)
-    end
-
-    it_behaves_like("an indexer")
+  it "sets the correct content-encoding header and body is compressed" do
+    expect(subject.client.pool.adapter.client).to receive(:send).
+      with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
+      and_call_original
+    subject.multi_receive(events)
   end
+
+  it_behaves_like("an indexer")
 end