logstash-input-elasticsearch 4.9.3 → 4.12.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: dc71917516d5297b151b09c3b26324272ee472fa8ae987fad6b30fa99492d382
- data.tar.gz: cd72359f663375ad910dbf02a70d151160e449311bff81c0f77d062c7625c597
+ metadata.gz: e2427b28640265b075a0e21240cf410b2e2252d7516ac7bd0955d48087317f7f
+ data.tar.gz: dbc7d84f18348e7fa2292d2b68a5db59a5ffda724eb090ff17503e10e3ff130d
  SHA512:
- metadata.gz: ea7bca9aa226958479061efab4172224039d10a93579ef482418ad6955c493c00a45b1372ce6bd61aaba07e6c89bc2e96d1236cfd4be2e326193b52805ca448f
- data.tar.gz: 0f08585317a26f8a647be73e4e70b9a421025f1c81bcfa8518c1188cb60a0efea49f131a1e80645bd920c0806bb8c54542da07ab1cab44b540313a062b8b351d
+ metadata.gz: dd3f9693c355505fbe5a971a46899ba285e057406db5807d4226b4ace61449f41f20409926d49a5a69c19338020069de13e4a4da7028db9b8f7db6d3ce4e0e6c
+ data.tar.gz: 0c170d69801feac7d0df3a79ef4351a6237fa30d5ad3b69e0c8af660981f40a1d814e17d60cebd0e555cdee6613de93c7e407d2cba28f0092f85f5da7446b966
data/CHANGELOG.md CHANGED
@@ -1,3 +1,15 @@
+ ## 4.12.1
+ - Fixed too_long_frame_exception by passing scroll_id in the body [#159](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/159)
+
+ ## 4.12.0
+ - Feat: Update Elasticsearch client to 7.14.0 [#157](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/157)
+
+ ## 4.11.0
+ - Feat: add user-agent header passed to the Elasticsearch HTTP connection [#158](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/158)
+
+ ## 4.10.0
+ - Feat: added ecs_compatibility + event_factory support [#149](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/149)
+
  ## 4.9.3
  - Fixed SSL handshake hang indefinitely with proxy setup [#156](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/156)
 
@@ -6,6 +18,7 @@
  header isn't passed, this leads to the plugin not being able to leverage `user`/`password` credentials set by the user.
  [#153](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/153)
 
+
  ## 4.9.1
  - [DOC] Replaced hard-coded links with shared attributes [#143](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/143)
  - [DOC] Added missing quote to docinfo_fields example [#145](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/145)
data/docs/index.asciidoc CHANGED
@@ -83,8 +83,18 @@ Authentication to a secure Elasticsearch cluster is possible using _one_ of the
  Authorization to a secure Elasticsearch cluster requires `read` permission at index level and `monitoring` permissions at cluster level.
  The `monitoring` permission at cluster level is necessary to perform periodic connectivity checks.
 
+ [id="plugins-{type}s-{plugin}-ecs"]
+ ==== Compatibility with the Elastic Common Schema (ECS)
+
+ When ECS compatibility is disabled, `docinfo_target` uses the `"@metadata"` field as a default; with ECS enabled, the plugin
+ uses `"[@metadata][input][elasticsearch]"` as the default target for placing document information.
+
+ The plugin logs a warning when ECS is enabled and `target` isn't set.
+
+ TIP: Set the `target` option to avoid potential schema conflicts.
+
  [id="plugins-{type}s-{plugin}-options"]
- ==== Elasticsearch Input Configuration Options
+ ==== Elasticsearch Input configuration options
 
  This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
 
@@ -99,6 +109,7 @@ This plugin supports the following configuration options plus the <<plugins-{typ
  | <<plugins-{type}s-{plugin}-docinfo>> |<<boolean,boolean>>|No
  | <<plugins-{type}s-{plugin}-docinfo_fields>> |<<array,array>>|No
  | <<plugins-{type}s-{plugin}-docinfo_target>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-ecs_compatibility>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-hosts>> |<<array,array>>|No
  | <<plugins-{type}s-{plugin}-index>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-password>> |<<password,password>>|No
@@ -197,13 +208,14 @@ Example
  size => 500
  scroll => "5m"
  docinfo => true
+ docinfo_target => "[@metadata][doc]"
  }
  }
  output {
  elasticsearch {
- index => "copy-of-production.%{[@metadata][_index]}"
- document_type => "%{[@metadata][_type]}"
- document_id => "%{[@metadata][_id]}"
+ index => "copy-of-production.%{[@metadata][doc][_index]}"
+ document_type => "%{[@metadata][doc][_type]}"
+ document_id => "%{[@metadata][doc][_id]}"
  }
  }
 
@@ -214,8 +226,9 @@ Example
  input {
  elasticsearch {
  docinfo => true
+ docinfo_target => "[@metadata][doc]"
  add_field => {
- identifier => "%{[@metadata][_index]}:%{[@metadata][_type]}:%{[@metadata][_id]}"
+ identifier => "%{[@metadata][doc][_index]}:%{[@metadata][doc][_type]}:%{[@metadata][doc][_id]}"
  }
  }
  }
@@ -236,11 +249,25 @@ more information.
  ===== `docinfo_target`
 
  * Value type is <<string,string>>
- * Default value is `"@metadata"`
+ * Default value depends on whether <<plugins-{type}s-{plugin}-ecs_compatibility>> is enabled:
+ ** ECS Compatibility disabled: `"@metadata"`
+ ** ECS Compatibility enabled: `"[@metadata][input][elasticsearch]"`
+
+ If document metadata storage is requested by enabling the `docinfo` option,
+ this option names the field under which to store the metadata fields as subfields.
+
+ [id="plugins-{type}s-{plugin}-ecs_compatibility"]
+ ===== `ecs_compatibility`
+
+ * Value type is <<string,string>>
+ * Supported values are:
+ ** `disabled`: document information is placed under the root-level `@metadata` field (pre-ECS field names)
+ ** `v1`,`v8`: Elastic Common Schema compliant behavior
+ * Default value depends on which version of Logstash is running:
+ ** When Logstash provides a `pipeline.ecs_compatibility` setting, its value is used as the default
+ ** Otherwise, the default value is `disabled`
 
- If document metadata storage is requested by enabling the `docinfo`
- option, this option names the field under which to store the metadata
- fields as subfields.
+ Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)].
 
  [id="plugins-{type}s-{plugin}-hosts"]
  ===== `hosts`
@@ -402,4 +429,4 @@ empty string authentication will be disabled.
  [id="plugins-{type}s-{plugin}-common-options"]
  include::{include_path}/{type}.asciidoc[]
 
- :default_codec!:
+ :no_codec!:
@@ -20,7 +20,17 @@ if es_client_version >= Gem::Version.new('7.2') && es_client_version < Gem::Vers
  def apply_headers(request_options, options)
  headers = (options && options[:headers]) || {}
  headers[CONTENT_TYPE_STR] = find_value(headers, CONTENT_TYPE_REGEX) || DEFAULT_CONTENT_TYPE
- headers[USER_AGENT_STR] = find_value(headers, USER_AGENT_REGEX) || user_agent_header
+
+ # this code is necessary to grab the correct user-agent header
+ # when this method is invoked with apply_headers(@request_options, options)
+ # from https://github.com/elastic/elasticsearch-ruby/blob/v7.14.0/elasticsearch-transport/lib/elasticsearch/transport/transport/http/manticore.rb#L113-L114
+ transport_user_agent = nil
+ if (options && options[:transport_options] && options[:transport_options][:headers])
+ transport_headers = options[:transport_options][:headers]
+ transport_user_agent = find_value(transport_headers, USER_AGENT_REGEX)
+ end
+
+ headers[USER_AGENT_STR] = transport_user_agent || find_value(headers, USER_AGENT_REGEX) || user_agent_header
  headers[ACCEPT_ENCODING] = GZIP if use_compression?
  (request_options[:headers] ||= {}).merge!(headers) # this line was changed
  end
@@ -4,6 +4,9 @@ require "logstash/namespace"
  require "logstash/json"
  require "logstash/util/safe_uri"
  require 'logstash/plugin_mixins/validator_support/field_reference_validation_adapter'
+ require 'logstash/plugin_mixins/event_support/event_factory_adapter'
+ require 'logstash/plugin_mixins/ecs_compatibility_support'
+ require 'logstash/plugin_mixins/ecs_compatibility_support/target_check'
  require "base64"
 
  require "elasticsearch"
@@ -66,12 +69,16 @@ require_relative "elasticsearch/patches/_elasticsearch_transport_connections_sel
  #
  #
  class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
+
+ include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)
+ include LogStash::PluginMixins::ECSCompatibilitySupport::TargetCheck
+
+ include LogStash::PluginMixins::EventSupport::EventFactoryAdapter
+
  extend LogStash::PluginMixins::ValidatorSupport::FieldReferenceValidationAdapter
 
  config_name "elasticsearch"
 
- default :codec, "json"
-
  # List of elasticsearch hosts to use for querying.
  # Each host can be either IP, HOST, IP:port or HOST:port.
  # Port defaults to 9200
@@ -128,8 +135,9 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  #
  config :docinfo, :validate => :boolean, :default => false
 
- # Where to move the Elasticsearch document information. By default we use the @metadata field.
- config :docinfo_target, :validate=> :string, :default => LogStash::Event::METADATA
+ # Where to move the Elasticsearch document information.
+ # default: [@metadata][input][elasticsearch] in ECS mode, @metadata field otherwise
+ config :docinfo_target, :validate=> :field_reference
 
  # List of document metadata to move to the `docinfo_target` field.
  # To learn more about Elasticsearch metadata fields read
@@ -184,6 +192,14 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  # If set, the _source of each hit will be added nested under the target instead of at the top-level
  config :target, :validate => :field_reference
 
+ def initialize(params={})
+ super(params)
+
+ if docinfo_target.nil?
+ @docinfo_target = ecs_select[disabled: '@metadata', v1: '[@metadata][input][elasticsearch]']
+ end
+ end
+
  def register
  require "rufus/scheduler"
 
@@ -206,6 +222,7 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  transport_options = {:headers => {}}
  transport_options[:headers].merge!(setup_basic_auth(user, password))
  transport_options[:headers].merge!(setup_api_key(api_key))
+ transport_options[:headers].merge!({'user-agent' => prepare_user_agent()})
  transport_options[:request_timeout] = @request_timeout_seconds unless @request_timeout_seconds.nil?
  transport_options[:connect_timeout] = @connect_timeout_seconds unless @connect_timeout_seconds.nil?
  transport_options[:socket_timeout] = @socket_timeout_seconds unless @socket_timeout_seconds.nil?
@@ -223,6 +240,8 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  :transport_class => ::Elasticsearch::Transport::Transport::HTTP::Manticore,
  :ssl => ssl_options
  )
+ test_connection!
+ @client
  end
 
 
@@ -297,47 +316,41 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  [r['hits']['hits'].any?, r['_scroll_id']]
  rescue => e
  # this will typically be triggered by a scroll timeout
- logger.error("Scroll request error, aborting scroll", error: e.inspect)
+ logger.error("Scroll request error, aborting scroll", message: e.message, exception: e.class)
  # return no hits and original scroll_id so we can try to clear it
  [false, scroll_id]
  end
 
  def push_hit(hit, output_queue)
- if @target.nil?
- event = LogStash::Event.new(hit['_source'])
- else
- event = LogStash::Event.new
- event.set(@target, hit['_source'])
- end
-
- if @docinfo
- # do not assume event[@docinfo_target] to be in-place updatable. first get it, update it, then at the end set it in the event.
- docinfo_target = event.get(@docinfo_target) || {}
-
- unless docinfo_target.is_a?(Hash)
- @logger.error("Elasticsearch Input: Incompatible Event, incompatible type for the docinfo_target=#{@docinfo_target} field in the `_source` document, expected a hash got:", :docinfo_target_type => docinfo_target.class, :event => event)
+ event = targeted_event_factory.new_event hit['_source']
+ set_docinfo_fields(hit, event) if @docinfo
+ decorate(event)
+ output_queue << event
+ end
 
- # TODO: (colin) I am not sure raising is a good strategy here?
- raise Exception.new("Elasticsearch input: incompatible event")
- end
+ def set_docinfo_fields(hit, event)
+ # do not assume event[@docinfo_target] to be in-place updatable. first get it, update it, then at the end set it in the event.
+ docinfo_target = event.get(@docinfo_target) || {}
 
- @docinfo_fields.each do |field|
- docinfo_target[field] = hit[field]
- end
+ unless docinfo_target.is_a?(Hash)
+ @logger.error("Incompatible Event, incompatible type for the docinfo_target=#{@docinfo_target} field in the `_source` document, expected a hash got:", :docinfo_target_type => docinfo_target.class, :event => event.to_hash_with_metadata)
 
- event.set(@docinfo_target, docinfo_target)
+ # TODO: (colin) I am not sure raising is a good strategy here?
+ raise Exception.new("Elasticsearch input: incompatible event")
  end
 
- decorate(event)
+ @docinfo_fields.each do |field|
+ docinfo_target[field] = hit[field]
+ end
 
- output_queue << event
+ event.set(@docinfo_target, docinfo_target)
  end
 
  def clear_scroll(scroll_id)
- @client.clear_scroll(scroll_id: scroll_id) if scroll_id
+ @client.clear_scroll(:body => { :scroll_id => scroll_id }) if scroll_id
  rescue => e
  # ignore & log any clear_scroll errors
- logger.warn("Ignoring clear_scroll exception", message: e.message)
+ logger.warn("Ignoring clear_scroll exception", message: e.message, exception: e.class)
  end
 
  def scroll_request scroll_id
@@ -397,6 +410,18 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  { 'Authorization' => "ApiKey #{token}" }
  end
 
+ def prepare_user_agent
+ os_name = java.lang.System.getProperty('os.name')
+ os_version = java.lang.System.getProperty('os.version')
+ os_arch = java.lang.System.getProperty('os.arch')
+ jvm_vendor = java.lang.System.getProperty('java.vendor')
+ jvm_version = java.lang.System.getProperty('java.version')
+
+ plugin_version = Gem.loaded_specs["logstash-input-elasticsearch"].version
+ # example: logstash/7.14.1 (OS=Linux-5.4.0-84-generic-amd64; JVM=AdoptOpenJDK-11.0.11) logstash-input-elasticsearch/4.10.0
+ "logstash/#{LOGSTASH_VERSION} (OS=#{os_name}-#{os_version}-#{os_arch}; JVM=#{jvm_vendor}-#{jvm_version}) logstash-#{@plugin_type}-#{config_name}/#{plugin_version}"
+ end
+
  def fill_user_password_from_cloud_auth
  return unless @cloud_auth
 
@@ -447,6 +472,15 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  [ cloud_auth.username, cloud_auth.password ]
  end
 
+ # @private used by unit specs
+ attr_reader :client
+
+ def test_connection!
+ @client.ping
+ rescue Elasticsearch::UnsupportedProductError
+ raise LogStash::ConfigurationError, "Could not connect to a compatible version of Elasticsearch"
+ end
+
  module URIOrEmptyValidator
  ##
  # @override to provide :uri_or_empty validator
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-input-elasticsearch'
- s.version = '4.9.3'
+ s.version = '4.12.1'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Reads query results from an Elasticsearch cluster"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -20,18 +20,22 @@ Gem::Specification.new do |s|
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
 
  # Gem dependencies
- s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+ s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~> 1.3'
+ s.add_runtime_dependency 'logstash-mixin-event_support', '~> 1.0'
+ s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'
 
- s.add_runtime_dependency 'elasticsearch', '>= 5.0.5' # LS >= 6.7 and < 7.14 all used version 5.0.5
+ s.add_runtime_dependency 'elasticsearch', '>= 7.14.0' # LS >= 6.7 and < 7.14 all used version 5.0.5
 
- s.add_runtime_dependency 'logstash-codec-json'
- s.add_runtime_dependency 'sequel'
  s.add_runtime_dependency 'tzinfo'
  s.add_runtime_dependency 'tzinfo-data'
  s.add_runtime_dependency 'rufus-scheduler'
  s.add_runtime_dependency 'manticore', ">= 0.7.1"
- s.add_development_dependency 'faraday', "~> 0.15.4"
+
+ s.add_development_dependency 'logstash-codec-plain'
+ s.add_development_dependency 'faraday', "~> 1"
  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'timecop'
+ s.add_development_dependency 'cabin', ['~> 0.6']
+ s.add_development_dependency 'webrick'
  end
@@ -7,16 +7,45 @@ require "timecop"
  require "stud/temporary"
  require "time"
  require "date"
+ require "cabin"
+ require "webrick"
+ require "uri"
 
- class LogStash::Inputs::TestableElasticsearch < LogStash::Inputs::Elasticsearch
- attr_reader :client
- end
+ require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'
 
- describe LogStash::Inputs::TestableElasticsearch do
+ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
 
- let(:plugin) { LogStash::Inputs::TestableElasticsearch.new(config) }
+ let(:plugin) { described_class.new(config) }
  let(:queue) { Queue.new }
 
+ before(:each) do
+ Elasticsearch::Client.send(:define_method, :ping) { } # define no-action ping method
+ end
+
+ context "register" do
+ let(:config) do
+ {
+ "schedule" => "* * * * * UTC"
+ }
+ end
+
+ context "against authentic Elasticsearch" do
+ it "should not raise an exception" do
+ expect { plugin.register }.to_not raise_error
+ end
+ end
+
+ context "against not authentic Elasticsearch" do
+ before(:each) do
+ Elasticsearch::Client.send(:define_method, :ping) { raise Elasticsearch::UnsupportedProductError.new("Fake error") } # define error ping method
+ end
+
+ it "should raise ConfigurationError" do
+ expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
+ end
+ end
+ end
+
  it_behaves_like "an interruptible input plugin" do
  let(:esclient) { double("elasticsearch-client") }
  let(:config) do
@@ -37,10 +66,17 @@ describe LogStash::Inputs::TestableElasticsearch do
  allow(esclient).to receive(:search) { { "hits" => { "hits" => [hit] } } }
  allow(esclient).to receive(:scroll) { { "hits" => { "hits" => [hit] } } }
  allow(esclient).to receive(:clear_scroll).and_return(nil)
+ allow(esclient).to receive(:ping)
  end
  end
 
- context 'creating events from Elasticsearch' do
+
+ ecs_compatibility_matrix(:disabled, :v1, :v8) do |ecs_select|
+
+ before(:each) do
+ allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
+ end
+
  let(:config) do
  %q[
  input {
@@ -89,6 +125,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  expect(client).to receive(:search).with(any_args).and_return(mock_response)
  expect(client).to receive(:scroll).with({ :body => { :scroll_id => "cXVlcnlUaGVuRmV0Y2g" }, :scroll=> "1m" }).and_return(mock_scroll_response)
  expect(client).to receive(:clear_scroll).and_return(nil)
+ expect(client).to receive(:ping)
  end
 
  it 'creates the events from the hits' do
@@ -97,7 +134,6 @@ describe LogStash::Inputs::TestableElasticsearch do
  end
 
  expect(event).to be_a(LogStash::Event)
- puts event.to_hash_with_metadata
  expect(event.get("message")).to eql [ "ohayo" ]
  end
 
@@ -120,10 +156,10 @@ describe LogStash::Inputs::TestableElasticsearch do
  end
 
  expect(event).to be_a(LogStash::Event)
- puts event.to_hash_with_metadata
  expect(event.get("[@metadata][_source][message]")).to eql [ "ohayo" ]
  end
  end
+
  end
 
  # This spec is an adapter-spec, ensuring that we send the right sequence of messages to our Elasticsearch Client
@@ -135,6 +171,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  'query' => "#{LogStash::Json.dump(query)}",
  'slices' => slices,
  'docinfo' => true, # include ids
+ 'docinfo_target' => '[@metadata]'
  }
  end
  let(:query) do
@@ -304,6 +341,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  expect(client).to receive(:search).with(hash_including(:body => slice0_query)).and_return(slice0_response0)
  expect(client).to receive(:scroll).with(hash_including(:body => { :scroll_id => slice0_scroll1 })).and_return(slice0_response1)
  expect(client).to receive(:scroll).with(hash_including(:body => { :scroll_id => slice0_scroll2 })).and_return(slice0_response2)
+ allow(client).to receive(:ping)
 
  # SLICE1 is a two-page scroll in which the last page has no next scroll id
  slice1_query = LogStash::Json.dump(query.merge('slice' => { 'id' => 1, 'max' => 2}))
@@ -403,129 +441,143 @@ describe LogStash::Inputs::TestableElasticsearch do
  expect(client).to receive(:search).with(any_args).and_return(response)
  allow(client).to receive(:scroll).with({ :body => {:scroll_id => "cXVlcnlUaGVuRmV0Y2g"}, :scroll => "1m" }).and_return(scroll_reponse)
  allow(client).to receive(:clear_scroll).and_return(nil)
+ allow(client).to receive(:ping).and_return(nil)
  end
 
- context 'when defining docinfo' do
- let(:config_metadata) do
- %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- }
- }
- ]
- end
+ ecs_compatibility_matrix(:disabled, :v1, :v8) do |ecs_select|
 
- it 'merges the values if the `docinfo_target` already exists in the `_source` document' do
- config_metadata_with_hash = %Q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- docinfo_target => 'metadata_with_hash'
+ before(:each) do
+ allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
+ end
+
+ context 'with docinfo enabled' do
+ let(:config_metadata) do
+ %q[
+ input {
+ elasticsearch {
+ hosts => ["localhost"]
+ query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
+ docinfo => true
+ }
  }
- }
- ]
-
- event = input(config_metadata_with_hash) do |pipeline, queue|
- queue.pop
+ ]
  end
 
- expect(event.get("[metadata_with_hash][_index]")).to eq('logstash-2014.10.12')
- expect(event.get("[metadata_with_hash][_type]")).to eq('logs')
- expect(event.get("[metadata_with_hash][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
- expect(event.get("[metadata_with_hash][awesome]")).to eq("logstash")
- end
-
- context 'if the `docinfo_target` exists but is not of type hash' do
- let (:config) { {
- "hosts" => ["localhost"],
- "query" => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
- "docinfo" => true,
- "docinfo_target" => 'metadata_with_string'
- } }
- it 'throws an exception if the `docinfo_target` exists but is not of type hash' do
- expect(client).not_to receive(:clear_scroll)
- plugin.register
- expect { plugin.run([]) }.to raise_error(Exception, /incompatible event/)
+ it "provides document info under metadata" do
+ event = input(config_metadata) do |pipeline, queue|
+ queue.pop
+ end
+
+ if ecs_select.active_mode == :disabled
+ expect(event.get("[@metadata][_index]")).to eq('logstash-2014.10.12')
+ expect(event.get("[@metadata][_type]")).to eq('logs')
+ expect(event.get("[@metadata][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
+ else
+ expect(event.get("[@metadata][input][elasticsearch][_index]")).to eq('logstash-2014.10.12')
+ expect(event.get("[@metadata][input][elasticsearch][_type]")).to eq('logs')
+ expect(event.get("[@metadata][input][elasticsearch][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
+ end
  end
- end
 
- it "should move the document info to the @metadata field" do
- event = input(config_metadata) do |pipeline, queue|
- queue.pop
+ it 'merges values if the `docinfo_target` already exists in the `_source` document' do
+ config_metadata_with_hash = %Q[
+ input {
+ elasticsearch {
+ hosts => ["localhost"]
+ query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
+ docinfo => true
+ docinfo_target => 'metadata_with_hash'
+ }
+ }
+ ]
+
+ event = input(config_metadata_with_hash) do |pipeline, queue|
+ queue.pop
+ end
+
+ expect(event.get("[metadata_with_hash][_index]")).to eq('logstash-2014.10.12')
+ expect(event.get("[metadata_with_hash][_type]")).to eq('logs')
+ expect(event.get("[metadata_with_hash][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
+ expect(event.get("[metadata_with_hash][awesome]")).to eq("logstash")
  end
 
- expect(event.get("[@metadata][_index]")).to eq('logstash-2014.10.12')
- expect(event.get("[@metadata][_type]")).to eq('logs')
- expect(event.get("[@metadata][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
- end
+ context 'if the `docinfo_target` exists but is not of type hash' do
+ let (:config) { {
+ "hosts" => ["localhost"],
+ "query" => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
+ "docinfo" => true,
+ "docinfo_target" => 'metadata_with_string'
+ } }
+ it 'throws an exception if the `docinfo_target` exists but is not of type hash' do
+ expect(client).not_to receive(:clear_scroll)
+ plugin.register
+ expect { plugin.run([]) }.to raise_error(Exception, /incompatible event/)
+ end
+ end
 
- it 'should move the document information to the specified field' do
- config = %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- docinfo_target => 'meta'
+ it 'should move the document information to the specified field' do
+ config = %q[
+ input {
+ elasticsearch {
+ hosts => ["localhost"]
+ query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
+ docinfo => true
+ docinfo_target => 'meta'
+ }
  }
- }
- ]
- event = input(config) do |pipeline, queue|
- queue.pop
+ ]
+ event = input(config) do |pipeline, queue|
+ queue.pop
+ end
+
+ expect(event.get("[meta][_index]")).to eq('logstash-2014.10.12')
+ expect(event.get("[meta][_type]")).to eq('logs')
+ expect(event.get("[meta][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
  end
 
- expect(event.get("[meta][_index]")).to eq('logstash-2014.10.12')
- expect(event.get("[meta][_type]")).to eq('logs')
- expect(event.get("[meta][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
- end
+ it "allows specifying which fields from the document info to save to metadata" do
+ fields = ["_index"]
+ config = %Q[
+ input {
+ elasticsearch {
+ hosts => ["localhost"]
+ query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
+ docinfo => true
+ docinfo_fields => #{fields}
+ }
+ }]
+
+ event = input(config) do |pipeline, queue|
+ queue.pop
+ end
+
+ meta_base = event.get(ecs_select.active_mode == :disabled ? "@metadata" : "[@metadata][input][elasticsearch]")
+ expect(meta_base.keys).to eq(fields)
+ end
 
- it "should allow specifying which fields from the document info to save to the @metadata field" do
- fields = ["_index"]
- config = %Q[
+ it 'should be able to reference metadata fields in `add_field` decorations' do
+ config = %q[
  input {
  elasticsearch {
  hosts => ["localhost"]
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  docinfo => true
- docinfo_fields => #{fields}
- }
- }]
-
- event = input(config) do |pipeline, queue|
- queue.pop
- end
-
- expect(event.get("@metadata").keys).to eq(fields)
- expect(event.get("[@metadata][_type]")).to eq(nil)
- expect(event.get("[@metadata][_index]")).to eq('logstash-2014.10.12')
- expect(event.get("[@metadata][_id]")).to eq(nil)
- end
-
- it 'should be able to reference metadata fields in `add_field` decorations' do
- config = %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- add_field => {
- 'identifier' => "foo:%{[@metadata][_type]}:%{[@metadata][_id]}"
+ add_field => {
+ 'identifier' => "foo:%{[@metadata][_type]}:%{[@metadata][_id]}"
+ }
  }
  }
- }
- ]
+ ]
 
- event = input(config) do |pipeline, queue|
- queue.pop
- end
+ event = input(config) do |pipeline, queue|
+ queue.pop
+ end
+
+ expect(event.get('identifier')).to eq('foo:logs:C5b2xLQwTZa76jBmHIbwHQ')
+ end if ecs_select.active_mode == :disabled
 
- expect(event.get('identifier')).to eq('foo:logs:C5b2xLQwTZa76jBmHIbwHQ')
  end
+
  end
 
  context "when not defining the docinfo" do
@@ -542,9 +594,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  queue.pop
  end
 
- expect(event.get("[@metadata][_index]")).to eq(nil)
- expect(event.get("[@metadata][_type]")).to eq(nil)
- expect(event.get("[@metadata][_id]")).to eq(nil)
+ expect(event.get("[@metadata]")).to be_empty
  end
  end
  end
@@ -568,7 +618,8 @@ describe LogStash::Inputs::TestableElasticsearch do
  it "should set host(s)" do
  plugin.register
  client = plugin.send(:client)
- expect( client.transport.instance_variable_get(:@hosts) ).to eql [{
+
+ expect( client.transport.instance_variable_get(:@seeds) ).to eql [{
  :scheme => "https",
  :host => "ac31ebb90241773157043c34fd26fd46.us-central1.gcp.cloud.es.io",
  :port => 9243,
@@ -600,7 +651,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  it "should set authorization" do
  plugin.register
  client = plugin.send(:client)
- auth_header = client.transport.instance_variable_get(:@options)[:transport_options][:headers]['Authorization']
+ auth_header = extract_transport(client).options[:transport_options][:headers]['Authorization']
 
  expect( auth_header ).to eql "Basic #{Base64.encode64('elastic:my-passwd-00').rstrip}"
  end
@@ -637,7 +688,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  it "should set authorization" do
  plugin.register
  client = plugin.send(:client)
- auth_header = client.transport.instance_variable_get(:@options)[:transport_options][:headers]['Authorization']
+ auth_header = extract_transport(client).options[:transport_options][:headers]['Authorization']
 
  expect( auth_header ).to eql "ApiKey #{Base64.strict_encode64('foo:bar')}"
  end
@@ -658,7 +709,7 @@ describe LogStash::Inputs::TestableElasticsearch do
  it "should set proxy" do
  plugin.register
  client = plugin.send(:client)
- proxy = client.transport.instance_variable_get(:@options)[:transport_options][:proxy]
+ proxy = extract_transport(client).options[:transport_options][:proxy]
 
  expect( proxy ).to eql "http://localhost:1234"
  end
@@ -670,12 +721,162 @@ describe LogStash::Inputs::TestableElasticsearch do
  plugin.register
  client = plugin.send(:client)
 
- expect( client.transport.instance_variable_get(:@options)[:transport_options] ).to_not include(:proxy)
+ expect( extract_transport(client).options[:transport_options] ).to_not include(:proxy)
  end
  end
 
- shared_examples'configurable timeout' do |config_name, manticore_transport_option|
+ class StoppableServer
+
+ attr_reader :port
+
+ def initialize()
+ queue = Queue.new
+ @first_req_waiter = java.util.concurrent.CountDownLatch.new(1)
+ @first_request = nil
+
+ @t = java.lang.Thread.new(
+ proc do
+ begin
+ @server = WEBrick::HTTPServer.new :Port => 0, :DocumentRoot => ".",
+ :Logger => Cabin::Channel.get, # silence WEBrick logging
+ :StartCallback => Proc.new {
+ queue.push("started")
+ }
+ @port = @server.config[:Port]
+ @server.mount_proc '/' do |req, res|
+ res.body = '''
+ {
+ "name": "ce7ccfb438e8",
+ "cluster_name": "docker-cluster",
+ "cluster_uuid": "DyR1hN03QvuCWXRy3jtb0g",
+ "version": {
+ "number": "7.13.1",
+ "build_flavor": "default",
+ "build_type": "docker",
+ "build_hash": "9a7758028e4ea59bcab41c12004603c5a7dd84a9",
+ "build_date": "2021-05-28T17:40:59.346932922Z",
+ "build_snapshot": false,
+ "lucene_version": "8.8.2",
+ "minimum_wire_compatibility_version": "6.8.0",
+ "minimum_index_compatibility_version": "6.0.0-beta1"
+ },
+ "tagline": "You Know, for Search"
+ }
+ '''
+ res.status = 200
+ res['Content-Type'] = 'application/json'
+ @first_request = req
+ @first_req_waiter.countDown()
+ end
+
+ @server.mount_proc '/logstash_unit_test/_search' do |req, res|
+ res.body = '''
+ {
+ "took" : 1,
+ "timed_out" : false,
+ "_shards" : {
+ "total" : 1,
+ "successful" : 1,
+ "skipped" : 0,
+ "failed" : 0
+ },
+ "hits" : {
+ "total" : {
+ "value" : 10000,
+ "relation" : "gte"
+ },
+ "max_score" : 1.0,
+ "hits" : [
+ {
+ "_index" : "test_bulk_index_2",
+ "_type" : "_doc",
+ "_id" : "sHe6A3wBesqF7ydicQvG",
+ "_score" : 1.0,
+ "_source" : {
+ "@timestamp" : "2021-09-20T15:02:02.557Z",
+ "message" : "{\"name\": \"Andrea\"}",
+ "@version" : "1",
+ "host" : "kalispera",
+ "sequence" : 5
+ }
+ }
+ ]
+ }
+ }
+ '''
+ res.status = 200
+ res['Content-Type'] = 'application/json'
+ @first_request = req
+ @first_req_waiter.countDown()
+ end
+
+
+
+ @server.start
+ rescue => e
+ puts "Error in webserver thread #{e}"
+ # ignore
+ end
+ end
+ )
+ @t.daemon = true
+ @t.start
+ queue.pop # blocks until the server is up
+ end
+
+ def stop
+ @server.shutdown
+ end
+
+ def wait_receive_request
+ @first_req_waiter.await(2, java.util.concurrent.TimeUnit::SECONDS)
+ @first_request
+ end
+ end
+
+ describe "'user-agent' header" do
+ let!(:webserver) { StoppableServer.new } # webserver must be started before the call, so no lazy "let"
+
+ after :each do
+ webserver.stop
+ end
+
+ it "server should be started" do
+ require 'net/http'
+ response = nil
+ Net::HTTP.start('localhost', webserver.port) {|http|
+ response = http.request_get('/')
+ }
+ expect(response.code.to_i).to eq(200)
+ end
+
+ context "used by plugin" do
+ let(:config) do
+ {
+ "hosts" => ["localhost:#{webserver.port}"],
+ "query" => '{ "query": { "match": { "statuscode": 200 } }, "sort": [ "_doc" ] }',
+ "index" => "logstash_unit_test"
+ }
+ end
+ let(:plugin) { described_class.new(config) }
+ let(:event) { LogStash::Event.new({}) }
+
+ it "client should send the expected user-agent" do
+ plugin.register
+
+ queue = []
+ plugin.run(queue)
+
+ request = webserver.wait_receive_request
+
+ expect(request.header['user-agent'].size).to eq(1)
+ expect(request.header['user-agent'][0]).to match(/logstash\/\d*\.\d*\.\d* \(OS=.*; JVM=.*\) logstash-input-elasticsearch\/\d*\.\d*\.\d*/)
+ end
+ end
+ end
+
+ shared_examples 'configurable timeout' do |config_name, manticore_transport_option|
  let(:config_value) { fail NotImplementedError }
  let(:config) { super().merge(config_name => config_value) }
  {
@@ -706,6 +907,9 @@ describe LogStash::Inputs::TestableElasticsearch do
  transport_options = new_elasticsearch_client_params[:transport_options]
  expect(transport_options).to include(manticore_transport_option)
  expect(transport_options[manticore_transport_option]).to eq(config_value.to_i)
+ mock_client = double("fake_client")
+ allow(mock_client).to receive(:ping)
+ mock_client
  end
 
  plugin.register
@@ -756,4 +960,10 @@ describe LogStash::Inputs::TestableElasticsearch do
  end
 
  end
+
+ # @note can be removed once we depend on elasticsearch gem >= 6.x
+ def extract_transport(client) # on 7.x client.transport is a ES::Transport::Client
+ client.transport.respond_to?(:transport) ? client.transport.transport : client.transport
+ end
+
  end
@@ -24,7 +24,6 @@ describe LogStash::Inputs::Elasticsearch do
  ESHelper.index_doc(@es, :index => 'logs', :body => { :response => 404, :message=> 'Not Found'})
  end
  @es.indices.refresh
- plugin.register
  end
 
  after(:each) do
@@ -33,6 +32,10 @@ describe LogStash::Inputs::Elasticsearch do
  end
 
  shared_examples 'an elasticsearch index plugin' do
+ before(:each) do
+ plugin.register
+ end
+
  it 'should retrieve json event from elasticsearch' do
  queue = []
  plugin.run(queue)
@@ -43,6 +46,10 @@ describe LogStash::Inputs::Elasticsearch do
  end
 
  describe 'against an unsecured elasticsearch', :integration => true do
+ before(:each) do
+ plugin.register
+ end
+
  it_behaves_like 'an elasticsearch index plugin'
  end
 
@@ -66,8 +73,7 @@ describe LogStash::Inputs::Elasticsearch do
  let(:queue) { [] }
 
  it "fails to run the plugin" do
- plugin.register
- expect { plugin.run queue }.to raise_error Elasticsearch::Transport::Transport::Errors::Unauthorized
+ expect { plugin.register }.to raise_error Elasticsearch::Transport::Transport::Errors::Unauthorized
  end
  end
 
metadata CHANGED
@@ -1,29 +1,15 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-elasticsearch
  version: !ruby/object:Gem::Version
- version: 4.9.3
+ version: 4.12.1
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-08-20 00:00:00.000000000 Z
+ date: 2021-10-15 00:00:00.000000000 Z
  dependencies:
- - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '1.0'
- name: logstash-mixin-validator_support
- prerelease: false
- type: :runtime
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '1.0'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
@@ -47,45 +33,59 @@ dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: 5.0.5
- name: elasticsearch
+ version: '1.3'
+ name: logstash-mixin-ecs_compatibility_support
  prerelease: false
  type: :runtime
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: 5.0.5
+ version: '1.3'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
- name: logstash-codec-json
+ version: '1.0'
+ name: logstash-mixin-event_support
  prerelease: false
  type: :runtime
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '1.0'
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.0'
+ name: logstash-mixin-validator_support
+ prerelease: false
+ type: :runtime
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.0'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
- name: sequel
+ version: 7.14.0
+ name: elasticsearch
  prerelease: false
  type: :runtime
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
+ version: 7.14.0
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
@@ -142,12 +142,26 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: 0.7.1
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ name: logstash-codec-plain
+ prerelease: false
+ type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.15.4
+ version: '1'
  name: faraday
  prerelease: false
  type: :development
155
169
  requirements:
156
170
  - - "~>"
157
171
  - !ruby/object:Gem::Version
158
- version: 0.15.4
172
+ version: '1'
159
173
  - !ruby/object:Gem::Dependency
160
174
  requirement: !ruby/object:Gem::Requirement
161
175
  requirements:
@@ -184,6 +198,34 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '0.6'
+ name: cabin
+ prerelease: false
+ type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '0.6'
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ name: webrick
+ prerelease: false
+ type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  description: This gem is a Logstash plugin required to be installed on top of the
  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
  gem is not a stand-alone program