logstash-input-elasticsearch 4.12.1 → 4.13.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e2427b28640265b075a0e21240cf410b2e2252d7516ac7bd0955d48087317f7f
- data.tar.gz: dbc7d84f18348e7fa2292d2b68a5db59a5ffda724eb090ff17503e10e3ff130d
+ metadata.gz: d87078bdf63844901c7684624bc15bd6af23c71c728adb20b86ba76738957926
+ data.tar.gz: 34d8ce81035665b93623aec9008c511000b80a463c0cf482fb459249c763aeaf
  SHA512:
- metadata.gz: dd3f9693c355505fbe5a971a46899ba285e057406db5807d4226b4ace61449f41f20409926d49a5a69c19338020069de13e4a4da7028db9b8f7db6d3ce4e0e6c
- data.tar.gz: 0c170d69801feac7d0df3a79ef4351a6237fa30d5ad3b69e0c8af660981f40a1d814e17d60cebd0e555cdee6613de93c7e407d2cba28f0092f85f5da7446b966
+ metadata.gz: 4b2c5aa7a229bdbc89df35276c22a72f1cdbefc03a9069dfb74e64aad64d121aeb3ee67a9b632fd8ab445d1a256cce23f67feabb4ff379ed78a15507f28b1dce
+ data.tar.gz: 7c7a45cd3817d9e746a03eb166293e389eee16fc512bb2e0ecc546158d67f1f4e0f54bc191212e02ce1af206fe5f98374d70defa3f8ef218bed9e210ac8a8a65
data/CHANGELOG.md CHANGED
@@ -1,3 +1,12 @@
+ ## 4.13.0
+ - Added support for `ca_trusted_fingerprint` when run on Logstash 8.3+ [#178](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/178)
+
+ ## 4.12.3
+ - Fix: update Elasticsearch Ruby client to correctly customize 'user-agent' header [#171](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/171)
+
+ ## 4.12.2
+ - Fix: hosts => "es_host:port" regression [#168](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/168)
+
  ## 4.12.1
  - Fixed too_long_frame_exception by passing scroll_id in the body [#159](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/159)

@@ -14,11 +23,10 @@
  - Fixed SSL handshake hang indefinitely with proxy setup [#156](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/156)

  ## 4.9.2
- - Fix: a regression (in LS 7.14.0) where due the elasticsearch client update (from 5.0.5 to 7.5.0) the `Authorization`
+ - Fix: a regression (in LS 7.14.0) where due the elasticsearch client update (from 5.0.5 to 7.5.0) the `Authorization`
  header isn't passed, this leads to the plugin not being able to leverage `user`/`password` credentials set by the user.
  [#153](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/153)

-
  ## 4.9.1
  - [DOC] Replaced hard-coded links with shared attributes [#143](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/143)
  - [DOC] Added missing quote to docinfo_fields example [#145](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/145)
@@ -58,7 +66,7 @@
  - Feat: Added support for cloud_id / cloud_auth configuration [#112](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/112)

  ## 4.4.0
- - Changed Elasticsearch Client transport to use Manticore [#111](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/111)
+ - Changed Elasticsearch Client transport to use Manticore [#111](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/111)

  ## 4.3.3
  - Loosen restrictions on Elasticsearch gem [#110](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/110)
@@ -97,7 +105,7 @@

  ## 4.0.2
  - Bump ES client to 5.0.2 to get content-type: json behavior
- - Revert unneeded manticore change
+ - Revert unneeded manticore change

  ## 4.0.1
  - Switch internal HTTP client to support TLSv1.2
@@ -110,7 +118,7 @@
  behavior
  - Improve documentation to show sort by \_doc, and how to add it to custom
  queries.
-
+
  ## 3.0.2
  - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99

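Taken together, the 4.12.2 and 4.13.0 entries mean the input now accepts both plain `host:port` strings and full URLs in `hosts`, and, on Logstash 8.3+, can trust a CA by its SHA-256 fingerprint. A minimal sketch of such a configuration, written in the Ruby params-hash style the unit specs below use (the hostname and fingerprint values are placeholders, not taken from this release):

```ruby
# Hypothetical parameters for LogStash::Inputs::Elasticsearch (Logstash 8.3+),
# mirroring the hash-style configs used in the unit specs further down.
params = {
  'hosts'                  => ['es1.example.com:9200'],  # plain host:port, per the 4.12.2 fix
  'query'                  => '{ "query": { "match_all": {} } }',
  'ssl'                    => true,
  'ca_trusted_fingerprint' => 'a' * 64                   # placeholder: 64 hex chars of the CA's SHA-256
}
plugin = LogStash::Inputs::Elasticsearch.new(params)
```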
data/Gemfile CHANGED
@@ -9,3 +9,6 @@ if Dir.exist?(logstash_path) && use_logstash_source
  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
  end
+
+ gem 'manticore', ENV['MANTICORE_VERSION'] if ENV['MANTICORE_VERSION']
+ gem 'elasticsearch', ENV['ELASTICSEARCH_VERSION'] if ENV['ELASTICSEARCH_VERSION']
data/docs/index.asciidoc CHANGED
@@ -103,6 +103,7 @@ This plugin supports the following configuration options plus the <<plugins-{typ
  |Setting |Input type|Required
  | <<plugins-{type}s-{plugin}-api_key>> |<<password,password>>|No
  | <<plugins-{type}s-{plugin}-ca_file>> |a valid filesystem path|No
+ | <<plugins-{type}s-{plugin}-ca_trusted_fingerprint>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-cloud_auth>> |<<password,password>>|No
  | <<plugins-{type}s-{plugin}-cloud_id>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-connect_timeout_seconds>> | <<number,number>>|No
@@ -152,6 +153,15 @@ API key API].

  SSL Certificate Authority file in PEM encoded format, must also include any chain certificates as necessary.

+ [id="plugins-{type}s-{plugin}-ca_trusted_fingerprint"]
+ ===== `ca_trusted_fingerprint`
+
+ * Value type is <<string,string>>, and must contain exactly 64 hexadecimal characters.
+ * There is no default value for this setting.
+ * Use of this option _requires_ Logstash 8.3+
+
+ The SHA-256 fingerprint of an SSL Certificate Authority to trust, such as the autogenerated self-signed CA for an Elasticsearch cluster.
+
  [id="plugins-{type}s-{plugin}-cloud_auth"]
  ===== `cloud_auth`

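For readers wondering where the 64 hexadecimal characters come from: the value is the hex-encoded SHA-256 digest of the CA certificate's DER bytes, the same kind of value stored in the `ca.der.sha256` test fixture added further down. A small illustrative snippet (not part of the plugin), assuming a PEM-encoded CA at `ca.crt`:

```ruby
# Illustrative only: derive a ca_trusted_fingerprint value from a CA certificate
# using Ruby's standard library (the 'ca.crt' path is an assumption).
require 'openssl'
require 'digest'

cert        = OpenSSL::X509::Certificate.new(File.read('ca.crt'))
fingerprint = Digest::SHA256.hexdigest(cert.to_der) # DER bytes -> 64 hex characters
puts fingerprint
```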
data/lib/logstash/inputs/elasticsearch.rb CHANGED
@@ -7,6 +7,7 @@ require 'logstash/plugin_mixins/validator_support/field_reference_validation_ada
  require 'logstash/plugin_mixins/event_support/event_factory_adapter'
  require 'logstash/plugin_mixins/ecs_compatibility_support'
  require 'logstash/plugin_mixins/ecs_compatibility_support/target_check'
+ require 'logstash/plugin_mixins/ca_trusted_fingerprint_support'
  require "base64"

  require "elasticsearch"
@@ -192,6 +193,9 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  # If set, the _source of each hit will be added nested under the target instead of at the top-level
  config :target, :validate => :field_reference

+ # config :ca_trusted_fingerprint, :validate => :sha_256_hex
+ include LogStash::PluginMixins::CATrustedFingerprintSupport
+
  def initialize(params={})
  super(params)

@@ -381,18 +385,24 @@
  end

  def setup_ssl
- @ssl && @ca_file ? { :ssl => true, :ca_file => @ca_file } : {}
+ ssl_options = {}
+
+ ssl_options[:ssl] = true if @ssl
+ ssl_options[:ca_file] = @ca_file if @ssl && @ca_file
+ ssl_options[:trust_strategy] = trust_strategy_for_ca_trusted_fingerprint
+
+ ssl_options
  end

  def setup_hosts
  @hosts = Array(@hosts).map { |host| host.to_s } # potential SafeURI#to_s
- if @ssl
- @hosts.map do |h|
- host, port = h.split(":")
- { :host => host, :scheme => 'https', :port => port }
+ @hosts.map do |h|
+ if h.start_with?('http:', 'https:')
+ h
+ else
+ host, port = h.split(':')
+ { host: host, port: port, scheme: (@ssl ? 'https' : 'http') }
  end
- else
- @hosts
  end
  end

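The reworked `setup_hosts` above is what resolves the `hosts => "es_host:port"` regression noted for 4.12.2: entries that already look like URLs pass through untouched, while bare `host:port` strings are expanded into a hash whose scheme follows the `ssl` setting. A standalone sketch of that mapping (example hostnames only, not the plugin's code):

```ruby
# Standalone illustration of the host normalization performed by setup_hosts above.
ssl   = false
hosts = ['es1.example.com:9200', 'https://es2.example.com:9243']

normalized = hosts.map do |h|
  if h.start_with?('http:', 'https:')
    h # full URLs are passed through unchanged
  else
    host, port = h.split(':')
    { host: host, port: port, scheme: (ssl ? 'https' : 'http') }
  end
end
# => [{:host=>"es1.example.com", :port=>"9200", :scheme=>"http"}, "https://es2.example.com:9243"]
```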
data/logstash-input-elasticsearch.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-input-elasticsearch'
- s.version = '4.12.1'
+ s.version = '4.13.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Reads query results from an Elasticsearch cluster"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -25,7 +25,8 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency 'logstash-mixin-event_support', '~> 1.0'
  s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'

- s.add_runtime_dependency 'elasticsearch', '>= 7.14.0' # LS >= 6.7 and < 7.14 all used version 5.0.5
+ s.add_runtime_dependency 'elasticsearch', '>= 7.17.1'
+ s.add_runtime_dependency 'logstash-mixin-ca_trusted_fingerprint_support', '~> 1.0'

  s.add_runtime_dependency 'tzinfo'
  s.add_runtime_dependency 'tzinfo-data'
@@ -33,9 +34,11 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency 'manticore', ">= 0.7.1"

  s.add_development_dependency 'logstash-codec-plain'
- s.add_development_dependency 'faraday', "~> 1"
  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'timecop'
  s.add_development_dependency 'cabin', ['~> 0.6']
  s.add_development_dependency 'webrick'
+
+ # 3.8.0 has breaking changes WRT to joining, which break our specs
+ s.add_development_dependency 'rufus-scheduler', '~> 3.0.9'
  end
data/spec/es_helper.rb CHANGED
@@ -7,25 +7,26 @@ module ESHelper
  end
  end

- def self.get_client(options)
- require 'elasticsearch/transport/transport/http/faraday' # supports user/password options
- host, port = get_host_port.split(':')
- host_opts = { host: host, port: port, scheme: 'http' }
- ssl_opts = {}
-
- if options[:ca_file]
- ssl_opts = { ca_file: options[:ca_file], version: 'TLSv1.2', verify: false }
- host_opts[:scheme] = 'https'
+ def self.curl_and_get_json_response(url, method: :get, args: nil); require 'open3'
+ cmd = "curl -s -v --show-error #{args} -X #{method.to_s.upcase} -k #{url}"
+ begin
+ out, err, status = Open3.capture3(cmd)
+ rescue Errno::ENOENT
+ fail "curl not available, make sure curl binary is installed and available on $PATH"
  end

- if options[:user] && options[:password]
- host_opts[:user] = options[:user]
- host_opts[:password] = options[:password]
- end
+ if status.success?
+ http_status = err.match(/< HTTP\/1.1 (.*?)/)[1] || '0' # < HTTP/1.1 200 OK\r\n
+ if http_status.strip[0].to_i > 2
+ warn out
+ fail "#{cmd.inspect} unexpected response: #{http_status}\n\n#{err}"
+ end

- Elasticsearch::Client.new(hosts: [host_opts],
- transport_options: { ssl: ssl_opts },
- transport_class: Elasticsearch::Transport::Transport::HTTP::Faraday)
+ LogStash::Json.load(out)
+ else
+ warn out
+ fail "#{cmd.inspect} process failed: #{status}\n\n#{err}"
+ end
  end

  def self.doc_type
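The replacement helper shells out to `curl` and parses the JSON response instead of holding an Elasticsearch client, which is consistent with dropping the `faraday` development dependency from the gemspec above. A hypothetical call against a local cluster (endpoint and payload are assumptions for illustration):

```ruby
# Hypothetical usage of the curl-based helper defined above,
# assuming Elasticsearch is reachable on localhost:9200.
health = ESHelper.curl_and_get_json_response('http://localhost:9200/_cluster/health')
puts health['status'] # e.g. "green"

# POSTing a document, mirroring how the integration spec seeds test data:
args = %q{-H 'Content-Type: application/json' -d '{"message":"Not Found","response":404}'}
ESHelper.curl_and_get_json_response('http://localhost:9200/logs/_doc', method: :post, args: args)
```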
data/spec/fixtures/test_certs/ca.der.sha256 ADDED
@@ -0,0 +1 @@
+ 195a7e7b1bc29f3d7913a918a44721704d27fa56facea0cd72a8093c7107c283
data/spec/inputs/elasticsearch_spec.rb CHANGED
@@ -19,7 +19,14 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  let(:queue) { Queue.new }

  before(:each) do
- Elasticsearch::Client.send(:define_method, :ping) { } # define no-action ping method
+ Elasticsearch::Client.send(:define_method, :ping) { } # define no-action ping method
+ end
+
+ let(:base_config) do
+ {
+ 'hosts' => ["localhost"],
+ 'query' => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
+ }
  end

  context "register" do
@@ -47,7 +54,6 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  end

  it_behaves_like "an interruptible input plugin" do
- let(:esclient) { double("elasticsearch-client") }
  let(:config) do
  {
  "schedule" => "* * * * * UTC"
@@ -55,7 +61,8 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  end

  before :each do
- allow(Elasticsearch::Client).to receive(:new).and_return(esclient)
+ @esclient = double("elasticsearch-client")
+ allow(Elasticsearch::Client).to receive(:new).and_return(@esclient)
  hit = {
  "_index" => "logstash-2014.10.12",
  "_type" => "logs",
@@ -63,10 +70,10 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  "_score" => 1.0,
  "_source" => { "message" => ["ohayo"] }
  }
- allow(esclient).to receive(:search) { { "hits" => { "hits" => [hit] } } }
- allow(esclient).to receive(:scroll) { { "hits" => { "hits" => [hit] } } }
- allow(esclient).to receive(:clear_scroll).and_return(nil)
- allow(esclient).to receive(:ping)
+ allow(@esclient).to receive(:search) { { "hits" => { "hits" => [hit] } } }
+ allow(@esclient).to receive(:scroll) { { "hits" => { "hits" => [hit] } } }
+ allow(@esclient).to receive(:clear_scroll).and_return(nil)
+ allow(@esclient).to receive(:ping)
  end
  end

@@ -78,14 +85,10 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  end

  let(:config) do
- %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- }
- }
- ]
+ {
+ 'hosts' => ["localhost"],
+ 'query' => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
+ }
  end

  let(:mock_response) do
@@ -128,10 +131,11 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  expect(client).to receive(:ping)
  end

+ before { plugin.register }
+
  it 'creates the events from the hits' do
- event = input(config) do |pipeline, queue|
- queue.pop
- end
+ plugin.run queue
+ event = queue.pop

  expect(event).to be_a(LogStash::Event)
  expect(event.get("message")).to eql [ "ohayo" ]
@@ -139,21 +143,16 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do

  context 'when a target is set' do
  let(:config) do
- %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- target => "[@metadata][_source]"
- }
- }
- ]
+ {
+ 'hosts' => ["localhost"],
+ 'query' => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
+ 'target' => "[@metadata][_source]"
+ }
  end

  it 'creates the event using the target' do
- event = input(config) do |pipeline, queue|
- queue.pop
- end
+ plugin.run queue
+ event = queue.pop

  expect(event).to be_a(LogStash::Event)
  expect(event.get("[@metadata][_source][message]")).to eql [ "ohayo" ]
@@ -450,24 +449,21 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
  end

- context 'with docinfo enabled' do
- let(:config_metadata) do
- %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- }
- }
- ]
+ before do
+ if do_register
+ plugin.register
+ plugin.run queue
  end
+ end

- it "provides document info under metadata" do
- event = input(config_metadata) do |pipeline, queue|
- queue.pop
- end
+ let(:do_register) { true }
+
+ let(:event) { queue.pop }

+ context 'with docinfo enabled' do
+ let(:config) { base_config.merge 'docinfo' => true }
+
+ it "provides document info under metadata" do
  if ecs_select.active_mode == :disabled
  expect(event.get("[@metadata][_index]")).to eq('logstash-2014.10.12')
  expect(event.get("[@metadata][_type]")).to eq('logs')
@@ -479,123 +475,72 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  end
  end

- it 'merges values if the `docinfo_target` already exist in the `_source` document' do
- config_metadata_with_hash = %Q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- docinfo_target => 'metadata_with_hash'
- }
- }
- ]
+ context 'with docinfo_target' do
+ let(:config) { base_config.merge 'docinfo' => true, 'docinfo_target' => docinfo_target }
+ let(:docinfo_target) { 'metadata_with_hash' }
+
+ it 'merges values if the `docinfo_target` already exist in the `_source` document' do
+ expect(event.get("[metadata_with_hash][_index]")).to eq('logstash-2014.10.12')
+ expect(event.get("[metadata_with_hash][_type]")).to eq('logs')
+ expect(event.get("[metadata_with_hash][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
+ expect(event.get("[metadata_with_hash][awesome]")).to eq("logstash")
+ end
+
+ context 'non-existent' do
+ let(:docinfo_target) { 'meta' }
+
+ it 'should move the document information to the specified field' do
+ expect(event.get("[meta][_index]")).to eq('logstash-2014.10.12')
+ expect(event.get("[meta][_type]")).to eq('logs')
+ expect(event.get("[meta][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
+ end

- event = input(config_metadata_with_hash) do |pipeline, queue|
- queue.pop
  end

- expect(event.get("[metadata_with_hash][_index]")).to eq('logstash-2014.10.12')
- expect(event.get("[metadata_with_hash][_type]")).to eq('logs')
- expect(event.get("[metadata_with_hash][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
- expect(event.get("[metadata_with_hash][awesome]")).to eq("logstash")
  end

  context 'if the `docinfo_target` exist but is not of type hash' do
- let (:config) { {
- "hosts" => ["localhost"],
- "query" => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
- "docinfo" => true,
- "docinfo_target" => 'metadata_with_string'
- } }
- it 'thows an exception if the `docinfo_target` exist but is not of type hash' do
+ let(:config) { base_config.merge 'docinfo' => true, "docinfo_target" => 'metadata_with_string' }
+ let(:do_register) { false }
+
+ it 'raises an exception if the `docinfo_target` exist but is not of type hash' do
  expect(client).not_to receive(:clear_scroll)
  plugin.register
  expect { plugin.run([]) }.to raise_error(Exception, /incompatible event/)
  end
- end

- it 'should move the document information to the specified field' do
- config = %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- docinfo_target => 'meta'
- }
- }
- ]
- event = input(config) do |pipeline, queue|
- queue.pop
- end
-
- expect(event.get("[meta][_index]")).to eq('logstash-2014.10.12')
- expect(event.get("[meta][_type]")).to eq('logs')
- expect(event.get("[meta][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
  end

- it "allows to specify which fields from the document info to save to metadata" do
- fields = ["_index"]
- config = %Q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- docinfo_fields => #{fields}
- }
- }]
+ context 'with docinfo_fields' do
+ let(:config) { base_config.merge 'docinfo' => true, "docinfo_fields" => ["_index"] }

- event = input(config) do |pipeline, queue|
- queue.pop
+ it "allows to specify which fields from the document info to save to metadata" do
+ meta_base = event.get(ecs_select.active_mode == :disabled ? "@metadata" : "[@metadata][input][elasticsearch]")
+ expect(meta_base.keys).to eql ["_index"]
  end

- meta_base = event.get(ecs_select.active_mode == :disabled ? "@metadata" : "[@metadata][input][elasticsearch]")
- expect(meta_base.keys).to eq(fields)
  end

- it 'should be able to reference metadata fields in `add_field` decorations' do
- config = %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- docinfo => true
- add_field => {
- 'identifier' => "foo:%{[@metadata][_type]}:%{[@metadata][_id]}"
- }
- }
- }
- ]
+ context 'add_field' do
+ let(:config) { base_config.merge 'docinfo' => true,
+ 'add_field' => { 'identifier' => "foo:%{[@metadata][_type]}:%{[@metadata][_id]}" } }

- event = input(config) do |pipeline, queue|
- queue.pop
- end
+ it 'should be able to reference metadata fields in `add_field` decorations' do
+ expect(event.get('identifier')).to eq('foo:logs:C5b2xLQwTZa76jBmHIbwHQ')
+ end if ecs_select.active_mode == :disabled

- expect(event.get('identifier')).to eq('foo:logs:C5b2xLQwTZa76jBmHIbwHQ')
- end if ecs_select.active_mode == :disabled
+ end

  end

- end
+ context "when not defining the docinfo" do
+ let(:config) { base_config }

- context "when not defining the docinfo" do
- it 'should keep the document information in the root of the event' do
- config = %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- }
- }
- ]
- event = input(config) do |pipeline, queue|
- queue.pop
+ it 'should keep the document information in the root of the event' do
+ expect(event.get("[@metadata]")).to be_empty
  end
-
- expect(event.get("[@metadata]")).to be_empty
  end
+
  end
  end

@@ -740,9 +685,7 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  begin
  @server = WEBrick::HTTPServer.new :Port => 0, :DocumentRoot => ".",
  :Logger => Cabin::Channel.get, # silence WEBrick logging
- :StartCallback => Proc.new {
- queue.push("started")
- }
+ :StartCallback => Proc.new { queue.push("started") }
  @port = @server.config[:Port]
  @server.mount_proc '/' do |req, res|
  res.body = '''
@@ -811,11 +754,9 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  @first_req_waiter.countDown()
  end

-
-
  @server.start
  rescue => e
- puts "Error in webserver thread #{e}"
+ warn "ERROR in webserver thread #{e.inspect}\n #{e.backtrace.join("\n ")}"
  # ignore
  end
  end
@@ -914,6 +855,8 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do

  plugin.register
  end
+
+ after { plugin.do_stop }
  end
  end

@@ -933,7 +876,7 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
  {
  "hosts" => ["localhost"],
  "query" => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
- "schedule" => "* * * * * UTC"
+ "schedule" => "* * * * * * UTC" # every second
  }
  end

@@ -942,21 +885,17 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do

  end

  it "should properly schedule" do
- Timecop.travel(Time.new(2000))
- Timecop.scale(60)
- runner = Thread.new do
+ begin
  expect(plugin).to receive(:do_run) {
  queue << LogStash::Event.new({})
  }.at_least(:twice)
-
- plugin.run(queue)
+ runner = Thread.start { plugin.run(queue) }
+ sleep 3.0
+ ensure
+ plugin.do_stop
+ runner.join if runner
  end
- sleep 3
- plugin.stop
- runner.kill
- runner.join
- expect(queue.size).to eq(2)
- Timecop.return
+ expect(queue.size).to be >= 2
  end

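One detail worth noting from the scheduling spec above: rufus-scheduler (pinned to `~> 3.0.9` in the gemspec) treats a six-field cron expression's first field as seconds, so `"* * * * * * UTC"` fires every second rather than every minute. A minimal sketch, not taken from the plugin:

```ruby
# Minimal sketch of six-field cron scheduling with rufus-scheduler (~> 3.0).
require 'rufus-scheduler'

scheduler = Rufus::Scheduler.new
scheduler.cron '* * * * * * UTC' do # seconds, minutes, hours, day, month, weekday, zone
  puts Time.now
end
sleep 3 # let it fire a few times
scheduler.shutdown
```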
data/spec/inputs/integration/elasticsearch_spec.rb CHANGED
@@ -6,29 +6,46 @@ require_relative "../../../spec/es_helper"

  describe LogStash::Inputs::Elasticsearch do

- let(:config) { { 'hosts' => [ESHelper.get_host_port],
+ SECURE_INTEGRATION = ENV['SECURE_INTEGRATION'].eql? 'true'
+
+ let(:config) { { 'hosts' => ["http#{SECURE_INTEGRATION ? 's' : nil}://#{ESHelper.get_host_port}"],
  'index' => 'logs',
  'query' => '{ "query": { "match": { "message": "Not found"} }}' } }
+
  let(:plugin) { described_class.new(config) }
  let(:event) { LogStash::Event.new({}) }
  let(:client_options) { Hash.new }

+ let(:user) { ENV['ELASTIC_USER'] || 'simpleuser' }
+ let(:password) { ENV['ELASTIC_PASSWORD'] || 'abc123' }
+ let(:ca_file) { "spec/fixtures/test_certs/ca.crt" }
+
+ let(:es_url) do
+ es_url = ESHelper.get_host_port
+ SECURE_INTEGRATION ? "https://#{es_url}" : "http://#{es_url}"
+ end
+
+ let(:curl_args) do
+ config['user'] ? "-u #{config['user']}:#{config['password']}" : ''
+ end
+
  before(:each) do
- @es = ESHelper.get_client(client_options)
  # Delete all templates first.
  # Clean ES of data before we start.
- @es.indices.delete_template(:name => "*")
+ ESHelper.curl_and_get_json_response "#{es_url}/_index_template/*", method: 'DELETE', args: curl_args
  # This can fail if there are no indexes, ignore failure.
- @es.indices.delete(:index => "*") rescue nil
+ ESHelper.curl_and_get_json_response( "#{es_url}/_index/*", method: 'DELETE', args: curl_args) rescue nil
+ doc_args = "#{curl_args} -H 'Content-Type: application/json' -d '{\"response\": 404, \"message\":\"Not Found\"}'"
  10.times do
- ESHelper.index_doc(@es, :index => 'logs', :body => { :response => 404, :message=> 'Not Found'})
+ ESHelper.curl_and_get_json_response "#{es_url}/logs/_doc", method: 'POST', args: doc_args
  end
- @es.indices.refresh
+ ESHelper.curl_and_get_json_response "#{es_url}/_refresh", method: 'POST', args: curl_args
  end

  after(:each) do
- @es.indices.delete_template(:name => "*")
- @es.indices.delete(:index => "*") rescue nil
+ ESHelper.curl_and_get_json_response "#{es_url}/_index_template/*", method: 'DELETE', args: curl_args
+ # This can fail if there are no indexes, ignore failure.
+ ESHelper.curl_and_get_json_response( "#{es_url}/_index/*", method: 'DELETE', args: curl_args) rescue nil
  end

  shared_examples 'an elasticsearch index plugin' do
@@ -45,7 +62,7 @@ describe LogStash::Inputs::Elasticsearch do
  end
  end

- describe 'against an unsecured elasticsearch', :integration => true do
+ describe 'against an unsecured elasticsearch', integration: true do
  before(:each) do
  plugin.register
  end
@@ -53,29 +70,70 @@ describe LogStash::Inputs::Elasticsearch do
  it_behaves_like 'an elasticsearch index plugin'
  end

- describe 'against a secured elasticsearch', :secure_integration => true do
- let(:user) { ENV['ELASTIC_USER'] || 'simpleuser' }
- let(:password) { ENV['ELASTIC_PASSWORD'] || 'abc123' }
- let(:ca_file) { "spec/fixtures/test_certs/ca.crt" }
+ describe 'against a secured elasticsearch', secure_integration: true do

+ # client_options is for an out-of-band helper
  let(:client_options) { { :ca_file => ca_file, :user => user, :password => password } }

- let(:config) { super().merge('user' => user, 'password' => password, 'ssl' => true, 'ca_file' => ca_file) }
+ let(:config) { super().merge('user' => user, 'password' => password) }

- it_behaves_like 'an elasticsearch index plugin'
+ shared_examples 'secured_elasticsearch' do
+ it_behaves_like 'an elasticsearch index plugin'
+
+ context "incorrect auth credentials" do

- context "incorrect auth credentials" do
+ let(:config) do
+ super().merge('user' => 'archer', 'password' => 'b0gus!')
+ end

- let(:config) do
- super().merge('user' => 'archer', 'password' => 'b0gus!')
+ let(:queue) { [] }
+
+ it "fails to run the plugin" do
+ expect { plugin.register }.to raise_error Elasticsearch::Transport::Transport::Errors::Unauthorized
+ end
  end
+ end

- let(:queue) { [] }
+ context 'with ca_file' do
+ let(:config) { super().merge('ssl' => true, 'ca_file' => ca_file) }
+ it_behaves_like 'secured_elasticsearch'
+ end
+
+ context 'with `ca_trusted_fingerprint`' do
+ let(:ca_trusted_fingerprint) { File.read("spec/fixtures/test_certs/ca.der.sha256").chomp }
+ let(:config) { super().merge('ssl' => true, 'ca_trusted_fingerprint' => ca_trusted_fingerprint) }

- it "fails to run the plugin" do
- expect { plugin.register }.to raise_error Elasticsearch::Transport::Transport::Errors::Unauthorized
+ if Gem::Version.create(LOGSTASH_VERSION) >= Gem::Version.create("8.3.0")
+ it_behaves_like 'secured_elasticsearch'
+ else
+ it 'raises a configuration error' do
+ expect { plugin }.to raise_exception(LogStash::ConfigurationError, a_string_including("ca_trusted_fingerprint"))
+ end
  end
  end
+ end
+
+ context 'setting host:port', integration: true do
+
+ let(:config) do
+ super().merge "hosts" => [ESHelper.get_host_port]
+ end
+
+ it_behaves_like 'an elasticsearch index plugin'

  end
+
+ context 'setting host:port (and ssl)', secure_integration: true do
+
+ let(:client_options) { { :ca_file => ca_file, :user => user, :password => password } }
+
+ let(:config) do
+ config = super().merge "hosts" => [ESHelper.get_host_port]
+ config.merge('user' => user, 'password' => password, 'ssl' => true, 'ca_file' => ca_file)
+ end
+
+ it_behaves_like 'an elasticsearch index plugin'
+
+ end
+
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-elasticsearch
  version: !ruby/object:Gem::Version
- version: 4.12.1
+ version: 4.13.0
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-10-15 00:00:00.000000000 Z
+ date: 2022-05-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -77,7 +77,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 7.14.0
+ version: 7.17.1
  name: elasticsearch
  prerelease: false
  type: :runtime
@@ -85,7 +85,21 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 7.14.0
+ version: 7.17.1
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.0'
+ name: logstash-mixin-ca_trusted_fingerprint_support
+ prerelease: false
+ type: :runtime
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.0'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
@@ -156,20 +170,6 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- - !ruby/object:Gem::Dependency
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '1'
- name: faraday
- prerelease: false
- type: :development
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '1'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
@@ -226,6 +226,20 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 3.0.9
+ name: rufus-scheduler
+ prerelease: false
+ type: :development
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 3.0.9
  description: This gem is a Logstash plugin required to be installed on top of the
  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
  gem is not a stand-alone program
@@ -247,6 +261,7 @@ files:
  - logstash-input-elasticsearch.gemspec
  - spec/es_helper.rb
  - spec/fixtures/test_certs/ca.crt
+ - spec/fixtures/test_certs/ca.der.sha256
  - spec/fixtures/test_certs/ca.key
  - spec/fixtures/test_certs/es.crt
  - spec/fixtures/test_certs/es.key
@@ -280,6 +295,7 @@ summary: Reads query results from an Elasticsearch cluster
  test_files:
  - spec/es_helper.rb
  - spec/fixtures/test_certs/ca.crt
+ - spec/fixtures/test_certs/ca.der.sha256
  - spec/fixtures/test_certs/ca.key
  - spec/fixtures/test_certs/es.crt
  - spec/fixtures/test_certs/es.key