logstash-output-elasticsearch 6.2.3-java → 6.2.4-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/README.md +3 -7
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +0 -14
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/fixtures/htpasswd +2 -0
- data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
- data/spec/integration/outputs/index_spec.rb +97 -47
- metadata +6 -4
- data/spec/integration/outputs/secure_spec.rb +0 -104
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c2ae1de8b2ef04b483cc7ea7d90a9abe2d23493b
+  data.tar.gz: ccbbb21f0a3c56a03f7a4c209d1cdbe80aa8706d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0b894a22dc7344fa44b9dd8f3a14f3575be12e31c982ef1424f915094bfb69d7378492b17b8bc6357a0b8fce2379a8691b30a6294ce5de7fc2b04ff404f7f217
+  data.tar.gz: a888417183f7851d31b0cfc567bb0c0c0d8285c518224c715cfd82db96d3e4b2c2bf5c1026f49a82d91ba0223a8f4aa4d61acb0f83cff809673db8e461525dea
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,7 @@
+## 6.2.4
+- Fix bug where using escaped characters in the password field would attempt to show a warning but instead crash.
+  The warning was also not necessary since escaped characters never worked there before.
+
 ## 6.2.3
 - Fixed a bug introduced in 6.2.2 where passwords needing escapes were not actually sent to ES properly
   encoded.
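For context on these two entries: the plugin URL-encodes the configured password exactly once with `CGI.escape`, so the old workaround of pre-encoding special characters yourself now produces a doubly escaped value. A minimal illustration in plain Ruby (hypothetical passwords, not code from the plugin):

```ruby
require 'cgi'

CGI.escape("p@ss/word")              # => "p%40ss%2Fword"    - raw password, escaped once by the plugin
CGI.escape(CGI.escape("p@ss/word"))  # => "p%2540ss%252Fword" - a pre-encoded password ends up escaped twice
```

With 6.2.3 and later the `password` option should therefore be given the raw, unescaped value; the plugin handles the encoding.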
data/README.md
CHANGED
@@ -47,14 +47,10 @@ bundle exec rspec
 
 - Run integration tests
 
-Dependencies: [Docker](http://docker.com)
-
-Before the test suite is run, we will load and run an
-Elasticsearch instance within a docker container. This container
-will be cleaned up when suite has finished.
-
 ```sh
-
+export INTEGRATION=true
+export ES_VERSION=5.1.1
+./travis-run.sh
 ```
 
 ### 2. Running your unpublished Plugin in Logstash
data/lib/logstash/outputs/elasticsearch/http_client_builder.rb
CHANGED
@@ -124,20 +124,6 @@ module LogStash; module Outputs; class ElasticSearch;
       unsafe_password = password && password.value
       unsafe_escaped_password = unsafe_password ? CGI.escape(unsafe_password) : nil
 
-      # TODO: Remove this when we release LS6.0.0
-      if unsafe_password =~ /%[0-9A-Fa-f]{2}/
-        m = <<-EOM
-          The Elasticsearch output was provided a password that looks like it includes URL encoded characters.
-          Previous versions of this plugin had a bug that required a workaround where users needed to manually
-          URL encode special characters in the password field. Given this, URL encoded strings will
-          be doubly escaped making authentication fail. This may not apply to you.
-          If your password just happens to include string parts that simply look
-          like URL encoded strings like '%2F' but are in fact just a part of your
-          password then you can safely ignore this message.
-        EOM
-        @logger.warn(m)
-      end
-
       return {} unless user && unsafe_escaped_password
 
       {
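A note on the warning removed above (plain Ruby, not plugin code): the check was only a heuristic — any `%` followed by two hex digits tripped it, whether or not the password was actually URL-encoded, which is why the message itself admitted it might not apply:

```ruby
# The removed heuristic fires on any "%<hex><hex>" sequence:
"ab%12#" =~ /%[0-9A-Fa-f]{2}/  # => 2   (matches "%12" in an ordinary password)
"abc123" =~ /%[0-9A-Fa-f]{2}/  # => nil (no match, so no warning)
```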
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-elasticsearch'
-  s.version = '6.2.3'
+  s.version = '6.2.4'
   s.licenses = ['apache-2.0']
   s.summary = "Logstash Output to Elasticsearch"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/fixtures/nginx_reverse_proxy.conf
ADDED
@@ -0,0 +1,22 @@
+worker_processes 1;
+daemon off; # run in foreground
+
+events {
+  worker_connections 1024;
+}
+
+http {
+  server {
+    listen 9900 default_server;
+    ssl on;
+    ssl_certificate server.crt;
+    ssl_certificate_key server.key;
+    client_max_body_size 200m;
+
+    location / {
+      proxy_pass http://localhost:9200;
+      auth_basic "Restricted Content";
+      auth_basic_user_file htpasswd;
+    }
+  }
+}
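The `auth_basic_user_file htpasswd;` directive points at the new two-line `spec/fixtures/htpasswd` fixture, whose contents are not shown here. Entries in that file are standard Apache htpasswd records; purely as an illustration (not necessarily how the fixture was generated), Ruby's stdlib can write one:

```ruby
require 'webrick/httpauth/htpasswd'

# Illustration only: add a crypt()-style entry for the credentials the
# integration spec below uses ("simpleuser" / "abc123").
db = WEBrick::HTTPAuth::Htpasswd.new('htpasswd')
db.set_passwd('Restricted Content', 'simpleuser', 'abc123')
db.flush  # writes "simpleuser:<crypted>" to ./htpasswd
```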
data/spec/integration/outputs/index_spec.rb
CHANGED
@@ -1,47 +1,6 @@
 require_relative "../../../spec/es_spec_helper"
 require "logstash/outputs/elasticsearch"
 
-shared_examples "an indexer" do
-  let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
-  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
-  let(:type) { 10.times.collect { rand(10).to_s }.join("") }
-  let(:event_count) { 10000 + rand(500) }
-  let(:config) { "not implemented" }
-  let(:events) { event_count.times.map { event }.to_a }
-  subject { LogStash::Outputs::ElasticSearch.new(config) }
-
-  before do
-    subject.register
-  end
-
-  it "ships events" do
-    subject.multi_receive(events)
-    index_url = "http://#{get_host_port}/#{index}"
-
-    ftw = FTW::Agent.new
-    ftw.post!("#{index_url}/_refresh")
-
-    # Wait until all events are available.
-    Stud::try(10.times) do
-      data = ""
-      response = ftw.get!("#{index_url}/_count?q=*")
-      response.read_body { |chunk| data << chunk }
-      result = LogStash::Json.load(data)
-      cur_count = result["count"]
-      insist { cur_count } == event_count
-    end
-
-    response = ftw.get!("#{index_url}/_search?q=*&size=1000")
-    data = ""
-    response.read_body { |chunk| data << chunk }
-    result = LogStash::Json.load(data)
-    result["hits"]["hits"].each do |doc|
-      insist { doc["_type"] } == type
-      insist { doc["_index"] } == index
-    end
-  end
-end
-
 describe "TARGET_BULK_BYTES", :integration => true do
   let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
   let(:event_count) { 1000 }
@@ -56,7 +15,6 @@ describe "TARGET_BULK_BYTES", :integration => true do
   let(:type) { 10.times.collect { rand(10).to_s }.join("") }
   subject { LogStash::Outputs::ElasticSearch.new(config) }
 
-
   before do
     subject.register
     allow(subject.client).to receive(:bulk_send).with(any_args).and_call_original
@@ -86,20 +44,57 @@ describe "TARGET_BULK_BYTES", :integration => true do
   end
 end
 
+describe "indexing" do
+  let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:event_count) { 10000 + rand(500) }
+  let(:config) { "not implemented" }
+  let(:events) { event_count.times.map { event }.to_a }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) {"#{es_url}/#{index}"}
+  let(:http_client_options) { {} }
+  let(:http_client) do
+    Manticore::Client.new(http_client_options)
+  end
 
-describe "an indexer with custom index_type", :integration => true do
-  it_behaves_like "an indexer" do
+  before do
+    subject.register
+  end
+
+  shared_examples "an indexer" do
+    it "ships events" do
+      subject.multi_receive(events)
+
+      http_client.post("#{es_url}/_refresh").call
+
+      response = http_client.get("#{index_url}/_count?q=*")
+      result = LogStash::Json.load(response.body)
+      cur_count = result["count"]
+      expect(cur_count).to eq(event_count)
+
+      response = http_client.get("#{index_url}/_search?q=*&size=1000")
+      result = LogStash::Json.load(response.body)
+      result["hits"]["hits"].each do |doc|
+        expect(doc["_type"]).to eq(type)
+        expect(doc["_index"]).to eq(index)
+      end
+    end
+  end
+
+  describe "an indexer with custom index_type", :integration => true do
     let(:config) {
       {
        "hosts" => get_host_port,
        "index" => index
      }
    }
+    it_behaves_like("an indexer")
   end
-end
 
-describe "an indexer with no type value set (default to logs)", :integration => true do
-  it_behaves_like "an indexer" do
+  describe "an indexer with no type value set (default to logs)", :integration => true do
    let(:type) { "logs" }
    let(:config) {
      {
@@ -107,5 +102,60 @@ describe "an indexer with no type value set (default to logs)", :integration =>
        "index" => index
      }
    }
+    it_behaves_like("an indexer")
+  end
+
+  describe "a secured indexer", :integration => true do
+    let(:user) { "simpleuser" }
+    let(:password) { "abc123" }
+    let(:cacert) { "spec/fixtures/server.crt" }
+    let(:es_url) {"https://localhost:9900"}
+    let(:config) do
+      {
+        "hosts" => ["localhost:9900"],
+        "user" => user,
+        "password" => password,
+        "ssl" => true,
+        "cacert" => "spec/fixtures/server.crt",
+        "index" => index
+      }
+    end
+    let(:http_client_options) do
+      {
+        :auth => {
+          :user => user,
+          :password => password
+        },
+        :ssl => {
+          :enabled => true,
+          :ca_file => cacert
+        }
+      }
+    end
+    it_behaves_like("an indexer")
+
+    describe "with a password requiring escaping" do
+      let(:user) { "fancyuser" }
+      let(:password) { "ab%12#" }
+
+      include_examples("an indexer")
+    end
+
+    describe "with a password requiring escaping in the URL" do
+      let(:config) do
+        {
+          "hosts" => ["https://#{user}:#{CGI.escape(password)}@localhost:9900"],
+          "ssl" => true,
+          "cacert" => "spec/fixtures/server.crt",
+          "index" => index
+        }
+      end
+
+      begin
+        include_examples("an indexer")
+      rescue => e
+        require 'pry'; binding.pry
+      end
+    end
   end
 end
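One RSpec detail worth noting in the rewritten spec (general RSpec behavior, nothing specific to this plugin): `it_behaves_like` runs a shared group in a nested example group, while `include_examples` copies the examples directly into the current group; either way the examples pick up `let` overrides from the surrounding `describe`, which is how the password variants above reuse "an indexer". A self-contained sketch with hypothetical names:

```ruby
require 'rspec/autorun'

RSpec.describe "shared example reuse" do
  shared_examples "an indexer" do
    it "sees the surrounding context's password" do
      expect(password).to eq("ab%12#")
    end
  end

  describe "with a password requiring escaping" do
    let(:password) { "ab%12#" }
    include_examples "an indexer"  # examples inlined into this group
  end

  describe "another consumer" do
    let(:password) { "ab%12#" }
    it_behaves_like "an indexer"   # examples run in a nested child group
  end
end
```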
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 6.2.3
+  version: 6.2.4
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-01-
+date: 2017-01-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -206,6 +206,8 @@ files:
 - lib/logstash/outputs/elasticsearch/template_manager.rb
 - logstash-output-elasticsearch.gemspec
 - spec/es_spec_helper.rb
+- spec/fixtures/htpasswd
+- spec/fixtures/nginx_reverse_proxy.conf
 - spec/fixtures/scripts/scripted_update.groovy
 - spec/fixtures/scripts/scripted_update_nested.groovy
 - spec/fixtures/scripts/scripted_upsert.groovy
@@ -217,7 +219,6 @@ files:
 - spec/integration/outputs/pipeline_spec.rb
 - spec/integration/outputs/retry_spec.rb
 - spec/integration/outputs/routing_spec.rb
-- spec/integration/outputs/secure_spec.rb
 - spec/integration/outputs/sniffer_spec.rb
 - spec/integration/outputs/templates_5x_spec.rb
 - spec/integration/outputs/templates_spec.rb
@@ -259,6 +260,8 @@ specification_version: 4
 summary: Logstash Output to Elasticsearch
 test_files:
 - spec/es_spec_helper.rb
+- spec/fixtures/htpasswd
+- spec/fixtures/nginx_reverse_proxy.conf
 - spec/fixtures/scripts/scripted_update.groovy
 - spec/fixtures/scripts/scripted_update_nested.groovy
 - spec/fixtures/scripts/scripted_upsert.groovy
@@ -270,7 +273,6 @@ test_files:
 - spec/integration/outputs/pipeline_spec.rb
 - spec/integration/outputs/retry_spec.rb
 - spec/integration/outputs/routing_spec.rb
-- spec/integration/outputs/secure_spec.rb
 - spec/integration/outputs/sniffer_spec.rb
 - spec/integration/outputs/templates_5x_spec.rb
 - spec/integration/outputs/templates_spec.rb
data/spec/integration/outputs/secure_spec.rb
DELETED
@@ -1,104 +0,0 @@
-require_relative "../../../spec/es_spec_helper"
-
-describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
-  subject do
-    require "logstash/outputs/elasticsearch"
-    settings = {
-      "node_name" => "logstash",
-      "cluster" => "elasticsearch",
-      "hosts" => "node01",
-      "user" => "user",
-      "password" => "changeme",
-      "ssl" => true,
-      "cacert" => "/tmp/ca/certs/cacert.pem",
-      # or
-      #"truststore" => "/tmp/ca/truststore.jks",
-      #"truststore_password" => "testeteste"
-    }
-    next LogStash::Outputs::ElasticSearch.new(settings)
-  end
-
-  before :each do
-    subject.register
-  end
-
-  it "sends events to ES" do
-    expect {
-      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-    }.to_not raise_error
-  end
-end
-
-describe "connect using HTTP Authentication", :elasticsearch_secure => true do
-  subject do
-    require "logstash/outputs/elasticsearch"
-    settings = {
-      "cluster" => "elasticsearch",
-      "hosts" => "node01",
-      "user" => "user",
-      "password" => "changeme",
-    }
-    next LogStash::Outputs::ElasticSearch.new(settings)
-  end
-
-  before :each do
-    subject.register
-  end
-
-  it "sends events to ES" do
-    expect {
-      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-    }.to_not raise_error
-  end
-end
-
-describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
-  subject do
-    require "logstash/outputs/elasticsearch"
-    settings = {
-      "node_name" => "logstash",
-      "cluster" => "elasticsearch",
-      "hosts" => "node01",
-      "user" => "user",
-      "password" => "changeme",
-      "ssl" => true,
-      "cacert" => "/tmp/ca/certs/cacert.pem",
-      # or
-      #"truststore" => "/tmp/ca/truststore.jks",
-      #"truststore_password" => "testeteste"
-    }
-    next LogStash::Outputs::ElasticSearch.new(settings)
-  end
-
-  before :each do
-    subject.register
-  end
-
-  it "sends events to ES" do
-    expect {
-      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-    }.to_not raise_error
-  end
-end
-
-describe "connect using HTTP Authentication", :elasticsearch_secure => true do
-  subject do
-    require "logstash/outputs/elasticsearch"
-    settings = {
-      "hosts" => "node01",
-      "user" => "user",
-      "password" => "changeme",
-    }
-    next LogStash::Outputs::ElasticSearch.new(settings)
-  end
-
-  before :each do
-    subject.register
-  end
-
-  it "sends events to ES" do
-    expect {
-      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-    }.to_not raise_error
-  end
-end