logstash-output-elasticsearch 7.4.1-java → 7.4.2-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/docs/index.asciidoc +23 -6
- data/lib/logstash/outputs/elasticsearch/common.rb +1 -1
- data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +2 -2
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/es_spec_helper.rb +1 -1
- data/spec/integration/outputs/groovy_update_spec.rb +1 -1
- data/spec/integration/outputs/painless_update_spec.rb +73 -44
- data/spec/integration/outputs/parent_spec.rb +119 -20
- data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +30 -5
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 770e27fd1c21bc02c120904d170fea45c7891fbd
+  data.tar.gz: 06db893104a68aaaa310efcfe5516bb3ac09b22a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: de3a1cb1c10024d856ee02498cf3389c55c94c62e8ea42d76a5a830df8446448d772874a0e4568be5f16522b9a637c8a4b0bddd9b662c917f27805ed020243f5
+  data.tar.gz: 07402d951b56e246098da251d58cc64a51ea54e1d532f584569a0ed94543593b788906b48bd01279b38cfd9b7f0644acba106e2a41f1e835d957f9e2a1f804f0
data/CHANGELOG.md
CHANGED
data/docs/index.asciidoc
CHANGED
@@ -23,7 +23,7 @@ include::{include_path}/plugin_header.asciidoc[]
 .Compatibility Note
 [NOTE]
 ================================================================================
-Starting with Elasticsearch 5.3, there's an {ref}modules-http.html[HTTP setting]
+Starting with Elasticsearch 5.3, there's an {ref}/modules-http.html[HTTP setting]
 called `http.content_type.required`. If this option is set to `true`, and you
 are using Logstash 2.4 through 5.2, you need to update the Elasticsearch output
 plugin to version 6.2.5 or higher.
@@ -41,14 +41,17 @@ to upgrade Logstash in lock-step.
 You can learn more about Elasticsearch at <https://www.elastic.co/products/elasticsearch>
 
 ==== Template management for Elasticsearch 5.x
+
 Index template for this version (Logstash 5.0) has been changed to reflect Elasticsearch's mapping changes in version 5.0.
 Most importantly, the subfield for string multi-fields has changed from `.raw` to `.keyword` to match ES default
 behavior.
 
-**
+**Users installing ES 5.x and LS 5.x**
+
 This change will not affect you and you will continue to use the ES defaults.
 
-**
+**Users upgrading from LS 2.x to LS 5.x with ES 5.x**
+
 LS will not force upgrade the template, if `logstash` template already exists. This means you will still use
 `.raw` for sub-fields coming from 2.x. If you choose to use the new template, you will have to reindex your data after
 the new template is installed.
@@ -63,13 +66,27 @@ request are handled differently than error codes for individual documents.
 HTTP requests to the bulk API are expected to return a 200 response code. All other response codes are retried indefinitely.
 
 The following document errors are handled as follows:
-
-
+
+* 400 and 404 errors are sent to the dead letter queue (DLQ), if enabled. If a DLQ is not enabled, a log message will be emitted, and the event will be dropped. See <<dlq-policy>> for more info.
+* 409 errors (conflict) are logged as a warning and dropped.
 
 Note that 409 exceptions are no longer retried. Please set a higher `retry_on_conflict` value if you experience 409 exceptions.
 It is more performant for Elasticsearch to retry these exceptions than this plugin.
 
-
+[[dlq-policy]]
+==== DLQ Policy
+
+Mapping (404) errors from Elasticsearch can lead to data loss. Unfortunately
+mapping errors cannot be handled without human intervention and without looking
+at the field that caused the mapping mismatch. If the DLQ is enabled, the
+original events causing the mapping errors are stored in a file that can be
+processed at a later time. Often times, the offending field can be removed and
+re-indexed to Elasticsearch. If the DLQ is not enabled, and a mapping error
+happens, the problem is logged as a warning, and the event is dropped. See
+<<dead-letter-queues>> for more information about processing events in the DLQ.
+
+==== Batch Sizes
+
 This plugin attempts to send batches of events as a single request. However, if
 a request exceeds 20MB we will break it up until multiple batch requests. If a single document exceeds 20MB it will be sent as a single request.
 
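Because 409 conflicts are now dropped rather than retried by the plugin, the practical remedy the docs point to is a higher `retry_on_conflict` on the output itself. A minimal, spec-style sketch in Ruby follows; only the option names come from the plugin, the values are illustrative:

require "logstash/outputs/elasticsearch"

# Hypothetical settings: let Elasticsearch retry version conflicts itself
# instead of surfacing 409s, which the plugin now logs as warnings and drops.
settings = {
  "hosts"             => ["localhost:9200"],
  "index"             => "logstash-update",
  "action"            => "update",
  "document_id"       => "%{id}",
  "retry_on_conflict" => 5   # raise this if you see 409 warnings in the logs
}
output = LogStash::Outputs::ElasticSearch.new(settings)
output.register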
data/lib/logstash/outputs/elasticsearch/common.rb
CHANGED
@@ -247,7 +247,7 @@ module LogStash; module Outputs; class ElasticSearch;
 retry unless @stopping.true?
 rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
 log_hash = {:code => e.response_code, :url => e.url.sanitized.to_s}
-log_hash[:body] = e.
+log_hash[:body] = e.response_body if @logger.debug? # Generally this is too verbose
 message = "Encountered a retryable error. Will Retry with exponential backoff "
 
 # We treat 429s as a special case because these really aren't errors, but
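The context above logs "Will Retry with exponential backoff". For orientation, here is an illustrative Ruby sketch of that general pattern; it is not the plugin's actual implementation, and `submit`/`batch` are hypothetical names:

# Illustrative only -- retries a block, doubling the delay after each
# failure up to a cap.
def with_exponential_backoff(max_delay: 64)
  delay = 1
  begin
    yield
  rescue StandardError => e
    warn "Encountered a retryable error (#{e.class}). Will retry in #{delay}s"
    sleep delay
    delay = [delay * 2, max_delay].min
    retry
  end
end

# with_exponential_backoff { submit(batch) }   # `submit` and `batch` are hypothetical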
data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb
CHANGED
@@ -66,7 +66,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
 
 request_uri = format_url(url, path)
 
-resp = @manticore.send(method.downcase, request_uri, params)
+resp = @manticore.send(method.downcase, request_uri.to_s, params)
 
 # Manticore returns lazy responses by default
 # We want to block for our usage, this will wait for the repsonse
@@ -106,7 +106,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
 
 request_uri.path = "#{request_uri.path}/#{parsed_path_and_query.path}".gsub(/\/{2,}/, "/")
 
-request_uri
+request_uri
 end
 
 def close
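The first hunk now passes the URL to Manticore as a String rather than as the URI object. A minimal sketch of Manticore's string-URL, lazy-response call pattern, assuming a local Elasticsearch on port 9200:

require "manticore"

url  = "http://localhost:9200/"   # stand-in for request_uri.to_s in the adapter
resp = Manticore.get(url)         # Manticore responses are lazy by default
resp.call                         # force the request, as the adapter does
puts resp.code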
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
 s.name = 'logstash-output-elasticsearch'
-s.version = '7.4.1'
+s.version = '7.4.2'
 s.licenses = ['apache-2.0']
 s.summary = "Logstash Output to Elasticsearch"
 s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/es_spec_helper.rb
CHANGED
@@ -11,7 +11,7 @@ module ESHelper
 Elasticsearch::Client.new(:hosts => [get_host_port])
 end
 
-def self.es_version_satisfies?(requirement)
+def self.es_version_satisfies?(*requirement)
 es_version = RSpec.configuration.filter[:es_version] || ENV['ES_VERSION']
 es_release_version = Gem::Version.new(es_version).release
 Gem::Requirement.new(requirement).satisfied_by?(es_release_version)
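With the splatted `*requirement`, callers can pass several constraints at once and `Gem::Requirement` requires all of them to hold, which is what the `'>= 2', '< 6'` guard in the spec below relies on. A small worked example:

require "rubygems"

# All constraints must be satisfied at the same time.
req = Gem::Requirement.new('>= 2', '< 6')

puts req.satisfied_by?(Gem::Version.new('5.6.3').release)  # => true
puts req.satisfied_by?(Gem::Version.new('6.0.0').release)  # => false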
data/spec/integration/outputs/groovy_update_spec.rb
CHANGED
@@ -1,6 +1,6 @@
 require_relative "../../../spec/es_spec_helper"
 
-if ESHelper.es_version_satisfies?(
+if ESHelper.es_version_satisfies?('>= 2', '< 6')
 describe "Update actions using groovy scripts", :integration => true, :update_tests => 'groovy' do
 require "logstash/outputs/elasticsearch"
 
data/spec/integration/outputs/painless_update_spec.rb
CHANGED
@@ -10,9 +10,11 @@ if ESHelper.es_version_satisfies?(">= 5")
 "index" => "logstash-update",
 "template_overwrite" => true,
 "hosts" => get_host_port(),
-"action" => "update"
-"script_lang" => "painless"
+"action" => "update"
 }
+if ESHelper.es_version_satisfies?('<6')
+settings.merge!({"script_lang" => "painless"})
+end
 LogStash::Outputs::ElasticSearch.new(settings.merge!(options))
 end
 
@@ -33,27 +35,30 @@ if ESHelper.es_version_satisfies?(">= 5")
 end
 
 context "scripted updates" do
-
-
-
-
-
-
-
+if ESHelper.es_version_satisfies?('<6')
+context 'with file based scripts' do
+it "should increment a counter with event/doc 'count' variable" do
+subject = get_es_output({ 'document_id' => "123", 'script' => 'scripted_update', 'script_type' => 'file' })
+subject.register
+subject.multi_receive([LogStash::Event.new("count" => 2)])
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+insist { r["_source"]["counter"] } == 3
+end
 
-
-
-
-
-
-
+it "should increment a counter with event/doc '[data][count]' nested variable" do
+subject = get_es_output({ 'document_id' => "123", 'script' => 'scripted_update_nested', 'script_type' => 'file' })
+subject.register
+subject.multi_receive([LogStash::Event.new("data" => { "count" => 3 })])
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+insist { r["_source"]["counter"] } == 4
+end
+end
 end
 
 it "should increment a counter with event/doc 'count' variable with inline script" do
 subject = get_es_output({
 'document_id' => "123",
 'script' => 'ctx._source.counter += params.event.counter',
-'script_lang' => 'painless',
 'script_type' => 'inline'
 })
 subject.register
@@ -67,7 +72,6 @@ if ESHelper.es_version_satisfies?(">= 5")
 'document_id' => "123",
 'doc_as_upsert' => true,
 'script' => 'if( ctx._source.containsKey("counter") ){ ctx._source.counter += params.event.counter; } else { ctx._source.counter = params.event.counter; }',
-'script_lang' => 'painless',
 'script_type' => 'inline'
 })
 subject.register
@@ -81,7 +85,6 @@ if ESHelper.es_version_satisfies?(">= 5")
 'document_id' => "456",
 'doc_as_upsert' => true,
 'script' => 'if( ctx._source.containsKey("counter") ){ ctx._source.counter += params.event.counter; } else { ctx._source.counter = params.event.counter; }',
-'script_lang' => 'painless',
 'script_type' => 'inline'
 })
 subject.register
@@ -90,20 +93,23 @@ if ESHelper.es_version_satisfies?(">= 5")
 insist { r["_source"]["counter"] } == 3
 end
 
-
-
-
-
-
-
-
-
-
-
-
-
+if ESHelper.es_version_satisfies?('<6')
+context 'with an indexed script' do
+it "should increment a counter with event/doc 'count' variable with indexed script" do
+@es.put_script lang: 'painless', id: 'indexed_update', body: { script: 'ctx._source.counter += params.event.count' }
+subject = get_es_output({
+'document_id' => "123",
+'script' => 'indexed_update',
+'script_type' => 'indexed'
+})
+subject.register
+subject.multi_receive([LogStash::Event.new("count" => 4 )])
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+insist { r["_source"]["counter"] } == 5
+end
+end
 end
-
+end
 
 context "when update with upsert" do
 it "should create new documents with provided upsert" do
@@ -129,21 +135,44 @@ if ESHelper.es_version_satisfies?(">= 5")
 end
 
 context "updates with scripted upsert" do
-
-
-
-
-
-
+if ESHelper.es_version_satisfies?('<6')
+context 'with file based scripts' do
+it "should create new documents with upsert content" do
+subject = get_es_output({ 'document_id' => "456", 'script' => 'scripted_update', 'upsert' => '{"message": "upsert message"}', 'script_type' => 'file' })
+subject.register
+subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+insist { r["_source"]["message"] } == 'upsert message'
+end
+
+it "should create new documents with event/doc as script params" do
+subject = get_es_output({ 'document_id' => "456", 'script' => 'scripted_upsert', 'scripted_upsert' => true, 'script_type' => 'file' })
+subject.register
+subject.multi_receive([LogStash::Event.new("counter" => 1)])
+@es.indices.refresh
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+insist { r["_source"]["counter"] } == 1
+end
+end
 end
 
-
-
-
-
-
-
-
+context 'with an inline script' do
+it "should create new documents with upsert content" do
+subject = get_es_output({ 'document_id' => "456", 'script' => 'ctx._source.counter = params.event.counter', 'upsert' => '{"message": "upsert message"}', 'script_type' => 'inline' })
+subject.register
+subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+insist { r["_source"]["message"] } == 'upsert message'
+end
+
+it "should create new documents with event/doc as script params" do
+subject = get_es_output({ 'document_id' => "456", 'script' => 'ctx._source.counter = params.event.counter', 'scripted_upsert' => true, 'script_type' => 'inline' })
+subject.register
+subject.multi_receive([LogStash::Event.new("counter" => 1)])
+@es.indices.refresh
+r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+insist { r["_source"]["counter"] } == 1
+end
 end
 end
 end
data/spec/integration/outputs/parent_spec.rb
CHANGED
@@ -1,8 +1,9 @@
 require_relative "../../../spec/es_spec_helper"
 require "logstash/outputs/elasticsearch"
 
-if ESHelper.es_version_satisfies?("<= 5")
-
+if ESHelper.es_version_satisfies?("<= 5.x")
+context "when using elasticsearch 5.x and before", :integration => true do
+shared_examples "a type based parent indexer" do
 let(:index) { 10.times.collect { rand(10).to_s }.join("") }
 let(:type) { 10.times.collect { rand(10).to_s }.join("") }
 let(:event_count) { 10000 + rand(500) }
@@ -44,30 +45,128 @@ if ESHelper.es_version_satisfies?("<= 5")
 insist { cur_count } == event_count
 end
 end
-
+end
 
-
-
-
-
-
-
-
-
+describe "(http protocol) index events with static parent" do
+it_behaves_like 'a type based parent indexer' do
+let(:parent) { "test" }
+let(:config) {
+{
+"hosts" => get_host_port,
+"index" => index,
+"parent" => parent
+}
 }
-
+end
 end
-end
 
-
-
-
-
-
-
-
+describe "(http_protocol) index events with fieldref in parent value" do
+it_behaves_like 'a type based parent indexer' do
+let(:config) {
+{
+"hosts" => get_host_port,
+"index" => index,
+"parent" => "%{link_to}"
+}
 }
+end
+end
+end
+end
+
+if ESHelper.es_version_satisfies?(">= 5.6")
+context "when using elasticsearch 5.6 and above", :integration => true do
+
+shared_examples "a join field based parent indexer" do
+let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+let(:event_count) { 10000 + rand(500) }
+let(:parent) { "not_implemented" }
+let(:config) { "not_implemented" }
+let(:parent_id) { "test" }
+let(:join_field) { "join_field" }
+let(:parent_relation) { "parent_type" }
+let(:child_relation) { "child_type" }
+let(:default_headers) {
+{"Content-Type" => "application/json"}
 }
+subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+before do
+# Add mapping and a parent document
+index_url = "http://#{get_host_port()}/#{index}"
+mapping = {
+"mappings" => {
+type => {
+"properties" => {
+join_field => {
+"type" => "join",
+"relations" => { parent_relation => child_relation }
+}
+}
+}
+}
+}
+if ESHelper.es_version_satisfies?('<6')
+mapping.merge!({
+"settings" => {
+"mapping.single_type" => true
+}})
+end
+Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
+pdoc = { "message" => "ohayo", join_field => parent_relation }
+Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call
+
+subject.register
+subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
+end
+
+
+it "ships events" do
+index_url = "http://#{get_host_port()}/#{index}"
+
+Manticore.post("#{index_url}/_refresh").call
+
+# Wait until all events are available.
+Stud::try(10.times) do
+query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
+response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
+data = response.body
+result = LogStash::Json.load(data)
+cur_count = result["count"]
+insist { cur_count } == event_count
+end
+end
+end
+
+describe "(http protocol) index events with static parent" do
+it_behaves_like 'a join field based parent indexer' do
+let(:config) {
+{
+"hosts" => get_host_port,
+"index" => index,
+"parent" => parent_id,
+"document_type" => type,
+"join_field" => join_field,
+"manage_template" => false
+}
+}
+end
+end
+
+describe "(http_protocol) index events with fieldref in parent value" do
+it_behaves_like 'a join field based parent indexer' do
+let(:config) {
+{
+"hosts" => get_host_port,
+"index" => index,
+"parent" => "%{link_to}",
+"document_type" => type,
+"join_field" => join_field,
+"manage_template" => false
+}
+}
+end
 end
 end
 end
data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb
CHANGED
@@ -44,19 +44,44 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter do
 end
 end
 
+describe "bad response codes" do
+let(:uri) { ::LogStash::Util::SafeURI.new("http://localhost:9200") }
+
+it "should raise a bad response code error" do
+resp = double("response")
+allow(resp).to receive(:call)
+allow(resp).to receive(:code).and_return(500)
+allow(resp).to receive(:body).and_return("a body")
+
+expect(subject.manticore).to receive(:get).
+with(uri.to_s + "/", anything).
+and_return(resp)
+
+uri_with_path = uri.clone
+uri_with_path.path = "/"
+
+expect(::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError).to receive(:new).
+with(resp.code, uri_with_path, nil, resp.body).and_call_original
+
+expect do
+subject.perform_request(uri, :get, "/")
+end.to raise_error(::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError)
+end
+end
+
 describe "format_url" do
 let(:url) { ::LogStash::Util::SafeURI.new("http://localhost:9200/path/") }
 let(:path) { "_bulk" }
 subject { described_class.new(double("logger"), {}) }
 
 it "should add the path argument to the uri's path" do
-expect(
+expect(subject.format_url(url, path).path).to eq("/path/_bulk")
 end
 
 context "when uri contains query parameters" do
 let(:query_params) { "query=value&key=value2" }
 let(:url) { ::LogStash::Util::SafeURI.new("http://localhost:9200/path/?#{query_params}") }
-let(:formatted) {
+let(:formatted) { subject.format_url(url, path)}
 
 it "should retain query_params after format" do
 expect(formatted.query).to eq(query_params)
@@ -73,7 +98,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter do
 
 context "when the path contains query parameters" do
 let(:path) { "/special_bulk?pathParam=1"}
-let(:formatted) {
+let(:formatted) { subject.format_url(url, path) }
 
 it "should add the path correctly" do
 expect(formatted.path).to eq("#{url.path}special_bulk")
@@ -86,10 +111,10 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter do
 
 context "when uri contains credentials" do
 let(:url) { ::LogStash::Util::SafeURI.new("http://myuser:mypass@localhost:9200") }
-let(:formatted) {
+let(:formatted) { subject.format_url(url, path) }
 
 it "should remove credentials after format" do
-expect(formatted.
+expect(formatted.userinfo).to be_nil
 end
 end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 7.4.1
+  version: 7.4.2
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-09-
+date: 2017-09-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement