logstash-output-elasticsearch 0.1.18-java → 0.1.19-java
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 88af0f35fb188226ed8a3af96835a57aef17cde6
+  data.tar.gz: 4e826e7a0dc993a21ca18a894d59fc2a73670f3d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 542687e54823d84d895ac468a484ba80f76ff7f31909e424d26f065db1d5b167c20950f7003b6722b39ef9d6eb6aed9a546c5366ef1d249c5daf0b1fc992180b
+  data.tar.gz: f86b66b834d212bbd43ad77b48c273e1e960ff57c216c3b1129edfc1591d3377e660753d084f262cfc391a3999c1b8ae380a7a1cb42460fccb4109c20e384f7c
lib/logstash/outputs/elasticsearch.rb
CHANGED
@@ -194,11 +194,11 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
   # For more details on actions, check out the http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html[Elasticsearch bulk API documentation]
   config :action, :validate => :string, :default => "index"

-  # Username and password (HTTP
+  # Username and password (only valid when protocol is HTTP; this setting works with HTTP or HTTPS auth)
   config :user, :validate => :string
   config :password, :validate => :password

-  # SSL Configurations (HTTP
+  # SSL Configurations (only valid when protocol is HTTP)
   #
   # Enable SSL
   config :ssl, :validate => :boolean, :default => false
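A minimal usage sketch of the HTTP-only settings described in the updated comments above, mirroring how the plugin's specs construct the output in Ruby; every value here is hypothetical and not taken from this diff:

require "logstash/outputs/elasticsearch"

# user/password and ssl are only honored when protocol is "http",
# which is what the updated comments spell out.
output = LogStash::Outputs::ElasticSearch.new(
  "protocol" => "http",
  "host"     => ["localhost"],
  "user"     => "logstash",
  "password" => "changeme",
  "ssl"      => true
)
output.register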
@@ -425,7 +425,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
           @logger.warn "failed action with response of #{resp_code}, dropping action: #{action}"
         end
       end
-      retry_push(actions_to_retry)
+      retry_push(actions_to_retry) unless actions_to_retry.empty?
     end
   end

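The new guard avoids scheduling an empty retry batch: when every failed action carries a non-retryable status (such as the 400 mapping errors exercised in the new specs further down), the loop above drops them all and actions_to_retry stays empty. A standalone sketch of that decision follows; the constant values and the helper name partition_bulk_results are assumptions for illustration, not the plugin's verbatim source:

RETRYABLE_CODES = [429, 503]   # assumed: transient "try again" statuses
SUCCESS_CODES   = [200, 201]   # assumed: statuses that need no action

def partition_bulk_results(actions, bulk_response)
  return { :retry => [], :drop => [] } unless bulk_response["errors"]
  retryable, dropped = [], []
  actions.zip(bulk_response["statuses"]).each do |action, status|
    if RETRYABLE_CODES.include?(status)
      retryable << action            # would be re-queued via retry_push
    elsif !SUCCESS_CODES.include?(status)
      dropped << action              # logged and dropped, as in the hunk above
    end
  end
  { :retry => retryable, :drop => dropped }
end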
lib/logstash/outputs/elasticsearch/protocol.rb
CHANGED
@@ -78,6 +78,24 @@ module LogStash::Outputs::Elasticsearch
         Elasticsearch::Client.new client_options
       end

+      def self.normalize_bulk_response(bulk_response)
+        if bulk_response["errors"]
+          # The structure of the response from the REST Bulk API is as follows:
+          # {"took"=>74, "errors"=>true, "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
+          #                                                    "_type"=>"logs",
+          #                                                    "_id"=>"AUxTS2C55Jrgi-hC6rQF",
+          #                                                    "_version"=>1,
+          #                                                    "status"=>400,
+          #                                                    "error"=>"MapperParsingException[failed to parse]..."}}]}
+          # where each `item` is a hash of {OPTYPE => Hash[]}. Calling `first` retrieves
+          # this hash as a two-element array whose value is the second element (i.first[1]),
+          # from which the status of that item is then read.
+          {"errors" => true, "statuses" => bulk_response["items"].map { |i| i.first[1]['status'] }}
+        else
+          {"errors" => false}
+        end
+      end
+
       def bulk(actions)
         bulk_response = @client.bulk(:body => actions.collect do |action, args, source|
           if source
@@ -86,11 +104,8 @@ module LogStash::Outputs::Elasticsearch
             next { action => args }
           end
         end.flatten)
-
-
-        else
-          return {"errors" => false}
-        end
+
+        self.class.normalize_bulk_response(bulk_response)
       end # def bulk

       def template_exists?(name)
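A quick illustration of the new HTTPClient helper, using sample data shaped like the response documented in the comment above (values are illustrative only):

failed_response = {
  "took" => 74, "errors" => true,
  "items" => [
    { "create" => { "_index" => "logstash-2014.11.17", "_type" => "logs",
                    "_id" => "AUxTS2C55Jrgi-hC6rQF", "status" => 400,
                    "error" => "MapperParsingException[failed to parse]..." } }
  ]
}

LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response(failed_response)
# => {"errors" => true, "statuses" => [400]}

LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response("errors" => false)
# => {"errors" => false}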
@@ -187,6 +202,16 @@ module LogStash::Outputs::Elasticsearch
         return nodebuilder.settings(@settings).node.client
       end # def build_client

+      def self.normalize_bulk_response(bulk_response)
+        # TODO(talevy): parse item response objects to retrieve correct 200 (OK) or 201(created) status codes
+        if bulk_response.has_failures()
+          {"errors" => true,
+           "statuses" => bulk_response.map { |i| (i.is_failed && i.get_failure.get_status.get_status) || 200 }}
+        else
+          {"errors" => false}
+        end
+      end
+
       def bulk(actions)
         # Actions an array of [ action, action_metadata, source ]
         prep = @client.prepareBulk
@@ -195,14 +220,7 @@ module LogStash::Outputs::Elasticsearch
         end
         response = prep.execute.actionGet()

-
-        return {"errors" => true,
-                "statuses" => response.map { |i| (i.is_failed && i.get_failure.get_status.get_status) || 200 }}
-        else
-          return {"errors" => false}
-        end
-        # returns 200 for all successful actions, represents 201 & 200
-        # TODO(talevy): parse item response objects to retrieve correct 200 (OK) or 201(created) status codes
+        self.class.normalize_bulk_response(response)
       end # def bulk

       def build_request(action, args, source)
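With this change both protocol clients reduce their very different raw responses (a plain hash from the REST API versus a Java BulkResponse object) to the same {"errors", "statuses"} shape, so callers can stay protocol-agnostic. A tiny illustrative sketch, where the helper name failed_action_count is hypothetical and client stands for either implementation:

# Count bulk actions that did not succeed, regardless of which protocol is in use.
def failed_action_count(client, actions)
  result = client.bulk(actions)
  return 0 unless result["errors"]
  result["statuses"].count { |status| status >= 300 }
end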
spec/outputs/elasticsearch/protocol_spec.rb
ADDED
@@ -0,0 +1,52 @@
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/elasticsearch/protocol"
+require "java"
+
+describe LogStash::Outputs::Elasticsearch::Protocols::NodeClient do
+  context "successful" do
+    it "should map correctly" do
+      index_response = org.elasticsearch.action.index.IndexResponse.new("my_index", "my_type", "my_id", 123, true)
+      delete_response = org.elasticsearch.action.delete.DeleteResponse.new("my_index", "my_type", "my_id", 123, true)
+      bulk_item_response_index = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "index", index_response)
+      bulk_item_response_delete = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "delete", delete_response)
+      bulk_response = org.elasticsearch.action.bulk.BulkResponse.new([bulk_item_response_index, bulk_item_response_delete], 0)
+      ret = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.normalize_bulk_response(bulk_response)
+      insist { ret } == {"errors" => false}
+    end
+  end
+
+  context "contains failures" do
+    it "should map correctly" do
+      failure = org.elasticsearch.action.bulk.BulkItemResponse::Failure.new("my_index", "my_type", "my_id", "error message", org.elasticsearch.rest.RestStatus::BAD_REQUEST)
+      bulk_item_response_index = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "index", failure)
+      bulk_item_response_delete = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "delete", failure)
+      bulk_response = org.elasticsearch.action.bulk.BulkResponse.new([bulk_item_response_index, bulk_item_response_delete], 0)
+      actual = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.normalize_bulk_response(bulk_response)
+      insist { actual } == {"errors" => true, "statuses" => [400, 400]}
+    end
+  end
+end
+
+describe LogStash::Outputs::Elasticsearch::Protocols::HTTPClient do
+  context "successful" do
+    it "should map correctly" do
+      bulk_response = {"took"=>74, "errors"=>false, "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
+                                                                          "_type"=>"logs", "_id"=>"AUxTS2C55Jrgi-hC6rQF",
+                                                                          "_version"=>1, "status"=>201}}]}
+      actual = LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response(bulk_response)
+      insist { actual } == {"errors"=> false}
+    end
+  end
+
+  context "contains failures" do
+    it "should map correctly" do
+      bulk_response = {"took"=>71, "errors"=>true,
+                       "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
+                                             "_type"=>"logs", "_id"=>"AUxTQ_OI5Jrgi-hC6rQB", "status"=>400,
+                                             "error"=>"MapperParsingException[failed to parse]..."}}]}
+      actual = LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response(bulk_response)
+      insist { actual } == {"errors"=> true, "statuses"=> [400]}
+    end
+  end
+end
+
spec/outputs/elasticsearch_spec.rb
CHANGED
@@ -478,6 +478,7 @@ describe "outputs/elasticsearch" do
     let(:action1) { ["index", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event1] }
     let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
     let(:action2) { ["index", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event2] }
+    let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") }
     let(:max_retries) { 3 }

     def mock_actions_with_response(*resp)
@@ -581,6 +582,32 @@ describe "outputs/elasticsearch" do
       subject.buffer_flush(:final => true)
       sleep(3)
     end
+
+    it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunately)" do
+      subject.register
+      subject.receive(invalid_event)
+      expect(subject).not_to receive(:retry_push)
+      subject.buffer_flush(:final => true)
+
+      @es.indices.refresh
+      Stud::try(10.times) do
+        r = @es.search
+        insist { r["hits"]["total"] } == 0
+      end
+    end
+
+    it "successful requests should not be appended to retry queue" do
+      subject.register
+      subject.receive(event1)
+      expect(subject).not_to receive(:retry_push)
+      subject.buffer_flush(:final => true)
+
+      @es.indices.refresh
+      Stud::try(10.times) do
+        r = @es.search
+        insist { r["hits"]["total"] } == 1
+      end
+    end
   end
 end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 0.1.18
+  version: 0.1.19
 platform: java
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-03-
+date: 2015-03-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: concurrent-ruby
@@ -212,6 +212,7 @@ files:
 - lib/logstash/outputs/elasticsearch/elasticsearch-template.json
 - lib/logstash/outputs/elasticsearch/protocol.rb
 - logstash-output-elasticsearch.gemspec
+- spec/outputs/elasticsearch/protocol_spec.rb
 - spec/outputs/elasticsearch_spec.rb
 homepage: http://logstash.net/
 licenses:
@@ -241,4 +242,5 @@ signing_key:
 specification_version: 4
 summary: Logstash Output to Elasticsearch
 test_files:
+- spec/outputs/elasticsearch/protocol_spec.rb
 - spec/outputs/elasticsearch_spec.rb