logstash-output-elasticsearch_java 1.0.0.beta1

Files changed (43)
  1. checksums.yaml +7 -0
  2. data/.gitignore +5 -0
  3. data/CHANGELOG.md +2 -0
  4. data/CONTRIBUTORS +31 -0
  5. data/Gemfile +3 -0
  6. data/LICENSE +13 -0
  7. data/NOTICE.TXT +5 -0
  8. data/README.md +98 -0
  9. data/Rakefile +1 -0
  10. data/lib/logstash/outputs/elasticsearch_java/elasticsearch-template.json +41 -0
  11. data/lib/logstash/outputs/elasticsearch_java/protocol.rb +258 -0
  12. data/lib/logstash/outputs/elasticsearch_java.rb +545 -0
  13. data/lib/logstash-output-elasticsearch_java_jars.rb +5 -0
  14. data/logstash-output-elasticsearch_java.gemspec +32 -0
  15. data/spec/es_spec_helper.rb +81 -0
  16. data/spec/integration/outputs/elasticsearch/node_spec.rb +36 -0
  17. data/spec/integration/outputs/index_spec.rb +90 -0
  18. data/spec/integration/outputs/retry_spec.rb +148 -0
  19. data/spec/integration/outputs/routing_spec.rb +60 -0
  20. data/spec/integration/outputs/secure_spec.rb +113 -0
  21. data/spec/integration/outputs/templates_spec.rb +97 -0
  22. data/spec/integration/outputs/transport_create_spec.rb +94 -0
  23. data/spec/integration/outputs/update_spec.rb +88 -0
  24. data/spec/unit/outputs/elasticsearch/protocol_spec.rb +32 -0
  25. data/spec/unit/outputs/elasticsearch_spec.rb +79 -0
  26. data/vendor/jar-dependencies/runtime-jars/antlr-runtime-3.5.jar +0 -0
  27. data/vendor/jar-dependencies/runtime-jars/asm-4.1.jar +0 -0
  28. data/vendor/jar-dependencies/runtime-jars/asm-commons-4.1.jar +0 -0
  29. data/vendor/jar-dependencies/runtime-jars/elasticsearch-1.7.0.jar +0 -0
  30. data/vendor/jar-dependencies/runtime-jars/lucene-analyzers-common-4.10.4.jar +0 -0
  31. data/vendor/jar-dependencies/runtime-jars/lucene-core-4.10.4.jar +0 -0
  32. data/vendor/jar-dependencies/runtime-jars/lucene-grouping-4.10.4.jar +0 -0
  33. data/vendor/jar-dependencies/runtime-jars/lucene-highlighter-4.10.4.jar +0 -0
  34. data/vendor/jar-dependencies/runtime-jars/lucene-join-4.10.4.jar +0 -0
  35. data/vendor/jar-dependencies/runtime-jars/lucene-memory-4.10.4.jar +0 -0
  36. data/vendor/jar-dependencies/runtime-jars/lucene-misc-4.10.4.jar +0 -0
  37. data/vendor/jar-dependencies/runtime-jars/lucene-queries-4.10.4.jar +0 -0
  38. data/vendor/jar-dependencies/runtime-jars/lucene-queryparser-4.10.4.jar +0 -0
  39. data/vendor/jar-dependencies/runtime-jars/lucene-sandbox-4.10.4.jar +0 -0
  40. data/vendor/jar-dependencies/runtime-jars/lucene-spatial-4.10.4.jar +0 -0
  41. data/vendor/jar-dependencies/runtime-jars/lucene-suggest-4.10.4.jar +0 -0
  42. data/vendor/jar-dependencies/runtime-jars/spatial4j-0.4.1.jar +0 -0
  43. metadata +241 -0
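
Most of the integration specs below drive the plugin over the transport protocol; stripped of the interpolated test values, the output block they build looks roughly like the following (host, port, index, and flush_size are placeholder values for illustration, not defaults):

    output {
      elasticsearch_java {
        hosts      => "localhost"
        port       => "9300"
        protocol   => "transport"
        index      => "logstash-test"
        flush_size => 100
      }
    }
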
data/spec/integration/outputs/elasticsearch/node_spec.rb
@@ -0,0 +1,36 @@
+ require_relative "../../../../spec/es_spec_helper"
+ require "logstash/outputs/elasticsearch_java/protocol"
+
+ describe "elasticsearch node client", :integration => true do
+   # Test ElasticSearch Node Client
+   # Reference: http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
+
+   subject { LogStash::Outputs::ElasticSearchJavaPlugins::Protocols::NodeClient }
+
+   it "should support hosts in both string and array" do
+     # Because the *hosts* method in NodeClient is private,
+     # we use *obj.send :method, [args...]* to call it.
+
+     # Node client should support a host given as a string
+     # Case 1: default :hosts in string
+     insist { subject.send :hosts, :hosts => "host", :port => 9300 } == "host:9300"
+     # Case 2: :port =~ /^\d+_\d+$/
+     insist { subject.send :hosts, :hosts => "host", :port => "9300-9302" } == "host:9300,host:9301,host:9302"
+     # Case 3: :hosts =~ /^.+:.+$/
+     insist { subject.send :hosts, :hosts => "host:9303", :port => 9300 } == "host:9303"
+     # Case 4: :hosts =~ /^.+:.+$/ and :port =~ /^\d+_\d+$/
+     insist { subject.send :hosts, :hosts => "host:9303", :port => "9300-9302" } == "host:9303"
+
+     # Node client should support hosts given as an array
+     # Case 5: :hosts in array with a single item
+     insist { subject.send :hosts, :hosts => ["host"], :port => 9300 } == "host:9300"
+     # Case 6: :hosts in array with more than one item
+     insist { subject.send :hosts, :hosts => ["host1", "host2"], :port => 9300 } == "host1:9300,host2:9300"
+     # Case 7: :hosts in array with more than one item and :port =~ /^\d+_\d+$/
+     insist { subject.send :hosts, :hosts => ["host1", "host2"], :port => "9300-9302" } == "host1:9300,host1:9301,host1:9302,host2:9300,host2:9301,host2:9302"
+     # Case 8: :hosts in array with more than one item and some :hosts =~ /^.+:.+$/
+     insist { subject.send :hosts, :hosts => ["host1", "host2:9303"], :port => 9300 } == "host1:9300,host2:9303"
+     # Case 9: :hosts in array with more than one item, :port =~ /^\d+_\d+$/ and some :hosts =~ /^.+:.+$/
+     insist { subject.send :hosts, :hosts => ["host1", "host2:9303"], :port => "9300-9302" } == "host1:9300,host1:9301,host1:9302,host2:9303"
+   end
+ end
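
The assertions above pin down a simple host/port expansion rule: a bare host is paired with every port in a dash-separated range, while a host that already carries an explicit ":port" suffix is passed through unchanged. A minimal Ruby sketch of that behavior (an illustration only, not the plugin's actual implementation in protocol.rb; the method name expand_hosts is hypothetical):

    # Hypothetical re-implementation of the expansion the spec above exercises.
    def expand_hosts(hosts, port)
      Array(hosts).flat_map do |host|
        next host if host =~ /^.+:.+$/     # already has an explicit port, pass through
        if port.to_s =~ /^(\d+)-(\d+)$/    # dash-separated port range
          ($1.to_i..$2.to_i).map { |p| "#{host}:#{p}" }
        else
          "#{host}:#{port}"
        end
      end.join(",")
    end

    expand_hosts(["host1", "host2:9303"], "9300-9302")
    # => "host1:9300,host1:9301,host1:9302,host2:9303"
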
data/spec/integration/outputs/index_spec.rb
@@ -0,0 +1,90 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ shared_examples "an indexer" do
+   let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+   let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+   let(:event_count) { 10000 + rand(500) }
+   let(:flush_size) { rand(200) + 1 }
+   let(:config) { "not implemented" }
+
+   it "ships events" do
+     insist { config } != "not implemented"
+
+     pipeline = LogStash::Pipeline.new(config)
+     pipeline.run
+
+     index_url = "http://#{get_host}:#{get_port('http')}/#{index}"
+
+     ftw = FTW::Agent.new
+     ftw.post!("#{index_url}/_refresh")
+
+     # Wait until all events are available.
+     Stud::try(10.times) do
+       data = ""
+       response = ftw.get!("#{index_url}/_count?q=*")
+       response.read_body { |chunk| data << chunk }
+       result = LogStash::Json.load(data)
+       cur_count = result["count"]
+       insist { cur_count } == event_count
+     end
+
+     response = ftw.get!("#{index_url}/_search?q=*&size=1000")
+     data = ""
+     response.read_body { |chunk| data << chunk }
+     result = LogStash::Json.load(data)
+     result["hits"]["hits"].each do |doc|
+       insist { doc["_type"] } == type
+       insist { doc["_index"] } == index
+     end
+   end
+ end
+
+ describe "an indexer with custom index_type", :integration => true do
+   it_behaves_like "an indexer" do
+     let(:config) {
+       <<-CONFIG
+         input {
+           generator {
+             message => "hello world"
+             count => #{event_count}
+             type => "#{type}"
+           }
+         }
+         output {
+           elasticsearch_java {
+             hosts => "#{get_host()}"
+             port => "#{get_port('transport')}"
+             protocol => "transport"
+             index => "#{index}"
+             flush_size => #{flush_size}
+           }
+         }
+       CONFIG
+     }
+   end
+ end
+
+ describe "an indexer with no type value set (default to logs)", :integration => true do
+   it_behaves_like "an indexer" do
+     let(:type) { "logs" }
+     let(:config) {
+       <<-CONFIG
+         input {
+           generator {
+             message => "hello world"
+             count => #{event_count}
+           }
+         }
+         output {
+           elasticsearch_java {
+             hosts => "#{get_host()}"
+             port => "#{get_port('transport')}"
+             protocol => "transport"
+             index => "#{index}"
+             flush_size => #{flush_size}
+           }
+         }
+       CONFIG
+     }
+   end
+ end
data/spec/integration/outputs/retry_spec.rb
@@ -0,0 +1,148 @@
+ require "elasticsearch"
+ require "logstash/outputs/elasticsearch_java"
+ require_relative "../../../spec/es_spec_helper"
+
+ describe "failures in bulk class expected behavior", :integration => true do
+   let(:template) { '{"template" : "not important, will be updated by :index"}' }
+   let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
+   let(:action1) { ["index", {:_id=>nil, :_routing=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event1] }
+   let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
+   let(:action2) { ["index", {:_id=>nil, :_routing=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event2] }
+   let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") }
+   let(:max_retries) { 3 }
+
+   def mock_actions_with_response(*resp)
+     allow_any_instance_of(LogStash::Outputs::ElasticSearchJavaPlugins::Protocols::NodeClient).to receive(:bulk).and_return(*resp)
+   end
+
+   subject! do
+     settings = {
+       "manage_template" => true,
+       "index" => "logstash-2014.11.17",
+       "template_overwrite" => true,
+       "protocol" => 'transport',
+       "hosts" => get_host(),
+       "port" => get_port('transport'),
+       "retry_max_items" => 10,
+       "retry_max_interval" => 1,
+       "max_retries" => max_retries
+     }
+     next LogStash::Outputs::ElasticSearchJava.new(settings)
+   end
+
+   before :each do
+     # Clean ES of data before we start.
+     @es = get_client
+     @es.indices.delete_template(:name => "*")
+     @es.indices.delete(:index => "*")
+     @es.indices.refresh
+   end
+
+   it "should return no errors if all bulk actions are successful" do
+     mock_actions_with_response({"errors" => false})
+     expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original
+     subject.register
+     subject.receive(event1)
+     subject.receive(event2)
+     subject.buffer_flush(:final => true)
+     sleep(2)
+   end
+
+   it "should raise an exception and be retried by Stud::Buffer" do
+     call_count = 0
+     expect(subject).to receive(:submit).with([action1]).exactly(3).times do
+       if (call_count += 1) <= 2
+         raise "error first two times"
+       else
+         {"errors" => false}
+       end
+     end
+     subject.register
+     subject.receive(event1)
+     subject.teardown
+   end
+
+   it "should retry actions with response status of 503" do
+     mock_actions_with_response({"errors" => true, "statuses" => [200, 200, 503, 503]},
+                                {"errors" => true, "statuses" => [200, 503]},
+                                {"errors" => false})
+     expect(subject).to receive(:submit).with([action1, action1, action1, action2]).ordered.once.and_call_original
+     expect(subject).to receive(:submit).with([action1, action2]).ordered.once.and_call_original
+     expect(subject).to receive(:submit).with([action2]).ordered.once.and_call_original
+
+     subject.register
+     subject.receive(event1)
+     subject.receive(event1)
+     subject.receive(event1)
+     subject.receive(event2)
+     subject.buffer_flush(:final => true)
+     sleep(3)
+   end
+
+   it "should retry actions with response status of 429" do
+     mock_actions_with_response({"errors" => true, "statuses" => [429]},
+                                {"errors" => false})
+     expect(subject).to receive(:submit).with([action1]).twice.and_call_original
+     subject.register
+     subject.receive(event1)
+     subject.buffer_flush(:final => true)
+     sleep(3)
+   end
+
+   it "should retry an event until max_retries is reached" do
+     mock_actions_with_response({"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]})
+     expect(subject).to receive(:submit).with([action1]).exactly(max_retries).times.and_call_original
+     subject.register
+     subject.receive(event1)
+     subject.buffer_flush(:final => true)
+     sleep(3)
+   end
+
+   it "non-retryable errors like mapping errors (400) should be dropped and not retried (unfortunately)" do
+     subject.register
+     subject.receive(invalid_event)
+     expect(subject).not_to receive(:retry_push)
+     subject.teardown
+
+     @es.indices.refresh
+     sleep(5)
+     Stud::try(10.times) do
+       r = @es.search
+       insist { r["hits"]["total"] } == 0
+     end
+   end
+
+   it "successful requests should not be appended to the retry queue" do
+     subject.register
+     subject.receive(event1)
+     expect(subject).not_to receive(:retry_push)
+     subject.teardown
+
+     @es.indices.refresh
+     sleep(5)
+     Stud::try(10.times) do
+       r = @es.search
+       insist { r["hits"]["total"] } == 1
+     end
+   end
+
+   it "should only index proper events" do
+     subject.register
+     subject.receive(invalid_event)
+     subject.receive(event1)
+     subject.teardown
+     @es.indices.refresh
+     sleep(5)
+
+     Stud::try(10.times) do
+       r = @es.search
+       insist { r["hits"]["total"] } == 1
+     end
+   end
+ end
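
Taken together, these retry cases describe the bulk-failure policy the output is expected to follow: per-action statuses of 429 and 503 are re-submitted (subject to max_retries), anything in the 2xx range is done, and client errors such as 400 mapping failures are dropped. A hypothetical sketch of that partitioning step, not taken from the plugin source (partition_bulk_results and RETRYABLE_STATUSES are illustrative names):

    # Hypothetical helper: split bulk actions by whether their per-action status is retryable.
    RETRYABLE_STATUSES = [429, 503]

    def partition_bulk_results(actions, statuses)
      retryable, dropped = [], []
      actions.zip(statuses) do |action, status|
        next if (200..299).include?(status)   # succeeded, nothing to do
        (RETRYABLE_STATUSES.include?(status) ? retryable : dropped) << action
      end
      [retryable, dropped]
    end

    retry_these, drop_these = partition_bulk_results([:a1, :a2, :a3], [200, 429, 400])
    # retry_these == [:a2], drop_these == [:a3]
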
data/spec/integration/outputs/routing_spec.rb
@@ -0,0 +1,60 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ shared_examples "a routing indexer" do
+   let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+   let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+   let(:event_count) { 10000 + rand(500) }
+   let(:flush_size) { rand(200) + 1 }
+   let(:routing) { "not_implemented" }
+   let(:config) { "not_implemented" }
+
+   it "ships events" do
+     insist { routing } != "not_implemented"
+     insist { config } != "not_implemented"
+
+     pipeline = LogStash::Pipeline.new(config)
+     pipeline.run
+
+     index_url = "http://#{get_host()}:#{get_port('http')}/#{index}"
+
+     ftw = FTW::Agent.new
+     ftw.post!("#{index_url}/_refresh")
+
+     # Wait until all events are available.
+     Stud::try(10.times) do
+       data = ""
+       response = ftw.get!("#{index_url}/_count?q=*&routing=#{routing}")
+       response.read_body { |chunk| data << chunk }
+       result = LogStash::Json.load(data)
+       cur_count = result["count"]
+       insist { cur_count } == event_count
+     end
+   end
+ end
+
+ describe "(transport protocol) index events with fieldref in routing value", :integration => true do
+   it_behaves_like 'a routing indexer' do
+     let(:routing) { "test" }
+     let(:config) {
+       <<-CONFIG
+         input {
+           generator {
+             message => "#{routing}"
+             count => #{event_count}
+             type => "#{type}"
+           }
+         }
+         output {
+           elasticsearch_java {
+             hosts => "#{get_host()}"
+             port => "#{get_port('transport')}"
+             protocol => "transport"
+             index => "#{index}"
+             flush_size => #{flush_size}
+             routing => "%{message}"
+           }
+         }
+       CONFIG
+     }
+   end
+ end
data/spec/integration/outputs/secure_spec.rb
@@ -0,0 +1,113 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
+   subject do
+     require "logstash/outputs/elasticsearch_java"
+     settings = {
+       "protocol" => "http",
+       "node_name" => "logstash",
+       "cluster" => "elasticsearch",
+       "hosts" => "node01",
+       "user" => "user",
+       "password" => "changeme",
+       "ssl" => true,
+       "cacert" => "/tmp/ca/certs/cacert.pem",
+       # or
+       #"truststore" => "/tmp/ca/truststore.jks",
+       #"truststore_password" => "testeteste"
+     }
+     next LogStash::Outputs::ElasticSearchJava.new(settings)
+   end
+
+   before :each do
+     subject.register
+   end
+
+   it "sends events to ES" do
+     expect {
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+     }.to_not raise_error
+   end
+ end
+
+ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
+   subject do
+     require "logstash/outputs/elasticsearch_java"
+     settings = {
+       "protocol" => "http",
+       "cluster" => "elasticsearch",
+       "hosts" => "node01",
+       "user" => "user",
+       "password" => "changeme",
+     }
+     next LogStash::Outputs::ElasticSearchJava.new(settings)
+   end
+
+   before :each do
+     subject.register
+   end
+
+   it "sends events to ES" do
+     expect {
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+     }.to_not raise_error
+   end
+ end
+
+ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
+   subject do
+     require "logstash/outputs/elasticsearch_java"
+     settings = {
+       "protocol" => "http",
+       "node_name" => "logstash",
+       "cluster" => "elasticsearch",
+       "hosts" => "node01",
+       "user" => "user",
+       "password" => "changeme",
+       "ssl" => true,
+       "cacert" => "/tmp/ca/certs/cacert.pem",
+       # or
+       #"truststore" => "/tmp/ca/truststore.jks",
+       #"truststore_password" => "testeteste"
+     }
+     next LogStash::Outputs::ElasticSearchJava.new(settings)
+   end
+
+   before :each do
+     subject.register
+   end
+
+   it "sends events to ES" do
+     expect {
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+     }.to_not raise_error
+   end
+ end
+
+ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
+   subject do
+     require "logstash/outputs/elasticsearch_java"
+     settings = {
+       "protocol" => "http",
+       "cluster" => "elasticsearch",
+       "hosts" => "node01",
+       "user" => "user",
+       "password" => "changeme",
+     }
+     next LogStash::Outputs::ElasticSearchJava.new(settings)
+   end
+
+   before :each do
+     subject.register
+   end
+
+   it "sends events to ES" do
+     expect {
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+     }.to_not raise_error
+   end
+ end
data/spec/integration/outputs/templates_spec.rb
@@ -0,0 +1,97 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ describe "index template expected behavior", :integration => true do
+   ["transport"].each do |protocol|
+     context "with protocol => #{protocol}" do
+
+       subject! do
+         require "logstash/outputs/elasticsearch_java"
+         settings = {
+           "manage_template" => true,
+           "template_overwrite" => true,
+           "protocol" => protocol,
+           "hosts" => "#{get_host()}",
+           "port" => "#{get_port('transport')}"
+         }
+         next LogStash::Outputs::ElasticSearchJava.new(settings)
+       end
+
+       before :each do
+         # Delete all templates first.
+         require "elasticsearch"
+
+         # Clean ES of data before we start.
+         @es = get_client
+         @es.indices.delete_template(:name => "*")
+
+         # This can fail if there are no indexes; ignore the failure.
+         @es.indices.delete(:index => "*") rescue nil
+
+         subject.register
+
+         subject.receive(LogStash::Event.new("message" => "sample message here"))
+         subject.receive(LogStash::Event.new("somevalue" => 100))
+         subject.receive(LogStash::Event.new("somevalue" => 10))
+         subject.receive(LogStash::Event.new("somevalue" => 1))
+         subject.receive(LogStash::Event.new("country" => "us"))
+         subject.receive(LogStash::Event.new("country" => "at"))
+         subject.receive(LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }))
+         subject.buffer_flush(:final => true)
+         @es.indices.refresh
+
+         # Wait or fail until everything's indexed.
+         Stud::try(20.times) do
+           r = @es.search
+           insist { r["hits"]["total"] } == 7
+         end
+       end
+
+       it "permits phrase searching on string fields" do
+         results = @es.search(:q => "message:\"sample message\"")
+         insist { results["hits"]["total"] } == 1
+         insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
+       end
+
+       it "numbers dynamically map to a numeric type and permit range queries" do
+         results = @es.search(:q => "somevalue:[5 TO 105]")
+         insist { results["hits"]["total"] } == 2
+
+         values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+         insist { values }.include?(10)
+         insist { values }.include?(100)
+         reject { values }.include?(1)
+       end
+
+       it "does not create a .raw field for the message field" do
+         results = @es.search(:q => "message.raw:\"sample message here\"")
+         insist { results["hits"]["total"] } == 0
+       end
+
+       it "creates a .raw field from any string field which is not_analyzed" do
+         results = @es.search(:q => "country.raw:\"us\"")
+         insist { results["hits"]["total"] } == 1
+         insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
+
+         # Partial or term matches should not work against the not_analyzed field.
+         results = @es.search(:q => "country.raw:\"u\"")
+         insist { results["hits"]["total"] } == 0
+       end
+
+       it "makes [geoip][location] a geo_point" do
+         results = @es.search(:body => { "filter" => { "geo_distance" => { "distance" => "1000km", "geoip.location" => { "lat" => 0.5, "lon" => 0.5 } } } })
+         insist { results["hits"]["total"] } == 1
+         insist { results["hits"]["hits"][0]["_source"]["geoip"]["location"] } == [ 0.0, 0.0 ]
+       end
+
+       it "should index stopwords like 'at'" do
+         results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country" } } } })["aggregations"]["my_agg"]
+         terms = results["buckets"].collect { |b| b["key"] }
+
+         insist { terms }.include?("us")
+
+         # 'at' is a stopword; make sure stopwords are not ignored.
+         insist { terms }.include?("at")
+       end
+     end
+   end
+ end
data/spec/integration/outputs/transport_create_spec.rb
@@ -0,0 +1,94 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ describe "transport client create actions", :integration => true do
+   require "logstash/outputs/elasticsearch_java"
+   require "elasticsearch"
+
+   def get_es_output(action, id = nil)
+     settings = {
+       "manage_template" => true,
+       "index" => "logstash-create",
+       "template_overwrite" => true,
+       "protocol" => "transport",
+       "hosts" => get_host(),
+       "port" => get_port('transport'),
+       "action" => action
+     }
+     settings['document_id'] = id unless id.nil?
+     LogStash::Outputs::ElasticSearchJava.new(settings)
+   end
+
+   before :each do
+     @es = get_client
+     # Delete all templates first.
+     # Clean ES of data before we start.
+     @es.indices.delete_template(:name => "*")
+     # This can fail if there are no indexes; ignore the failure.
+     @es.indices.delete(:index => "*") rescue nil
+   end
+
+   context "when action => create" do
+     it "should create new documents with or without id" do
+       subject = get_es_output("create", "id123")
+       subject.register
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+       @es.indices.refresh
+       # Wait or fail until everything's indexed.
+       Stud::try(3.times) do
+         r = @es.search
+         insist { r["hits"]["total"] } == 1
+       end
+     end
+
+     it "should create new documents without id" do
+       subject = get_es_output("create")
+       subject.register
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+       @es.indices.refresh
+       # Wait or fail until everything's indexed.
+       Stud::try(3.times) do
+         r = @es.search
+         insist { r["hits"]["total"] } == 1
+       end
+     end
+   end
+
+   context "when action => create_unless_exists" do
+     it "should create new documents when a specific id is specified" do
+       subject = get_es_output("create_unless_exists", "id123")
+       subject.register
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.buffer_flush(:final => true)
+       @es.indices.refresh
+       # Wait or fail until everything's indexed.
+       Stud::try(3.times) do
+         r = @es.search
+         insist { r["hits"]["total"] } == 1
+       end
+     end
+
+     it "should fail to create a document when no id is specified" do
+       event = LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0})
+       action = ["create_unless_exists", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event]
+       subject = get_es_output(action[0])
+       subject.register
+       expect { subject.flush([action]) }.to raise_error
+     end
+
+     it "should unsuccessfully submit two records with the same document id" do
+       subject = get_es_output("create_unless_exists", "id123")
+       subject.register
+       subject.receive(LogStash::Event.new("message" => "sample message here"))
+       subject.receive(LogStash::Event.new("message" => "sample message here")) # 400 status failure (same id)
+       subject.buffer_flush(:final => true)
+       @es.indices.refresh
+       # Wait or fail until everything's indexed.
+       Stud::try(3.times) do
+         r = @es.search
+         insist { r["hits"]["total"] } == 1
+       end
+     end
+   end
+ end