logstash-output-elasticsearch 7.4.3-java → 8.0.0-java
- checksums.yaml +5 -5
- data/CHANGELOG.md +5 -18
- data/docs/index.asciidoc +13 -50
- data/lib/logstash/outputs/elasticsearch/common.rb +39 -43
- data/lib/logstash/outputs/elasticsearch/common_configs.rb +2 -11
- data/lib/logstash/outputs/elasticsearch/http_client.rb +22 -27
- data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +2 -2
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +12 -31
- data/lib/logstash/outputs/elasticsearch/template_manager.rb +6 -4
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/es_spec_helper.rb +0 -6
- data/spec/integration/outputs/compressed_indexing_spec.rb +44 -46
- data/spec/integration/outputs/delete_spec.rb +49 -51
- data/spec/integration/outputs/groovy_update_spec.rb +129 -131
- data/spec/integration/outputs/index_version_spec.rb +81 -82
- data/spec/integration/outputs/ingest_pipeline_spec.rb +49 -51
- data/spec/integration/outputs/painless_update_spec.rb +130 -170
- data/spec/integration/outputs/parent_spec.rb +55 -149
- data/spec/integration/outputs/sniffer_spec.rb +2 -5
- data/spec/integration/outputs/templates_5x_spec.rb +82 -81
- data/spec/integration/outputs/templates_spec.rb +81 -81
- data/spec/integration/outputs/update_spec.rb +99 -101
- data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +5 -30
- data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +0 -3
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +12 -11
- data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +25 -13
- data/spec/unit/outputs/elasticsearch_spec.rb +1 -10
- metadata +4 -6
data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb
CHANGED
@@ -66,7 +66,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
 
       request_uri = format_url(url, path)
 
-      resp = @manticore.send(method.downcase, request_uri
+      resp = @manticore.send(method.downcase, request_uri, params)
 
       # Manticore returns lazy responses by default
       # We want to block for our usage, this will wait for the repsonse
@@ -106,7 +106,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
 
       request_uri.path = "#{request_uri.path}/#{parsed_path_and_query.path}".gsub(/\/{2,}/, "/")
 
-      request_uri
+      request_uri.to_s
     end
 
     def close
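Taken together, these two hunks move the string conversion into format_url: it now returns a plain String, and perform_request hands that straight to Manticore along with the request params. A rough sketch of the URL-joining behaviour, using Ruby's stdlib URI in place of the plugin's own URL objects (an assumption made only to keep the example self-contained):

    require 'uri'

    request_uri = URI.parse("http://localhost:9200")   # stand-in for the pool's URL object
    parsed_path_and_query = URI.parse("/_bulk")        # stand-in for the requested path

    request_uri.path = "#{request_uri.path}/#{parsed_path_and_query.path}".gsub(/\/{2,}/, "/")
    formatted = request_uri.to_s                       # already a String, as format_url now returns
    puts formatted                                     # => http://localhost:9200/_bulk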
data/lib/logstash/outputs/elasticsearch/http_client/pool.rb
CHANGED
@@ -107,12 +107,6 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
       @state_mutex.synchronize { @url_info }
     end
 
-    def connected_es_versions
-      @state_mutex.synchronize do
-        @url_info.values.select {|v| v[:state] == :alive }.map {|v| v[:version] }
-      end
-    end
-
     def urls
       url_info.keys
     end
@@ -160,7 +154,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
     ES2_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
     # Sniffs and returns the results. Does not update internal URLs!
     def check_sniff
-      _,
+      _, resp = perform_request(:get, @sniffing_path)
       parsed = LogStash::Json.load(resp.body)
 
       nodes = parsed['nodes']
@@ -168,10 +162,12 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
         @logger.warn("Sniff returned no nodes! Will not update hosts.")
         return nil
       else
-        case major_version(
+        case major_version(nodes)
         when 5, 6
           sniff_5x_and_above(nodes)
-        when 2
+        when 2
+          sniff_2x_1x(nodes)
+        when 1
           sniff_2x_1x(nodes)
         else
           @logger.warn("Could not determine version for nodes in ES cluster!")
@@ -180,8 +176,8 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
       end
     end
 
-    def major_version(
-
+    def major_version(nodes)
+      k,v = nodes.first; v['version'].split('.').first.to_i
     end
 
     def sniff_5x_and_above(nodes)
@@ -241,12 +237,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
         response = perform_request_to_url(url, :head, @healthcheck_path)
         # If no exception was raised it must have succeeded!
         logger.warn("Restored connection to ES instance", :url => url.sanitized.to_s)
-
-        es_version = get_es_version(url)
-        @state_mutex.synchronize do
-          meta[:version] = es_version
-          meta[:state] = :alive
-        end
+        @state_mutex.synchronize { meta[:state] = :alive }
       rescue HostUnreachableError, BadResponseCodeError => e
         logger.warn("Attempted to resurrect connection to dead ES instance, but got an error.", url: url.sanitized.to_s, error_type: e.class, error: e.message)
       end
@@ -262,16 +253,15 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
     end
 
     def perform_request(method, path, params={}, body=nil)
-      with_connection do |url
+      with_connection do |url|
         resp = perform_request_to_url(url, method, path, params, body)
-        [url,
+        [url, resp]
       end
     end
 
     [:get, :put, :post, :delete, :patch, :head].each do |method|
       define_method(method) do |path, params={}, body=nil|
-
-        response
+        perform_request(method, path, params, body)
       end
     end
 
@@ -333,10 +323,6 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
       @state_mutex.synchronize { @url_info.size }
     end
 
-    def es_versions
-      @state_mutex.synchronize { @url_info.size }
-    end
-
     def add_url(url)
       @url_info[url] ||= empty_url_meta
     end
@@ -358,7 +344,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
       # Custom error class used here so that users may retry attempts if they receive this error
       # should they choose to
       raise NoConnectionAvailableError, "No Available connections" unless url
-      yield url
+      yield url
     rescue HostUnreachableError => e
       # Mark the connection as dead here since this is likely not transient
       mark_dead(url, e)
@@ -429,10 +415,5 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
         end
       end
     end
-
-    def get_es_version(url)
-      request = perform_request_to_url(url, :get, ROOT_URI_PATH)
-      LogStash::Json.load(request.body)["version"]["number"]
-    end
   end
 end; end; end; end;
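The pool no longer tracks a version per connection; the sniffer reads the major version straight off the first node in the sniffed node list. A minimal sketch of that selection logic, using a hand-built `nodes` hash in place of a parsed `_nodes` response (the node names and version strings below are illustrative, not from the plugin):

    # Hypothetical nodes hash shaped like the parsed sniff response: one "version" string per node.
    nodes = {
      "node-a" => { "version" => "5.6.3" },
      "node-b" => { "version" => "5.6.2" }
    }

    # Same expression the new Pool#major_version uses: take the first node's
    # version string and keep only the integer major component.
    k, v = nodes.first
    major = v['version'].split('.').first.to_i
    puts major  # => 5; check_sniff then dispatches on 5/6, 2, or 1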
data/lib/logstash/outputs/elasticsearch/template_manager.rb
CHANGED
@@ -12,10 +12,12 @@ module LogStash; module Outputs; class ElasticSearch
     end
 
     private
+    def self.get_es_version(client)
+      client.get_version
+    end
+
     def self.get_es_major_version(client)
-
-      # pick the biggest major version
-      client.connected_es_versions.uniq.map {|version| version.split(".").first.to_i}.max
+      get_es_version(client)["number"][0]
     end
 
     def self.get_template(path, es_major_version)
@@ -28,7 +30,7 @@ module LogStash; module Outputs; class ElasticSearch
     end
 
     def self.default_template_path(es_major_version)
-      template_version = es_major_version == 1 ? 2 : es_major_version
+      template_version = es_major_version == "1" ? "2" : es_major_version
       default_template_name = "elasticsearch-template-es#{template_version}x.json"
       ::File.expand_path(default_template_name, ::File.dirname(__FILE__))
     end
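With `get_es_version(client)["number"][0]`, the major version is now the first character of the version string rather than an integer, which is why `default_template_path` compares against `"1"`. A minimal sketch of that derivation, assuming the root endpoint's `version` hash looks like `{"number" => "5.6.3"}` (the value is illustrative):

    version_hash = { "number" => "5.6.3" }                       # assumed shape of client.get_version

    es_major_version = version_hash["number"][0]                 # => "5" (a one-character String)
    template_version = es_major_version == "1" ? "2" : es_major_version
    puts "elasticsearch-template-es#{template_version}x.json"    # => elasticsearch-template-es5x.json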
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-elasticsearch'
-  s.version = '7.4.3'
+  s.version = '8.0.0'
   s.licenses = ['apache-2.0']
   s.summary = "Logstash Output to Elasticsearch"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/es_spec_helper.rb
CHANGED
@@ -10,12 +10,6 @@ module ESHelper
   def get_client
     Elasticsearch::Client.new(:hosts => [get_host_port])
   end
-
-  def self.es_version_satisfies?(*requirement)
-    es_version = RSpec.configuration.filter[:es_version] || ENV['ES_VERSION']
-    es_release_version = Gem::Version.new(es_version).release
-    Gem::Requirement.new(requirement).satisfied_by?(es_release_version)
-  end
 end
 
 RSpec.configure do |config|
data/spec/integration/outputs/compressed_indexing_spec.rb
CHANGED
@@ -9,60 +9,58 @@ RSpec::Matchers.define :a_valid_gzip_encoded_string do
   }
 end
 
-
-
-
-
-
-
-
-
-
-
-
-    "http_compression" => true
-  }
+describe "indexing with http_compression turned on", :integration => true, :version_greater_than_equal_to_5x => true do
+  let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:event_count) { 10000 + rand(500) }
+  let(:events) { event_count.times.map { event }.to_a }
+  let(:config) {
+    {
+      "hosts" => get_host_port,
+      "index" => index,
+      "http_compression" => true
     }
-
-
-  let(:es_url) { "http://#{get_host_port}" }
-  let(:index_url) {"#{es_url}/#{index}"}
-  let(:http_client_options) { {} }
-  let(:http_client) do
-    Manticore::Client.new(http_client_options)
-  end
+  }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
 
-
-
-
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) {"#{es_url}/#{index}"}
+  let(:http_client_options) { {} }
+  let(:http_client) do
+    Manticore::Client.new(http_client_options)
+  end
 
-
-
-
+  before do
+    subject.register
+  end
+
+  shared_examples "an indexer" do
+    it "ships events" do
+      subject.multi_receive(events)
 
-
+      http_client.post("#{es_url}/_refresh").call
 
-
-
-
-
+      response = http_client.get("#{index_url}/_count?q=*")
+      result = LogStash::Json.load(response.body)
+      cur_count = result["count"]
+      expect(cur_count).to eq(event_count)
 
-
-
-
-
-
-    end
+      response = http_client.get("#{index_url}/_search?q=*&size=1000")
+      result = LogStash::Json.load(response.body)
+      result["hits"]["hits"].each do |doc|
+        expect(doc["_type"]).to eq(type)
+        expect(doc["_index"]).to eq(index)
       end
     end
+  end
 
-
-
-
-
-
-  end
-
-  it_behaves_like("an indexer")
+  it "sets the correct content-encoding header and body is compressed" do
+    expect(subject.client.pool.adapter.client).to receive(:send).
+      with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
+      and_call_original
+    subject.multi_receive(events)
   end
+
+  it_behaves_like("an indexer")
 end
data/spec/integration/outputs/delete_spec.rb
CHANGED
@@ -2,64 +2,62 @@ require_relative "../../../spec/es_spec_helper"
 require "logstash/outputs/elasticsearch"
 
 
-
-
-
+describe "Versioned delete", :integration => true, :version_greater_than_equal_to_2x => true do
+  require "logstash/outputs/elasticsearch"
+
+  let(:es) { get_client }
+
+  before :each do
+    # Delete all templates first.
+    # Clean ES of data before we start.
+    es.indices.delete_template(:name => "*")
+    # This can fail if there are no indexes, ignore failure.
+    es.indices.delete(:index => "*") rescue nil
+    es.indices.refresh
+  end
 
-
+  context "when delete only" do
+    subject { LogStash::Outputs::ElasticSearch.new(settings) }
 
-    before
-
-      # Clean ES of data before we start.
-      es.indices.delete_template(:name => "*")
-      # This can fail if there are no indexes, ignore failure.
-      es.indices.delete(:index => "*") rescue nil
-      es.indices.refresh
+    before do
+      subject.register
     end
 
-
-
-
-
-
-
-
-
-
-
-
-
-        "hosts" => get_host_port(),
-        "document_id" => "%{my_id}",
-        "version" => "%{my_version}",
-        "version_type" => "external",
-        "action" => "%{my_action}"
-      }
-    end
-
-    it "should ignore non-monotonic external version updates" do
-      id = "ev2"
-      subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
-      r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
-      expect(r['_version']).to eq(99)
-      expect(r['_source']['message']).to eq('foo')
+    let(:settings) do
+      {
+        "manage_template" => true,
+        "index" => "logstash-delete",
+        "template_overwrite" => true,
+        "hosts" => get_host_port(),
+        "document_id" => "%{my_id}",
+        "version" => "%{my_version}",
+        "version_type" => "external",
+        "action" => "%{my_action}"
+      }
+    end
 
-
-
-
-
-
+    it "should ignore non-monotonic external version updates" do
+      id = "ev2"
+      subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
+      r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
+      expect(r['_version']).to eq(99)
+      expect(r['_source']['message']).to eq('foo')
+
+      subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 98)])
+      r2 = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
+      expect(r2['_version']).to eq(99)
+      expect(r2['_source']['message']).to eq('foo')
+    end
 
-
-
-
-
-
-
+    it "should commit monotonic external version updates" do
+      id = "ev3"
+      subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
+      r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
+      expect(r['_version']).to eq(99)
+      expect(r['_source']['message']).to eq('foo')
 
-
-
-    end
+      subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 100)])
+      expect { es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true) }.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
     end
   end
 end
data/spec/integration/outputs/groovy_update_spec.rb
CHANGED
@@ -1,150 +1,148 @@
 require_relative "../../../spec/es_spec_helper"
 
-
-
-require "logstash/outputs/elasticsearch"
+describe "Update actions using groovy scripts", :integration => true, :update_tests => 'groovy', :version_greater_than_equal_to_2x => true do
+  require "logstash/outputs/elasticsearch"
 
-
-
-
-
-
-
-
-
-
-
-
-
-  before :each do
-    @es = get_client
-    # Delete all templates first.
-    # Clean ES of data before we start.
-    @es.indices.delete_template(:name => "*")
-    # This can fail if there are no indexes, ignore failure.
-    @es.indices.delete(:index => "*") rescue nil
-    @es.index(
-      :index => 'logstash-update',
-      :type => 'logs',
-      :id => "123",
-      :body => { :message => 'Test', :counter => 1 }
-    )
-    @es.indices.refresh
-  end
+  def get_es_output( options={} )
+    settings = {
+      "manage_template" => true,
+      "index" => "logstash-update",
+      "template_overwrite" => true,
+      "hosts" => get_host_port(),
+      "action" => "update",
+      "script_lang" => "groovy"
+    }
+    LogStash::Outputs::ElasticSearch.new(settings.merge!(options))
+  end
 
-
-
-
-
-
-
-
-
+  before :each do
+    @es = get_client
+    # Delete all templates first.
+    # Clean ES of data before we start.
+    @es.indices.delete_template(:name => "*")
+    # This can fail if there are no indexes, ignore failure.
+    @es.indices.delete(:index => "*") rescue nil
+    @es.index(
+      :index => 'logstash-update',
+      :type => 'logs',
+      :id => "123",
+      :body => { :message => 'Test', :counter => 1 }
+    )
+    @es.indices.refresh
+  end
 
-
-
-
-
-
-
-
+  context "scripted updates" do
+    it "should increment a counter with event/doc 'count' variable" do
+      subject = get_es_output({ 'document_id' => "123", 'script' => 'scripted_update', 'script_type' => 'file' })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("count" => 2)])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+      insist { r["_source"]["counter"] } == 3
+    end
 
-
-
-
-
-
-
-
-      subject.register
-      subject.multi_receive([LogStash::Event.new("counter" => 3 )])
-      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
-      insist { r["_source"]["counter"] } == 4
-    end
+    it "should increment a counter with event/doc '[data][count]' nested variable" do
+      subject = get_es_output({ 'document_id' => "123", 'script' => 'scripted_update_nested', 'script_type' => 'file' })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("data" => { "count" => 3 })])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+      insist { r["_source"]["counter"] } == 4
+    end
 
-
-
-
-
-
-
-
-
-
-
-
-
-    end
+    it "should increment a counter with event/doc 'count' variable with inline script" do
+      subject = get_es_output({
+        'document_id' => "123",
+        'script' => 'ctx._source.counter += event["counter"]',
+        'script_lang' => 'groovy',
+        'script_type' => 'inline'
+      })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("counter" => 3 )])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+      insist { r["_source"]["counter"] } == 4
+    end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    it "should increment a counter with event/doc 'count' variable with event/doc as upsert and inline script" do
+      subject = get_es_output({
+        'document_id' => "123",
+        'doc_as_upsert' => true,
+        'script' => 'if( ctx._source.containsKey("counter") ){ ctx._source.counter += event["counter"]; } else { ctx._source.counter = event["counter"]; }',
+        'script_lang' => 'groovy',
+        'script_type' => 'inline'
+      })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("counter" => 3 )])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+      insist { r["_source"]["counter"] } == 4
+    end
 
-
-
-
-
-
-
-
-
-
-
-
-
-    end
+    it "should, with new doc, set a counter with event/doc 'count' variable with event/doc as upsert and inline script" do
+      subject = get_es_output({
+        'document_id' => "456",
+        'doc_as_upsert' => true,
+        'script' => 'if( ctx._source.containsKey("counter") ){ ctx._source.counter += event["count"]; } else { ctx._source.counter = event["count"]; }',
+        'script_lang' => 'groovy',
+        'script_type' => 'inline'
+      })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("counter" => 3 )])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["counter"] } == 3
     end
 
-
-
-
-
-
-
-
-
+    it "should increment a counter with event/doc 'count' variable with indexed script" do
+      @es.put_script lang: 'groovy', id: 'indexed_update', body: { script: 'ctx._source.counter += event["count"]' }
+      subject = get_es_output({
+        'document_id' => "123",
+        'script' => 'indexed_update',
+        'script_lang' => 'groovy',
+        'script_type' => 'indexed'
+      })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("count" => 4 )])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+      insist { r["_source"]["counter"] } == 5
+    end
+  end
+
+  context "when update with upsert" do
+    it "should create new documents with provided upsert" do
+      subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["message"] } == 'upsert message'
+    end
 
-
-
-
-
-
-
-
+    it "should create new documents with event/doc as upsert" do
+      subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["message"] } == 'sample message here'
+    end
 
-
-
-
-    end
+    it "should fail on documents with event/doc as upsert at external version" do
+      subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true, 'version' => 999, "version_type" => "external" })
+      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
     end
+  end
 
-
-
-
-
-
-
-
-
+  context "updates with scripted upsert" do
+    it "should create new documents with upsert content" do
+      subject = get_es_output({ 'document_id' => "456", 'script' => 'scripted_update', 'upsert' => '{"message": "upsert message"}', 'script_type' => 'file' })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["message"] } == 'upsert message'
+    end
 
-
-
-
-
-
-
-
-    end
+    it "should create new documents with event/doc as script params" do
+      subject = get_es_output({ 'document_id' => "456", 'script' => 'scripted_upsert', 'scripted_upsert' => true, 'script_type' => 'file' })
+      subject.register
+      subject.multi_receive([LogStash::Event.new("counter" => 1)])
+      @es.indices.refresh
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["counter"] } == 1
     end
   end
 end