logstash-output-opensearch 1.0.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +0 -0
- data.tar.gz.sig +0 -0
- data/ADMINS.md +29 -0
- data/CODE_OF_CONDUCT.md +25 -0
- data/CONTRIBUTING.md +99 -0
- data/DEVELOPER_GUIDE.md +208 -0
- data/Gemfile +20 -0
- data/LICENSE +202 -0
- data/MAINTAINERS.md +71 -0
- data/NOTICE +2 -0
- data/README.md +37 -0
- data/RELEASING.md +36 -0
- data/SECURITY.md +3 -0
- data/lib/logstash/outputs/opensearch.rb +449 -0
- data/lib/logstash/outputs/opensearch/distribution_checker.rb +44 -0
- data/lib/logstash/outputs/opensearch/http_client.rb +465 -0
- data/lib/logstash/outputs/opensearch/http_client/manticore_adapter.rb +140 -0
- data/lib/logstash/outputs/opensearch/http_client/pool.rb +467 -0
- data/lib/logstash/outputs/opensearch/http_client_builder.rb +182 -0
- data/lib/logstash/outputs/opensearch/template_manager.rb +60 -0
- data/lib/logstash/outputs/opensearch/templates/ecs-disabled/1x.json +44 -0
- data/lib/logstash/outputs/opensearch/templates/ecs-disabled/7x.json +44 -0
- data/lib/logstash/plugin_mixins/opensearch/api_configs.rb +168 -0
- data/lib/logstash/plugin_mixins/opensearch/common.rb +294 -0
- data/lib/logstash/plugin_mixins/opensearch/noop_distribution_checker.rb +18 -0
- data/logstash-output-opensearch.gemspec +40 -0
- data/spec/fixtures/_nodes/nodes.json +74 -0
- data/spec/fixtures/htpasswd +2 -0
- data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
- data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
- data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
- data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
- data/spec/integration/outputs/compressed_indexing_spec.rb +76 -0
- data/spec/integration/outputs/create_spec.rb +76 -0
- data/spec/integration/outputs/delete_spec.rb +72 -0
- data/spec/integration/outputs/index_spec.rb +164 -0
- data/spec/integration/outputs/index_version_spec.rb +110 -0
- data/spec/integration/outputs/ingest_pipeline_spec.rb +82 -0
- data/spec/integration/outputs/metrics_spec.rb +75 -0
- data/spec/integration/outputs/no_opensearch_on_startup_spec.rb +67 -0
- data/spec/integration/outputs/painless_update_spec.rb +147 -0
- data/spec/integration/outputs/parent_spec.rb +103 -0
- data/spec/integration/outputs/retry_spec.rb +182 -0
- data/spec/integration/outputs/routing_spec.rb +70 -0
- data/spec/integration/outputs/sniffer_spec.rb +70 -0
- data/spec/integration/outputs/templates_spec.rb +105 -0
- data/spec/integration/outputs/update_spec.rb +123 -0
- data/spec/opensearch_spec_helper.rb +141 -0
- data/spec/spec_helper.rb +19 -0
- data/spec/unit/http_client_builder_spec.rb +194 -0
- data/spec/unit/outputs/error_whitelist_spec.rb +62 -0
- data/spec/unit/outputs/opensearch/http_client/manticore_adapter_spec.rb +159 -0
- data/spec/unit/outputs/opensearch/http_client/pool_spec.rb +306 -0
- data/spec/unit/outputs/opensearch/http_client_spec.rb +292 -0
- data/spec/unit/outputs/opensearch/template_manager_spec.rb +36 -0
- data/spec/unit/outputs/opensearch_proxy_spec.rb +112 -0
- data/spec/unit/outputs/opensearch_spec.rb +800 -0
- data/spec/unit/outputs/opensearch_ssl_spec.rb +179 -0
- metadata +289 -0
- metadata.gz.sig +0 -0
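
The integration specs included in this package (a selection of which appears below) all drive the output through the same lifecycle: build LogStash::Outputs::OpenSearch from a settings hash, register it, feed it event batches via multi_receive, and close it. As a quick orientation, here is a minimal sketch of that pattern; it is illustrative only, not part of the package, and the "hosts" and "index" values are placeholders:

# Illustrative sketch of the plugin lifecycle exercised by the specs below.
# "hosts" and "index" are placeholder values, not plugin defaults.
require "logstash/outputs/opensearch"

output = LogStash::Outputs::OpenSearch.new(
  "hosts" => ["localhost:9200"],
  "index" => "logstash-2014.11.17",
  "manage_template" => true,
  "template_overwrite" => true
)

output.register                                                     # validate config and (with manage_template) install the index template
output.multi_receive([LogStash::Event.new("message" => "hello")])   # bulk-index a batch of events
output.close                                                        # flush and shut down the HTTP client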
data/spec/integration/outputs/retry_spec.rb
@@ -0,0 +1,182 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

require "logstash/outputs/opensearch"
require_relative "../../../spec/opensearch_spec_helper"

describe "failures in bulk class expected behavior", :integration => true do
  let(:template) { '{"template" : "not important, will be updated by :index"}' }
  let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
  let(:action1) do
    ["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event1.to_hash]
  end
  let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
  let(:action2) do
    ["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event2.to_hash]
  end
  let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") }

  def mock_actions_with_response(*resp)
    raise ArgumentError, "Cannot mock actions until subject is registered and has a client!" unless subject.client

    expanded_responses = resp.map do |resp|
      items = resp["statuses"] && resp["statuses"].map do |status|
        {"create" => {"status" => status, "error" => "Error for #{status}"}}
      end

      {
        "errors" => resp["errors"],
        "items" => items
      }
    end

    allow(subject.client).to receive(:bulk).and_return(*expanded_responses)
  end

  subject! do
    settings = {
      "manage_template" => true,
      "index" => "logstash-2014.11.17",
      "template_overwrite" => true,
      "hosts" => get_host_port(),
      "retry_max_interval" => 64,
      "retry_initial_interval" => 2
    }
    next LogStash::Outputs::OpenSearch.new(settings)
  end

  before :each do
    # Delete all templates first.
    require "elasticsearch"
    allow(Stud).to receive(:stoppable_sleep)

    # Clean OpenSearch of data before we start.
    @client = get_client
    @client.indices.delete_template(:name => "*")
    @client.indices.delete(:index => "*")
    @client.indices.refresh
  end

  after :each do
    subject.close
  end

  it "should retry exactly once if all bulk actions are successful" do
    expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original
    subject.register
    mock_actions_with_response({"errors" => false})
    subject.multi_receive([event1, event2])
  end

  it "retry exceptions within the submit body" do
    call_count = 0
    subject.register

    expect(subject.client).to receive(:bulk).with(anything).exactly(3).times do
      if (call_count += 1) <= 2
        raise "error first two times"
      else
        {"errors" => false}
      end
    end

    subject.multi_receive([event1])
  end

  it "should retry actions with response status of 503" do
    expect(subject).to receive(:submit).with([action1, action1, action1, action2]).ordered.once.and_call_original
    expect(subject).to receive(:submit).with([action1, action2]).ordered.once.and_call_original
    expect(subject).to receive(:submit).with([action2]).ordered.once.and_call_original

    subject.register
    mock_actions_with_response({"errors" => true, "statuses" => [200, 200, 503, 503]},
                               {"errors" => true, "statuses" => [200, 503]},
                               {"errors" => false})

    subject.multi_receive([event1, event1, event1, event2])
  end

  retryable_codes = [429, 502, 503]

  retryable_codes.each do |code|
    it "should retry actions with response status of #{code}" do
      subject.register

      mock_actions_with_response({"errors" => true, "statuses" => [code]},
                                 {"errors" => false})
      expect(subject).to receive(:submit).with([action1]).twice.and_call_original

      subject.multi_receive([event1])
    end
  end

  it "should retry an event infinitely until a non retryable status occurs" do
    expect(subject).to receive(:submit).with([action1]).exactly(6).times.and_call_original
    subject.register

    mock_actions_with_response({"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [400]})

    subject.multi_receive([event1])
  end

  it "should sleep for an exponentially increasing amount of time on each retry, capped by the max" do
    [2, 4, 8, 16, 32, 64, 64].each_with_index do |interval,i|
      expect(Stud).to receive(:stoppable_sleep).with(interval).ordered
    end

    subject.register

    mock_actions_with_response({"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [429]},
                               {"errors" => true, "statuses" => [400]})

    subject.multi_receive([event1])
  end

  it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunately)" do
    subject.register
    expect(subject).to receive(:submit).once.and_call_original
    subject.multi_receive([invalid_event])
    subject.close

    @client.indices.refresh
    r = @client.search(index: 'logstash-*')
    expect(r).to have_hits(0)
  end

  it "successful requests should not be appended to retry queue" do
    expect(subject).to receive(:submit).once.and_call_original

    subject.register
    subject.multi_receive([event1])
    subject.close
    @client.indices.refresh
    r = @client.search(index: 'logstash-*')
    expect(r).to have_hits(1)
  end

  it "should only index proper events" do
    subject.register
    subject.multi_receive([invalid_event, event1])
    subject.close

    @client.indices.refresh
    r = @client.search(index: 'logstash-*')
    expect(r).to have_hits(1)
  end
end
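
The sleep and retry examples above pin down the plugin's backoff contract: retryable bulk statuses (429, 502, 503) are resubmitted, and the pause between attempts doubles from "retry_initial_interval" up to the "retry_max_interval" cap. The following sketch reproduces that progression for the spec's settings of 2 and 64; it illustrates the asserted schedule, not the plugin's implementation:

# Illustrative sketch of the backoff schedule asserted above: the interval
# doubles on each retry, starting at retry_initial_interval and capped at
# retry_max_interval.
def backoff_intervals(initial, max, attempts)
  (0...attempts).map { |n| [initial * (2**n), max].min }
end

backoff_intervals(2, 64, 7)  # => [2, 4, 8, 16, 32, 64, 64], matching the Stud.stoppable_sleep expectations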
data/spec/integration/outputs/routing_spec.rb
@@ -0,0 +1,70 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

require_relative "../../../spec/opensearch_spec_helper"

shared_examples "a routing indexer" do
  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
  let(:type) { 10.times.collect { rand(10).to_s }.join("") }
  let(:event_count) { 10000 + rand(500) }
  let(:routing) { "not_implemented" }
  let(:config) { "not_implemented" }
  subject { LogStash::Outputs::OpenSearch.new(config) }

  before do
    subject.register
    event_count.times do
      subject.multi_receive([LogStash::Event.new("message" => "test", "type" => type)])
    end
  end

  it "ships events" do
    index_url = "http://#{get_host_port()}/#{index}"

    client = Manticore::Client.new
    client.post("#{index_url}/_refresh").call

    # Wait until all events are available.
    Stud::try(10.times) do
      data = ""

      response = client.get("#{index_url}/_count?q=*&routing=#{routing}").call
      result = LogStash::Json.load(response.body)
      cur_count = result["count"]
      expect(cur_count).to eq(event_count)
    end
  end
end

describe "(http protocol) index events with static routing", :integration => true do
  it_behaves_like 'a routing indexer' do
    let(:routing) { "test" }
    let(:config) {
      {
        "hosts" => get_host_port,
        "index" => index,
        "routing" => routing
      }
    }
  end
end

describe "(http_protocol) index events with fieldref in routing value", :integration => true do
  it_behaves_like 'a routing indexer' do
    let(:routing) { "test" }
    let(:config) {
      {
        "hosts" => get_host_port,
        "index" => index,
        "routing" => "%{message}"
      }
    }
  end
end
data/spec/integration/outputs/sniffer_spec.rb
@@ -0,0 +1,70 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

require_relative "../../../spec/opensearch_spec_helper"
require "logstash/outputs/opensearch/http_client"
require "json"
require "socket"

describe "pool sniffer", :integration => true do
  let(:logger) { Cabin::Channel.get }
  let(:adapter) { LogStash::Outputs::OpenSearch::HttpClient::ManticoreAdapter.new(logger) }
  let(:es_host) { get_host_port.split(":").first }
  let(:es_port) { get_host_port.split(":").last }
  let(:es_ip) { IPSocket.getaddress(es_host) }
  let(:initial_urls) { [::LogStash::Util::SafeURI.new("http://#{get_host_port}")] }
  let(:options) do
    {
      :resurrect_delay => 2, # Shorten the delay a bit to speed up tests
      :url_normalizer => proc {|u| u},
      :metric => ::LogStash::Instrument::NullMetric.new(:dummy).namespace(:alsodummy)
    }
  end

  subject { LogStash::Outputs::OpenSearch::HttpClient::Pool.new(logger, adapter, initial_urls, options) }

  describe("Simple sniff parsing") do
    before(:each) { subject.start }

    context "with single node" do
      it "should execute a sniff without error" do
        expect { subject.check_sniff }.not_to raise_error
      end

      it "should return single sniff URL" do
        uris = subject.check_sniff

        expect(uris.size).to eq(1)
      end
    end
  end


  describe("Complex sniff parsing") do
    before(:each) do
      response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/nodes.json"))
      allow(subject).to receive(:perform_request).and_return([nil, { version: "any" }, response_double])
      subject.start
    end

    context "with mixed master-only, data-only, and data + master nodes" do
      it "should execute a sniff without error" do
        expect { subject.check_sniff }.not_to raise_error
      end

      it "should return the correct sniff URLs" do
        # ie. with the master-only node, and with the node name correctly set.
        uris = subject.check_sniff

        expect(uris).to include(::LogStash::Util::SafeURI.new("//dev-masterdata:9201"), ::LogStash::Util::SafeURI.new("//dev-data:9202"))
      end
    end
  end
end
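
The "Complex sniff parsing" examples above check one idea: when the pool sniffs the cluster via the nodes API, only nodes that can hold data become candidate URLs, so a master-only node from the fixture is left out. The sketch below is purely illustrative of that filtering idea, not the pool's actual code (which lives in lib/logstash/outputs/opensearch/http_client/pool.rb); the field names follow the standard _nodes/http response shape, and the fixture path is the one the spec uses:

# Illustrative sketch only: keep data-capable nodes from a _nodes/http payload
# and collect their publish addresses. Not the plugin's implementation.
require "json"

nodes = JSON.parse(File.read("spec/fixtures/_nodes/nodes.json"))["nodes"]

data_nodes = nodes.values.select do |node|
  roles = node["roles"]
  roles.nil? || roles.any? { |role| role.include?("data") }
end

addresses = data_nodes.map { |node| node.dig("http", "publish_address") }.compact
# With the bundled fixture, the spec expects the resulting sniffed URLs to include
# //dev-masterdata:9201 and //dev-data:9202, but not the master-only node.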
data/spec/integration/outputs/templates_spec.rb
@@ -0,0 +1,105 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

require_relative "../../../spec/opensearch_spec_helper"

describe "index template expected behavior", :integration => true do
  subject! do
    require "logstash/outputs/opensearch"
    settings = {
      "manage_template" => true,
      "template_overwrite" => true,
      "hosts" => "#{get_host_port()}"
    }
    next LogStash::Outputs::OpenSearch.new(settings)
  end

  before :each do
    # Delete all templates first.
    require "elasticsearch"

    # Clean OpenSearch of data before we start.
    @client = get_client
    @client.indices.delete_template(:name => "*")

    # This can fail if there are no indexes, ignore failure.
    @client.indices.delete(:index => "*") rescue nil

    subject.register

    subject.multi_receive([
      LogStash::Event.new("message" => "sample message here"),
      LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
      LogStash::Event.new("somevalue" => 100),
      LogStash::Event.new("somevalue" => 10),
      LogStash::Event.new("somevalue" => 1),
      LogStash::Event.new("country" => "us"),
      LogStash::Event.new("country" => "at"),
      LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
    ])

    @client.indices.refresh

    # Wait or fail until everything's indexed.
    Stud::try(20.times) do
      r = @client.search(index: 'logstash-*')
      expect(r).to have_hits(8)
    end
  end

  it "permits phrase searching on string fields" do
    results = @client.search(:q => "message:\"sample message\"")
    expect(results).to have_hits(1)
    expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
  end

  it "numbers dynamically map to a numeric type and permit range queries" do
    results = @client.search(:q => "somevalue:[5 TO 105]")
    expect(results).to have_hits(2)

    values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
    expect(values).to include(10)
    expect(values).to include(100)
    expect(values).to_not include(1)
  end

  it "does not create .keyword field for top-level message field" do
    results = @client.search(:q => "message.keyword:\"sample message here\"")
    expect(results).to have_hits(0)
  end

  it "creates .keyword field for nested message fields" do
    results = @client.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
    expect(results).to have_hits(1)
  end

  it "creates .keyword field from any string field which is not_analyzed" do
    results = @client.search(:q => "country.keyword:\"us\"")
    expect(results).to have_hits(1)
    expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")

    # partial or terms should not work.
    results = @client.search(:q => "country.keyword:\"u\"")
    expect(results).to have_hits(0)
  end

  it "make [geoip][location] a geo_point" do
    expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point")
  end

  it "aggregate .keyword results correctly " do
    results = @client.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
    terms = results["buckets"].collect { |b| b["key"] }

    expect(terms).to include("us")

    # 'at' is a stopword, make sure stopwords are not ignored.
    expect(terms).to include("at")
  end
end
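
These examples hinge on the dynamic mappings produced by the bundled ecs-disabled template: string fields become text with a .keyword sub-field (except the top-level message field), and [geoip][location] becomes a geo_point. Roughly, the mapping shape being relied on looks like the following sketch; it is an illustration of the expected mapping, not the template file itself, whose authoritative contents are in lib/logstash/outputs/opensearch/templates/ecs-disabled/:

# Sketch of the field mappings the template spec above depends on (illustrative values).
string_field_mapping = {
  "type"   => "text",
  "fields" => { "keyword" => { "type" => "keyword", "ignore_above" => 256 } }
}

geoip_location_mapping = { "type" => "geo_point" }

top_level_message_mapping = { "type" => "text" }   # no .keyword sub-field, hence the zero-hit expectation above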
data/spec/integration/outputs/update_spec.rb
@@ -0,0 +1,123 @@
# SPDX-License-Identifier: Apache-2.0
#
# The OpenSearch Contributors require contributions made to
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.
#
# Modifications Copyright OpenSearch Contributors. See
# GitHub history for details.

require_relative "../../../spec/opensearch_spec_helper"

describe "Update actions without scripts", :integration => true do
  require "logstash/outputs/opensearch"

  def get_es_output( options={} )
    settings = {
      "manage_template" => true,
      "index" => "logstash-update",
      "template_overwrite" => true,
      "hosts" => get_host_port(),
      "action" => "update"
    }
    LogStash::Outputs::OpenSearch.new(settings.merge!(options))
  end

  before :each do
    @client = get_client
    # Delete all templates first.
    # Clean OpenSearch of data before we start.
    @client.indices.delete_template(:name => "*")
    # This can fail if there are no indexes, ignore failure.
    @client.indices.delete(:index => "*") rescue nil
    @client.index(
      :index => 'logstash-update',
      :type => doc_type,
      :id => "123",
      :body => { :message => 'Test', :counter => 1 }
    )
    @client.indices.refresh
  end

  it "should fail without a document_id" do
    subject = get_es_output
    expect { subject.register }.to raise_error(LogStash::ConfigurationError)
  end

  context "when update only" do
    it "should not create new document" do
      subject = get_es_output({ 'document_id' => "456" } )
      subject.register
      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
      expect {@client.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
    end

    it "should update existing document" do
      subject = get_es_output({ 'document_id' => "123" })
      subject.register
      subject.multi_receive([LogStash::Event.new("message" => "updated message here")])
      r = @client.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
      expect(r["_source"]["message"]).to eq('updated message here')
    end

    # The es ruby client treats the data field differently. Make sure this doesn't
    # raise an exception
    it "should update an existing document that has a 'data' field" do
      subject = get_es_output({ 'document_id' => "123" })
      subject.register
      subject.multi_receive([LogStash::Event.new("data" => "updated message here", "message" => "foo")])
      r = @client.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
      expect(r["_source"]["data"]).to eq('updated message here')
      expect(r["_source"]["message"]).to eq('foo')
    end

    it "should allow default (internal) version" do
      subject = get_es_output({ 'document_id' => "123", "version" => "99" })
      subject.register
    end

    it "should allow internal version" do
      subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "internal" })
      subject.register
    end

    it "should not allow external version" do
      subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "external" })
      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
    end

    it "should not allow external_gt version" do
      subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "external_gt" })
      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
    end

    it "should not allow external_gte version" do
      subject = get_es_output({ 'document_id' => "123", "version" => "99", "version_type" => "external_gte" })
      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
    end

  end

  context "when update with upsert" do
    it "should create new documents with provided upsert" do
      subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
      subject.register
      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
      r = @client.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
      expect(r["_source"]["message"]).to eq('upsert message')
    end

    it "should create new documents with event/doc as upsert" do
      subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
      subject.register
      subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
      r = @client.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
      expect(r["_source"]["message"]).to eq('sample message here')
    end

    it "should fail on documents with event/doc as upsert at external version" do
      subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true, 'version' => 999, "version_type" => "external" })
      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
    end
  end
end
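
For context on what these update examples exercise at the OpenSearch level: with "action" => "update", each event is sent as a partial document, and the upsert options decide what happens when the target document id does not exist yet. The sketch below shows the two request-body shapes involved, using the spec's own index and id; it illustrates the standard update API, not the exact payload the plugin serializes:

# Illustrative update bodies for the behaviors tested above; "logstash-update"
# and id "456" come from the spec, the rest is the standard _update API shape.
require "json"

# 'doc_as_upsert' => true: the event itself becomes the new document if id 456 is missing.
doc_as_upsert_body = {
  "doc"           => { "message" => "sample message here" },
  "doc_as_upsert" => true
}

# 'upsert' => '{"message": "upsert message"}': a separate initial document is supplied instead.
upsert_body = {
  "doc"    => { "message" => "sample message here" },
  "upsert" => { "message" => "upsert message" }
}

puts JSON.pretty_generate(doc_as_upsert_body)   # sent as POST /logstash-update/_update/456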